repo_name (stringlengths 5-100) | path (stringlengths 4-231) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int64 6-947k) | score (float64 0-0.34) | prefix (stringlengths 0-8.16k) | middle (stringlengths 3-512) | suffix (stringlengths 0-8.17k)
|---|---|---|---|---|---|---|---|---|
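The prefix/middle/suffix columns suggest a fill-in-the-middle layout: each row stores one source file split into three consecutive pieces. As a minimal sketch, assuming a row is exposed as a Python dict keyed by the column names above (the loader itself is not part of this dump and is only assumed here), the original file can be reassembled like this:

def reassemble(row):
    """Concatenate the three text columns to recover the original file."""
    # prefix + middle + suffix covers the whole file; middle is the span a
    # fill-in-the-middle model would be asked to predict.
    return row["prefix"] + row["middle"] + row["suffix"]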
pjotrligthart/openmoo2-unofficial | game/gui/screen.py | Python | gpl-2.0 | 8,413 | 0.006894 |
import time
from pygame.locals import *
import gui
MOUSE_LEFT_BUTTON = 1
MOUSE_MIDDLE_BUTTON = 2
MOUSE_RIGHT_BUTTON = 3
MOUSE_WHEELUP = 4
MOUSE_WHEELDOWN = 5
class Screen(object):
"""Base gui screen class
every game screen class should inherit from this one
"""
__triggers = []
__old_hover = None
__hover = None
__hover_changed = False
def __init__(self):
pass
def log_info(self, message):
"""Prints an INFO message to standard output"""
ts = int(time.time())
print("# INFO %i ... %s" % (ts, message))
def log_error(self, message):
"""Prints an ERROR message to standard output"""
ts = int(time.time())
print("! ERROR %i ... %s" % (ts, message))
def reset_triggers_list(self):
"""Clears the screen's trigger list"""
self.__triggers = []
def add_trigger(self, trigger):
"""Appends given trigger to the end of screen's trigger list"""
if not trigger.has_key('hover_id'):
trigger['hover_id'] = None
self.__triggers.append(trigger)
def list_triggers(self):
"""Returns the screen's list of triggers"""
return self.__triggers
def get_timestamp(self, zoom = 1):
"""Returns an actual timestamp"""
return int(time.time() * zoom)
def get_image(self, img_key, subkey1 = None, subkey2 = None, subkey3 = None):
"""Returns an image object from GUI engine, identified by its key(s)"""
return gui.GUI.get_image(img_key, subkey1, subkey2, subkey3)
def redraw_flip(self):
"""Redraws the screen, takes care about mouse cursor and flips the graphic buffer to display"""
self.draw()
gui.GUI.highlight_triggers(self.list_triggers())
gui.GUI.flip()
def redraw_noflip(self):
"""Redraws the screen, takes care about mouse cursor but doesn't flip the buffer to display"""
self.draw()
gui.GUI.highlight_triggers(self.list_triggers())
def prepare(self):
"""This method should be implemented by screens that require some
special actions each time before the screen is run.
For example to reset screen to a well known state to prevent unexpected behaviour.
"""
pass
def draw(self):
"""All static graphic output should be implemented in this method.
Unless there is only a dynamic graphic (animations),
every screen should implement this method.
"""
pass
def animate(self):
"""Entry point for Scre
|
en animations, e.g. ship trajectory on MainScreen.
GUI engine calls this method periodically
Animations should be time-dependant - such screens have to implement the timing!
"""
|
pass
def get_escape_trigger(self):
"""Returns standard trigger for sending escape action"""
return {'action': "ESCAPE"}
def on_mousebuttonup(self, event):
"""Default implementation of mouse click event serving.
Checks the mouse wheel events (up and down scrolling) and regular mouse buttons.
If the event's subject is the left mouse button it checks the mouse position against the trigger list and
returns the first trigger where mouse positions is within its rectangle.
There is a good chance that no screen would have to override this method.
"""
if event.button == MOUSE_MIDDLE_BUTTON:
print event
elif event.button == MOUSE_WHEELUP:
return {'action': "SCROLL_UP"}
elif event.button == MOUSE_WHEELDOWN:
return {'action': "SCROLL_DOWN"}
else:
triggers_list = self.list_triggers()
for trigger in triggers_list:
if trigger['rect'].collidepoint(event.pos):
if event.button == MOUSE_LEFT_BUTTON:
trigger['mouse_pos'] = event.pos
return trigger
elif event.button == MOUSE_RIGHT_BUTTON:
return {'action': "help", 'help': trigger['action']}
def on_keydown(self, event):
"""Default implementation of a keyboard event handling.
If keypress is detected by a GUI engine it calls this method.
The pressed key is checked against the trigger list.
Returns the first trigger where the key matches the pressed or
None if no trigger matches the keypress
There is a good chance that no screen would have to override this method.
"""
print("@ screen.Screen::on_keydown()")
print(" scancode = %i" % event.scancode)
print(" key = %i" % event.key)
if event.key == K_ESCAPE:
return {'action': "ESCAPE"}
else:
triggers_list = self.list_triggers()
for trigger in triggers_list:
if trigger.has_key('key') and trigger['key'] == event.key:
return trigger
return {'action': "key", 'key': event.key}
def update_hover(self, mouse_pos):
"""This method is invoked by a GUI engine on every pure mouse move
and right before the screen's on_mousemotion() method.
Mouse position is checked against screen's trigger list.
If hover is detected (=mouse position is inside the trigger's rectangle)
the trigger is copied and can be returned by get_hover() method
Also if the previously stored value is different than the new one,
the __hover_changed flag is set to True
The idea is to handle mouse hover detection separately,
so other methods could rely on get_hover() and hover_changed() methods.
Probably no screen will need to override this method.
"""
for trigger in self.list_triggers():
if trigger.has_key('hover_id') and trigger['rect'].collidepoint(mouse_pos):
if self.__hover != trigger:
self.__hover_changed = True
self.__hover = trigger
break
def get_hover(self):
"""Returns the current hover trigger"""
return self.__hover
def hover_changed(self):
"""Returns True if screen's hover has changed since last call of this method"""
if self.__hover_changed:
self.__hover_changed = False
return True
else:
return False
def on_mousemotion(self, event):
"""Invoked by a GUI engine on every pure (non-dragging) mouse move.
Currently no screen needs to override this empty implementation.
"""
pass
def get_drag_item(self, mouse_pos):
""""""
for trigger in self.list_triggers():
if trigger.has_key('drag_id') and trigger['rect'].collidepoint(mouse_pos):
return trigger['drag_id']
return None
def on_mousedrag(self, drag_item, pos, rel):
"""Invoked by a GUI engine when left mouse button is being held, drag item is set and mouse moves"""
pass
def on_mousedrop(self, drag_item, (mouse_x, mouse_y)):
"""Invoked by a GUI engine when mouse dragging stops
(drag item was set and left mouse button was released).
"""
pass
def process_trigger(self, trigger):
"""Empty implementation of a trigger handling
If a screen trigger is positively evaluated
(e.g. returned from on_mousebuttonup() or on_keydown() methods)
it's passed as a trigger argument to this method
Every screen should override this method to handle the proper actions.
"""
pass
def enter(self):
""" Called by GUI engine right before gui_client::run_screen() is invoked
Suitable for saving initial state that can be reversed by the screen's cancel() method
"""
pass
def leave_confirm(self):
""" Called by GUI engine when CONFIRM trigger is activated
Every screen that sends data to the game server should implement this method
"""
pass
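# Illustrative only (not part of the original file): a minimal Screen subclass
# sketch showing how the trigger dict and the hover helpers above are meant to
# be used. It relies on the gui/time/pygame.locals imports at the top of this
# module; the "QUIT" action name and the trigger geometry are hypothetical.
class ExampleScreen(Screen):
    def prepare(self):
        import pygame
        self.reset_triggers_list()
        # A trigger needs at least a 'rect' (hit area) and an 'action';
        # 'key' lets on_keydown() match it, 'hover_id' feeds update_hover().
        self.add_trigger({
            'action': "QUIT",
            'rect': pygame.Rect(10, 10, 100, 30),
            'key': K_q,
            'hover_id': "quit_button",
        })
    def animate(self):
        # Redraw only when the hovered trigger changed since the last poll.
        if self.hover_changed():
            self.redraw_flip()
    def process_trigger(self, trigger):
        # Triggers returned by on_mousebuttonup()/on_keydown() end up here.
        if trigger['action'] == "QUIT":
            self.log_info("quit requested")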
|
jsirois/pants | src/python/pants/backend/python/util_rules/pex_environment.py | Python | apache-2.0 | 10,132 | 0.003652 |
# Copyright 2019 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import os
from dataclasses import dataclass
from pathlib import Path
from textwrap import dedent
from typing import Mapping, Optional, Tuple, cast
from pants.core.util_rules import subprocess_environment
from pants.core.util_rules.subprocess_environment import SubprocessEnvironmentVars
from pants.engine import process
from pants.engine.engine_aware import EngineAwareReturnType
from pants.engine.environment import Environment, EnvironmentRequest
from pants.engine.process import BinaryPath, BinaryPathRequest, BinaryPaths, BinaryPathTest
from pants.engine.rules import Get, MultiGet, collect_rules, rule
from pants.option.global_options import GlobalOptions
from pants.option.subsystem import Subsystem
from pants.python.python_setup import PythonSetup
from pants.util.frozendict import FrozenDict
from pants.util.logging import LogLevel
from pants.util.memo import memoized_method
from pants.util.ordered_set import OrderedSet
from pants.util.strutil import create_path_env_var
class PexRuntimeEnvironment(Subsystem):
options_scope = "pex"
help = "How Pants uses Pex to run Python subprocesses."
@classmethod
def register_options(cls, register):
super().register_options(register)
# TODO(#9760): We'll want to deprecate this in favor of a global option which allows for a
# per-process override.
register(
"--executable-search-paths",
advanced=True,
type=list,
default=["<PATH>"],
metavar="<binary-paths>",
help=(
"The PATH value that will be used by the PEX subprocess and any subprocesses it "
'spawns.\n\nThe special string "<PATH>" will expand to the contents of the PATH '
"env var."
),
)
register(
"--bootstrap-interpreter-names",
advanced=True,
type=list,
default=["python", "python3", "python2"],
metavar="<bootstrap-python-names>",
help=(
"The names of Python binaries to search for to bootstrap PEX files with.\n\nThis "
"does not impact which Python interpreter is used to run your code, only what is "
"used to run the PEX tool. See the `interpreter_search_paths` option in "
"`[python-setup]` to influence where interpreters are searched for."
),
)
register(
"--verbosity",
advanced=True,
type=int,
default=0,
help=(
"Set the verbosity level of PEX logging, from 0 (no logging) up to 9 (max logging)."
),
)
@memoized_method
def path(self, env: Environment) -> Tuple[str, ...]:
def iter_path_entries():
for entry in self.options.executable_search_paths:
if entry == "<PATH>":
path = env.get("PATH")
if path:
for path_entry in path.split(os.pathsep):
yield path_entry
else:
yield entry
return tuple(OrderedSet(iter_path_entries()))
@property
def bootstrap_interpreter_names(self) -> Tuple[str, ...]:
return tuple(self.options.bootstrap_interpreter_names)
@property
def verbosity(self) -> int:
level = cast(int, self.options.verbosity)
if level < 0 or level > 9:
raise ValueError("verbosity level must be between 0 and 9")
return level
class PythonExecutable(BinaryPath, EngineAwareReturnType):
"""The BinaryPath of a Python executable."""
def message(self) -> str:
return f"Selected {self.path} to run PEXes with."
@dataclass(frozen=True)
class PexEnvironment(EngineAwareReturnType):
path: Tuple[str, ...]
interpreter_search_paths: Tuple[str, ...]
subprocess_environment_dict: FrozenDict[str, str]
named_caches_dir: str
bootstrap_python: Optional[PythonExecutable] = None
def create_argv(
self, pex_filepath: str, *args: str, python: Optional[PythonExecutable] = None
) -> Tuple[str, ...]:
python = python or self.bootstrap_python
if python:
return (python.path, pex_filepath, *args)
if os.path.basename(pex_filepath) == pex_filepath:
return (f"./{pex_filepath}", *args)
return (pex_filepath, *args)
def environment_dict(self, *, python_configured: bool) -> Mapping[str, str]:
"""The environment to use for running anything with PEX.
If the Process is run with a pre-selected Python interpreter, set `python_configured=True`
to avoid PEX from trying to find a new interpreter.
"""
d = dict(
PATH=create_path_env_var(self.path),
PEX_INHERIT_PATH="false",
PEX_IGNORE_RCFILES="true",
PEX_ROOT=os.path.join(self.named_caches_dir, "pex_root"),
**self.subprocess_environment_dict,
)
# NB: We only set `PEX_PYTHON_PATH` if the Python interpreter has not already been
# pre-selected by Pants. Otherwise, Pex would inadvertently try to find another interpreter
# when running PEXes. (Creating a PEX will ignore this env var in favor of `--python-path`.)
if not python_configured:
d["PEX_PYTHON_PATH"] = create_path_env_var(self.interpreter_search_paths)
return d
def level(self) -> LogLevel:
return LogLevel.DEBUG if self.bootstrap_python else LogLevel.WARN
def message(self) -> str:
if not self.bootstrap_python:
return (
"No bootstrap Python executable could be found from the option "
"`interpreter_search_paths` in the `[python-setup]` scope. Will attempt to run "
"PEXes directly."
)
return f"Selected {self.bootstrap_python.path} to bootstrap PEXes with."
@rule(desc="Find Python interpreter to bootstrap PEX", level=LogLevel.DEBUG)
async def find_pex_python(
python_setup: PythonSetup,
pex_runtime_env: PexRuntimeEnvironment,
subprocess_env_vars: SubprocessEnvironmentVars,
global_options: GlobalOptions,
) -> PexEnvironment:
pex_relevant_environment = await Get(
Environment, EnvironmentRequest(["PATH", "HOME", "PYENV_ROOT"])
)
# PEX files are compatible with bootstrapping via Python 2.7 or Python 3.5+. The bootstrap
# code will then re-exec itself if the underlying PEX user code needs a more specific python
# interpreter. As such, we look for many Pythons usable by the PEX bootstrap code here for
# maximum flexibility.
all_python_binary_paths = await MultiGet(
Get(
BinaryPaths,
BinaryPathRequest(
search_path=python_setup.interpreter_search_paths(pex_relevant_environment),
binary_name=binary_name,
test=BinaryPathTest(
args=[
"-c",
# N.B.: The following code snippet must be compatible with Python 2.7 and
# Python 3.5+.
#
# We hash the underlying Python interpreter executable to ensure we detect
# changes in the real interpreter that might otherwise be masked by Pyenv
# shim scripts found on the search path. Naively, just printing out the full
# version_info would be enough, but that does not account for supported abi
# changes (e.g.: a pyenv switch from a py27mu interpreter to a py27m
# interpreter.)
#
# When hashing, we pick 8192 for efficiency of reads and fingerprint updates
# (writes) since it's a common OS buffer size and an even multiple of the
# hash block size.
dedent(
"
|
sramkrishna/eidisi | scripts/meson_post_install.py | Python | gpl-3.0 | 838 | 0.00358 |
#!/usr/bin/env python3
import os
import pathlib
import sysconfig
import compileall
import subprocess
prefix = pathlib.Path(os.environ.get('MESON_INSTALL_PREFIX', '/usr/local'))
datadir = prefix / 'share'
destdir = os.environ.get('DESTDIR', '')
if not destdir:
print('Compiling gsettings schemas...')
subprocess.call(['glib-compile-schemas', str(datadir / 'glib-2.0' / 'schemas')])
print('Updating icon cache...')
subprocess.call(['gtk-update-icon-cache', '-qtf', str(datadir / 'icons' / 'hicolor')])
print('Updating desktop database...')
subprocess.call(['update-desktop-database', '-q', str(datadir / 'applications')])
print('Compiling python bytecode...')
moduledir = sysconfig.get_path('purelib', vars={'base': str(prefix)})
compileall.compile_dir(destdir + os.path.join(moduledir, 'eidisi'), optimize=2)
|
gangadhar-kadam/mtn-erpnext | patches/may_2012/std_pf_readonly.py | Python | agpl-3.0 | 718 | 0.044568 |
from __future__ import unicode_literals
def execute():
"""Make
|
standard print formats readonly for system ma
|
nager"""
import webnotes.model.doc
new_perms = [
{
'parent': 'Print Format',
'parentfield': 'permissions',
'parenttype': 'DocType',
'role': 'System Manager',
'permlevel': 1,
'read': 1,
},
{
'parent': 'Print Format',
'parentfield': 'permissions',
'parenttype': 'DocType',
'role': 'Administrator',
'permlevel': 1,
'read': 1,
'write': 1
},
]
for perms in new_perms:
doc = webnotes.model.doc.Document('DocPerm')
doc.fields.update(perms)
doc.save()
webnotes.conn.commit()
webnotes.conn.begin()
webnotes.reload_doc('core', 'doctype', 'print_format')
|
dims/neutron | neutron/db/bgp_dragentscheduler_db.py | Python | apache-2.0 | 9,018 | 0 |
# Copyright 2016 Huawei Technologies India Pvt. Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_db import exception as db_exc
from oslo_log import log as logging
import sqlalchemy as sa
from sqlalchemy import orm
from sqlalchemy.orm import exc
from neutron._i18n import _
from neutron._i18n import _LW
from neutron.db import agents_db
from neutron.db import agentschedulers_db as as_db
from neutron.db import model_base
from neutron.extensions import bgp_dragentscheduler as bgp_dras_ext
from neutron.services.bgp.common import constants as bgp_consts
LOG = logging.getLogger(__name__)
BGP_DRAGENT_SCHEDULER_OPTS = [
cfg.StrOpt(
'bgp_drscheduler_driver',
default='neutron.services.bgp.scheduler'
'.bgp_dragent_scheduler.ChanceScheduler',
help=_('Driver used for scheduling BGP speakers to BGP DrAgent'))
]
cfg.CONF.register_opts(BGP_DRAGENT_SCHEDULER_OPTS)
class BgpSpeakerDrAgentBinding(model_base.BASEV2):
"""Represents a mapping between BGP spe
|
aker and BGP DRAgent"""
__tablename__ = 'bgp_speaker_dragent_bindings'
bgp_speaker_id = sa.Column(sa.String(length=36),
sa.ForeignKey("bgp_speakers.id",
ondelete='CASCADE'),
nullable=False)
dragent = orm.relation(agents_db.Agent)
agent_id = sa.Column(sa.String(length=36),
sa.ForeignKey("agents.id",
ondelete='CASCADE'),
primary_key=True)
class BgpDrAgentSchedulerDbMixin(bgp_dras_ext.BgpDrSchedulerPluginBase,
as_db.AgentSchedulerDbMixin):
bgp_drscheduler = None
def schedule_unscheduled_bgp_speakers(self, context, host):
if self.bgp_drscheduler:
return self.bgp_drscheduler.schedule_unscheduled_bgp_speakers(
context, host)
else:
LOG.warning(_LW("Cannot schedule BgpSpeaker to DrAgent. "
"Reason: No scheduler registered."))
def schedule_bgp_speaker(self, context, created_bgp_speaker):
if self.bgp_drscheduler:
agents = self.bgp_drscheduler.schedule(context,
created_bgp_speaker)
for agent in agents:
self._bgp_rpc.bgp_speaker_created(context,
created_bgp_speaker['id'],
agent.host)
else:
LOG.warning(_LW("Cannot schedule BgpSpeaker to DrAgent. "
"Reason: No scheduler registered."))
def add_bgp_speaker_to_dragent(self, context, agent_id, speaker_id):
"""Associate a BgpDrAgent with a BgpSpeaker."""
try:
self._save_bgp_speaker_dragent_binding(context,
agent_id,
speaker_id)
except db_exc.DBDuplicateEntry:
raise bgp_dras_ext.DrAgentAssociationError(
agent_id=agent_id)
LOG.debug('BgpSpeaker %(bgp_speaker_id)s added to '
'BgpDrAgent %(agent_id)s',
{'bgp_speaker_id': speaker_id, 'agent_id': agent_id})
def _save_bgp_speaker_dragent_binding(self, context,
agent_id, speaker_id):
with context.session.begin(subtransactions=True):
agent_db = self._get_agent(context, agent_id)
agent_up = agent_db['admin_state_up']
is_agent_bgp = (agent_db['agent_type'] ==
bgp_consts.AGENT_TYPE_BGP_ROUTING)
if not is_agent_bgp or not agent_up:
raise bgp_dras_ext.DrAgentInvalid(id=agent_id)
binding = BgpSpeakerDrAgentBinding()
binding.bgp_speaker_id = speaker_id
binding.agent_id = agent_id
context.session.add(binding)
self._bgp_rpc.bgp_speaker_created(context, speaker_id, agent_db.host)
def remove_bgp_speaker_from_dragent(self, context, agent_id, speaker_id):
with context.session.begin(subtransactions=True):
agent_db = self._get_agent(context, agent_id)
is_agent_bgp = (agent_db['agent_type'] ==
bgp_consts.AGENT_TYPE_BGP_ROUTING)
if not is_agent_bgp:
raise bgp_dras_ext.DrAgentInvalid(id=agent_id)
query = context.session.query(BgpSpeakerDrAgentBinding)
query = query.filter_by(bgp_speaker_id=speaker_id,
agent_id=agent_id)
num_deleted = query.delete()
if not num_deleted:
raise bgp_dras_ext.DrAgentNotHostingBgpSpeaker(
bgp_speaker_id=speaker_id,
agent_id=agent_id)
LOG.debug('BgpSpeaker %(bgp_speaker_id)s removed from '
'BgpDrAgent %(agent_id)s',
{'bgp_speaker_id': speaker_id,
'agent_id': agent_id})
self._bgp_rpc.bgp_speaker_removed(context, speaker_id, agent_db.host)
def get_dragents_hosting_bgp_speakers(self, context, bgp_speaker_ids,
active=None, admin_state_up=None):
query = context.session.query(BgpSpeakerDrAgentBinding)
query = query.options(orm.contains_eager(
BgpSpeakerDrAgentBinding.dragent))
query = query.join(BgpSpeakerDrAgentBinding.dragent)
if len(bgp_speaker_ids) == 1:
query = query.filter(
BgpSpeakerDrAgentBinding.bgp_speaker_id == (
bgp_speaker_ids[0]))
elif bgp_speaker_ids:
query = query.filter(
BgpSpeakerDrAgentBinding.bgp_speaker_id in bgp_speaker_ids)
if admin_state_up is not None:
query = query.filter(agents_db.Agent.admin_state_up ==
admin_state_up)
return [binding.dragent
for binding in query
if as_db.AgentSchedulerDbMixin.is_eligible_agent(
active, binding.dragent)]
def get_dragent_bgp_speaker_bindings(self, context):
return context.session.query(BgpSpeakerDrAgentBinding).all()
def list_dragent_hosting_bgp_speaker(self, context, speaker_id):
dragents = self.get_dragents_hosting_bgp_speakers(context,
[speaker_id])
agent_ids = [dragent.id for dragent in dragents]
if not agent_ids:
return {'agents': []}
return {'agents': self.get_agents(context, filters={'id': agent_ids})}
def list_bgp_speaker_on_dragent(self, context, agent_id):
query = context.session.query(BgpSpeakerDrAgentBinding.bgp_speaker_id)
query = query.filter_by(agent_id=agent_id)
bgp_speaker_ids = [item[0] for item in query]
if not bgp_speaker_ids:
# Exception will be thrown if the requested agent does not exist.
self._get_agent(context, agent_id)
return {'bgp_speakers': []}
return {'bgp_speakers':
self.get_bgp_speakers(context,
filters={'id': bgp_speaker_ids})}
def get_bgp_speakers_for_agent_host(self, context, host):
agent = self._get_agent_by_type_and_host(
context, bgp_consts.AGENT_TYPE_BGP_ROUTING, host)
if not agent.adm
|
antoinecarme/sklearn2sql_heroku | tests/regression/freidman1/ws_freidman1_MLPRegressor_mysql_code_gen.py | Python | bsd-3-clause | 128 | 0.015625 |
from sklearn2sql_heroku.tests.regression import generic as reg_gen
reg_gen.test_model("MLPRegressor" , "freidman1" , "
|
mysql")
|
Ziqi-Li/bknqgis | bokeh/bokeh/server/tests/test_server.py | Python | gpl-2.0 | 22,987 | 0.002871 |
from __future__ import absolute_import
from datetime import timedelta
import pytest
import logging
import re
import mock
from tornado import gen
from tornado.ioloop import PeriodicCallback, IOLoop
from tornado.httpclient import HTTPError
import bokeh.server.server as server
from bokeh.application import Application
from bokeh.application.handlers import Handler
from bokeh.model import Model
from bokeh.core.properties import List, String
from bokeh.client import pull_session
from bokeh.server.server import Server
from bokeh.util.session_id import check_session_id_signature
from .utils import ManagedServerLoop, url, ws_url, http_get, websocket_open
logging.basicConfig(level=logging.DEBUG)
def test__create_hosts_whitelist_no_host():
hosts = server._create_hosts_whitelist(None, 1000)
assert hosts == ["localhost:1000"]
hosts = server._create_hosts_whitelist([], 1000)
assert hosts == ["localhost:1000"]
def test__create_hosts_whitelist_host_value_with_port_use_port():
hosts = server._create_hosts_whitelist(["foo:1000"], 1000)
assert hosts == ["foo:1000"]
hosts = server._create_hosts_whitelist(["foo:1000","bar:2100"], 1000)
assert hosts == ["foo:1000","bar:2100"]
def test__create_hosts_whitelist_host_without_port_use_port_80():
hosts = server._create_hosts_whitelist(["foo"], 1000)
assert hosts == ["foo:80"]
hosts = server._create_hosts_whitelist(["foo","bar"], 1000)
assert hosts == ["foo:80","bar:80"]
def test__create_hosts_whitelist_host_non_int_port_raises():
with pytest.raises(ValueError):
server._create_hosts_whitelist(["foo:xyz"], 1000)
def test__create_hosts_whitelist_bad_host_raises():
with pytest.raises(ValueError):
server._create_hosts_whitelist([""], 1000)
with pytest.raises(ValueError):
server._create_hosts_whitelist(["a:b:c"], 1000)
with pytest.raises(ValueError):
server._create_hosts_whitelist([":80"], 1000)
@gen.coroutine
def async_value(value):
yield gen.moment # this ensures we actually return to the loop
raise gen.Return(value)
class HookListModel(Model):
hooks = List(String)
class HookTestHandler(Handler):
def __init__(self):
super(HookTestHandler, self).__init__()
self.load_count = 0
self.unload_count = 0
self.session_creation_async_value = 0
self.hooks = []
self.server_periodic_remover = None
self.session_periodic_remover = None
def modify_document(self, doc):
# this checks that the session created hook has run
# and session destroyed has not.
assert self.session_creation_async_value == 3
doc.title = "Modified"
doc.roots[0].hooks.append("modify")
self.hooks.append("modify")
def on_server_loaded(self, server_context):
assert len(server_context.sessions) == 0
self.load_count += 1
self.hooks.append("server_loaded")
server_context.add_next_tick_callback(self.on_next_tick_server)
server_context.add_timeout_callback(self.on_timeout_server, 2)
server_context.add_periodic_callback(self.on_periodic_server, 3)
def remover():
server_context.remove_periodic_callback(self.on_periodic_server)
self.server_periodic_remover = remover
def on_server_unloaded(self, server_context):
self.unload_count += 1
self.hooks.append("server_unloaded")
# important to test that this can be async
@gen.coroutine
def on_session_created(self, session_context):
@gen.coroutine
def setup_document(doc):
# session creation hook is allowed to init the document
# before any modify_document() handlers kick in
from bokeh.document import DEFAULT_TITLE
hook_list = HookListModel()
assert doc.title == DEFAULT_TITLE
assert len(doc.roots) == 0
hook_list.hooks.append("session_created")
doc.add_root(hook_list)
self.session_creation_async_value = yield async_value(1)
self.session_creation_async_value = yield async_value(2)
self.session_creation_async_value = yield async_value(3)
yield session_context.with_locked_document(setup_document)
server_context = session_context.server_context
server_context.add_next_tick_callback(self.on_next_tick_session)
server_context.add_timeout_callback(self.on_timeout_session, 2)
server_context.add_periodic_callback(self.on_periodic_session, 3)
def remover():
server_context.remove_periodic_callback(self.on_periodic_session)
self.session_periodic_remover = remover
self.hooks.append("session_created")
# this has to be async too
@gen.coroutine
def on_session_destroyed(self, session_context):
@gen.coroutine
def shutdown_document(doc):
doc.roots[0].hooks.append("session_destroyed")
self.session_creation_async_value = yield async_value(4)
self.session_creation_async_value = yield async_value(5)
self.session_creation_async_value = yield async_value(6)
yield session_context.with_locked_document(shutdown_document)
self.hooks.append("session_destroyed")
def on_next_tick_server(self):
self.hooks.append("next_tick_server")
def on_timeout_server(self):
self.hooks.append("timeout_server")
def on_periodic_server(self):
self.hooks.append("periodic_server")
self.server_periodic_remover()
def on_next_tick_session(self):
self.hooks.append("next_tick_session")
def on_timeout_session(self):
self.hooks.append("timeout_session")
def on_periodic_session(self):
self.hooks.append("periodic_session")
self.session_periodic_remover()
def test__lifecycle_hooks():
application = Application()
handler = HookTestHandler()
application.add(handler)
with ManagedServerLoop(application, check_unused_sessions_milliseconds=30) as server:
# wait for server callbacks to run before we mix in the
# session, this keeps the test deterministic
def check_done():
if len(handler.hooks) == 4:
server.io_loop.stop()
server_load_checker = PeriodicCallback(check_done, 1,
io_loop=server.io_loop)
server_load_checker.start()
server.io_loop.start()
server_load_checker.stop()
# now we create a session
client_session = pull_session(session_id='test__lifecycle_hooks',
url=url(server),
io_loop=server.io_loop)
client_doc = client_session.document
assert len(client_doc.roots) == 1
server_session = server.get_session('/', client_session.id)
server_doc = server_session.document
assert len(server_doc.roots) == 1
client_session.close()
# expire the session quickly rather than after the
# usual timeout
server_session.request_expiration()
def on_done():
server.io_loop.stop()
server.io_loop.call_later(0.1, on_done)
server.io_loop.start()
assert handler.hooks == ["server_loaded",
"next_tick_server",
"timeout_server",
"periodic_server",
"session_created",
"next_tick_session",
"modify",
"timeout_session",
"periodic_session",
"session_destroyed",
"server_unloaded"]
client_hook_list = client_doc.roots[0]
server_hook_list = server_doc.roots[0]
assert handler.load_count == 1
assert handler.unload_count == 1
assert handler.session_creation_async_value == 6
assert client_doc.title == "Modified"
assert server_doc.title == "Modified"
# the client session doesn't see the event that adds "sessi
|
apark263/tensorflow | tensorflow/python/autograph/pyct/static_analysis/type_info_test.py | Python | apache-2.0 | 6,883 | 0.004794 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for type_info mod
|
ule."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.autograph.pyct import anno
from tensorflow.python.autograph.pyct import cfg
from tensorflow.python.autograph.pyct import parser
from tensorflow.python.autograph.pyct import qual_names
from tensorflow.python.autograph.pyct import transformer
from tensorflow.python.autograph.pyct.static_analysis import activity
from tensorflow.python.autograph.pyct.static_analysis import live_values
from tensorflow.python.autograph.pyct.static_analysis import reaching_definitions
from tensorflow.python.autograph.pyct.static_analysis import type_info
from tensorflow.python.client import session
from tensorflow.python.platform import test
from tensorflow.python.training import training
class ScopeTest(test.TestCase):
def test_basic(self):
scope = type_info.Scope(None)
self.assertFalse(scope.hasval('foo'))
scope.setval('foo', 'bar')
self.assertTrue(scope.hasval('foo'))
self.assertFalse(scope.hasval('baz'))
def test_nesting(self):
scope = type_info.Scope(None)
scope.setval('foo', '')
child = type_info.Scope(scope)
self.assertTrue(child.hasval('foo'))
self.assertTrue(scope.hasval('foo'))
child.setval('bar', '')
self.assertTrue(child.hasval('bar'))
self.assertFalse(scope.hasval('bar'))
class TypeInfoResolverTest(test.TestCase):
def _parse_and_analyze(self,
test_fn,
namespace,
arg_types=None):
node, source = parser.parse_entity(test_fn)
entity_info = transformer.EntityInfo(
source_code=source,
source_file=None,
namespace=namespace,
arg_values=None,
arg_types=arg_types,
owner_type=None)
node = qual_names.resolve(node)
graphs = cfg.build(node)
ctx = transformer.Context(entity_info)
node = activity.resolve(node, ctx)
node = reaching_definitions.resolve(node, ctx, graphs,
reaching_definitions.Definition)
node = live_values.resolve(node, ctx, {})
node = type_info.resolve(node, ctx)
node = live_values.resolve(node, ctx, {})
return node
def test_constructor_detection(self):
def test_fn():
opt = training.GradientDescentOptimizer(0.1)
return opt
node = self._parse_and_analyze(test_fn, {'training': training})
call_node = node.body[0].body[0].value
self.assertEquals(training.GradientDescentOptimizer,
anno.getanno(call_node, 'type'))
self.assertEquals((training.__name__, 'GradientDescentOptimizer'),
anno.getanno(call_node, 'type_fqn'))
def test_class_members_of_detected_constructor(self):
def test_fn():
opt = training.GradientDescentOptimizer(0.1)
opt.minimize(0)
node = self._parse_and_analyze(test_fn, {'training': training})
method_call = node.body[0].body[1].value.func
self.assertEquals(training.GradientDescentOptimizer.minimize,
anno.getanno(method_call, 'live_val'))
def test_class_members_in_with_stmt(self):
def test_fn(x):
with session.Session() as sess:
sess.run(x)
node = self._parse_and_analyze(test_fn, {'session': session})
constructor_call = node.body[0].body[0].items[0].context_expr
self.assertEquals(session.Session, anno.getanno(constructor_call, 'type'))
self.assertEquals((session.__name__, 'Session'),
anno.getanno(constructor_call, 'type_fqn'))
method_call = node.body[0].body[0].body[0].value.func
self.assertEquals(session.Session.run, anno.getanno(method_call,
'live_val'))
def test_constructor_data_dependent(self):
def test_fn(x):
if x > 0:
opt = training.GradientDescentOptimizer(0.1)
else:
opt = training.GradientDescentOptimizer(0.01)
opt.minimize(0)
node = self._parse_and_analyze(test_fn, {'training': training})
method_call = node.body[0].body[1].value.func
self.assertFalse(anno.hasanno(method_call, 'live_val'))
def test_parameter_class_members(self):
def test_fn(opt):
opt.minimize(0)
node = self._parse_and_analyze(test_fn, {})
method_call = node.body[0].body[0].value.func
self.assertFalse(anno.hasanno(method_call, 'live_val'))
def test_parameter_class_members_with_value_hints(self):
def test_fn(opt):
opt.minimize(0)
node = self._parse_and_analyze(
test_fn, {},
arg_types={
'opt': (training.GradientDescentOptimizer.__name__,
training.GradientDescentOptimizer)
})
method_call = node.body[0].body[0].value.func
self.assertEquals(training.GradientDescentOptimizer.minimize,
anno.getanno(method_call, 'live_val'))
def test_function_variables(self):
def bar():
pass
def test_fn():
foo = bar
foo()
node = self._parse_and_analyze(test_fn, {'bar': bar})
method_call = node.body[0].body[1].value.func
self.assertFalse(anno.hasanno(method_call, 'live_val'))
def test_nested_members(self):
def test_fn():
foo = training.GradientDescentOptimizer(0.1)
foo.bar.baz()
node = self._parse_and_analyze(test_fn, {'training': training})
method_call = node.body[0].body[1].value.func
self.assertFalse(anno.hasanno(method_call, 'live_val'))
def test_nested_unpacking(self):
class Foo(object):
pass
class Bar(object):
pass
def test_fn():
a, (b, c) = (Foo(), (Bar(), Foo()))
return a, b, c
node = self._parse_and_analyze(test_fn, {'Foo': Foo, 'Bar': Bar})
a, b, c = node.body[0].body[1].value.elts
self.assertEquals(anno.getanno(a, 'type'), Foo)
self.assertEquals(anno.getanno(b, 'type'), Bar)
self.assertEquals(anno.getanno(c, 'type'), Foo)
self.assertFalse(anno.hasanno(a, 'live_val'))
self.assertFalse(anno.hasanno(b, 'live_val'))
self.assertFalse(anno.hasanno(c, 'live_val'))
if __name__ == '__main__':
test.main()
|
radicalbit/ambari | ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_windows.py | Python | apache-2.0 | 3,411 | 0.007622 |
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
#Used in subsequent imports from params
from resource_management.libraries.script.script import Script
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions.format import format
from install_params import exclude_packages
from status_params import *
config = Script.get_config()
hadoop_conf_dir = None
hbase_conf_dir = None
hadoop_home = None
try:
hadoop_conf_dir = os.environ["HADOOP_CONF_DIR"]
hbase_conf_dir = os.environ["HBASE_CONF_DIR"]
hadoop_home = os.environ["HADOOP_HOME"]
except:
pass
#directories & files
dfs_name_dir = config['configurations']['hdfs-site']['dfs.namenode.name.dir']
fs_checkpoint_dir = config['configurations']['hdfs-site']['dfs.namenode.checkpoint.dir']
dfs_data_dir = config['configurations']['hdfs-site']['dfs.datanode.data.dir']
#decommission
hdfs_exclude_file = default("/clusterHostInfo/decom_dn_hosts", [])
exclude_file_path = config['configurations']['hdfs-site']['dfs.hosts.exclude']
include_file_path = default("/configurations/hdfs-site/dfs.hosts", None)
hdfs_include_file = None
manage_include_files = default("/configurations/hdfs-site/manage.include.files", False)
if include_file_path and manage_include_files:
slave_hosts = default("/clusterHostInfo/slave_hosts", [])
hdfs_include_file = slave_hosts
update_files_only = default("/commandParams/update_files_only",False)
# HDFS High Availability properties
dfs_ha_enabled = False
dfs_ha_nameservices = default("/configurations/hdfs-site/dfs.internal.nameservices", None)
dfs_ha_namenode_ids = default(format("/configurations/hdfs-site/dfs.ha.namenodes.{dfs_ha_nameservices}"), None)
namenode_id = None
namenode_rpc = None
hostname = config["hostname"]
if dfs_ha_namenode_ids:
dfs_ha_namemodes_ids_list = dfs_ha_namenode_ids.split(",")
dfs_ha_namenode_ids_array_len = len(dfs_ha_namemodes_ids_list)
if dfs_ha_namenode_ids_array_len > 1:
dfs_ha_enabled = True
if dfs_ha_enabled:
for nn_id in dfs_ha_namemodes_ids_list:
nn_host = config['configurations']['hdfs-site'][format('dfs.namenode.rpc-address.{dfs_ha_nameservices}.{nn_id}')]
if hostname.lower() in nn_host.lower():
namenode_id = nn_id
namenode_rpc = nn_host
hadoop_user = config["configurations"]["cluster-env"]["hadoop.user.name"]
hdfs_user = hadoop_user
grep_exe = "findstr"
name_node_params = default("/commandParams/namenode", None)
service_map = {
"datanode" : datanode_win_service_name,
"journalnode" : journalnode_win_service_name,
"namenode" : namenode_win_service_name,
"secondarynamenode" : snamenode_win_service_name,
"zkfc_slave": zkfc_win_service_name
}
|
GabrielBrascher/cloudstack | test/selenium/smoke/TemplatesAndISO.py | Python | apache-2.0 | 6,757 | 0.027823 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
'''
ISO PART YET TO BE ADDED:: remove this after adding it.
'''
import sys, os
sys.path.append(os.path.abspath(os.path.dirname(__file__) + '/'+'../lib'))
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select
from selenium.common.exceptions import NoSuchElementException
import unittest, time
import initialize
import Global_Locators
class Template_Add(unittest.TestCase):
def setUp(self):
self.driver = initialize.getOrCreateWebdriver()
self.verificationErrors = []
def test_templateadd(self):
driver = self.driver
## Action part
#Make sure you are on Dashboard
driver.find_element_by_xpath(Global_Locators.dashboard_xpath).click()
time.sleep(2)
# Go to Templates
driver.find_element_by_xpath(Global_Locators.templates_xpath).click()
#Select Template from drop down list
driver.find_element_by_xpath(Global_Locators.template_xpath).click()
# Add Template
driver.find_element_by_xpath(Global_Locators.AddTemplate_xpath).click()
# Following have names.. so they do not have their global entries.
driver.find_element_by_id("label_name").clear()
driver.find_element_by_id("label_name").send_keys("Test Template Ubuntu")
driver.find_element_by_id("label_description").clear()
driver.find_element_by_id("label_description").send_keys("Ubuntu 10.04")
driver.find_element_by_id("URL").clear()
driver.find_element_by_id("URL").send_keys("http://nfs1.lab.vmops.com/templates/Ubuntu/Ubuntuu-10-04-64bit-server.vhd")
Select(driver.find_element_by_id("label_os_type")).select_by_visible_text("Ubuntu 10.04 (64-bit)")
driver.find_element_by_id("label_public").click()
driver.find_element_by_id("label_featured").click()
driver.find_element_by_xpath("//button[@type='button']").click()
time.sleep(2)
# Go to Dash Board
driver.find_element_by_xpath(Global_Locators.dashboard_xpath).click()
time.sleep(600)
##Verification will be if this offering shows up into table and we can actually edit it.
def is_element_present(self, how, what):
try: self.driver.find_element(by=how, value=what)
except NoSuchElementException, e: return False
return True
def tearDown(self):
self.assertEqual([], self.verificationErrors)
class Template_Edit(unittest.TestCase):
def setUp(self):
self.driver = initialize.getOrCreateWebdriver()
self.verificationErrors = []
def test_templateedit(self):
driver = self.driver
## Action part
#Make sure you are on Dashboard
driver.find_element_by_xpath(Global_Locators.dashboard_xpath).click()
time.sleep(2)
# Go to Templates
driver.find_element_by_xpath(Global_Locators.templates_xpath).click()
#Select Template from drop down list
driver.find_element_by_xpath(Global_Locators.template_xpath).click()
linkclass = None
linkclass = driver.find_elements_by_xpath(Global_Locators.template_table_xpath) # This returns a list
for link in linkclass:
if link.text == "Test Template Ubuntu": # We will search for our VM in this table
link.click()
time.sleep(2)
# Change name
driver.find_element_by_name("name").clear()
driver.find_element_by_name("name").send_keys("Test template")
# Change Description
driver.find_element_by_name("displaytext").clear()
driver.find_element_by_name("displaytext").send_keys("ubuntu")
driver.find_element_by_css_selector(Global_Locators.template_editdone_css).click()
time.sleep(2)
#Dashboard
driver.find_element_by_xpath(Global_Locators.dashboard_xpath).click()
time.sleep(10)
def is_element_present(self, how, what):
try: self.driver.find_element(by=how, value=what)
except NoSuchElementException, e: return False
return True
def tearDown(self):
self.assertEqual([], self.verificationErrors)
# Now we will find this offering and delete it!!
class Template_Delete(unittest.TestCase):
def setUp(self):
self.driver = initialize.getOrCreateWebdriver()
self.verificationErrors = []
def test_templatedelete(self):
driver = self.driver
## Action part
#Make sure you are on Dashboard
driver.find_element_by_xpath(Global_Locators.dashboard_xpath).click()
time.sleep(2)
# Go to Templates
driver.find_element_by_xpath(Global_Locators.templates_xpath).click()
#Select Template from drop down list
driver.find_element_by_xpath(Global_Locators.template_xpath).click()
linkclass = None
linkclass = driver.find_elements_by_xpath(Global_Locators.template_table_xpath) # This returns a list
for link in linkclass:
if link.text == "Test Template": # We will search for our VM in this table
link.click()
time.sleep(2)
driver.find_element_by_css_selector(Global_Locators.template_delete_css).click()
driver.find_element_by_xpath(Global_Locators.yesconfirmation_xapth).click()
time.sleep(2)
#Dashboard
driver.find_element_by_xpath(Global_Locators.dashboard_xpath).click()
time.sleep(20)
def is_element_present(self, how, what):
try: self.driver.find_element(by=how, value=what)
except NoSuchElementException, e: return False
return True
def tearDown(self):
self.assertEqual([], self.verificationErrors)
|
jose-lpa/elastic-django | tests/test_client.py | Python | bsd-3-clause | 2,202 | 0 |
from unittest import TestCase
from django.conf import settings
from django.test.utils import override_settings
from mock import patch
from elastic_django.client import ElasticsearchClient
from elastic_django.exceptions import ElasticsearchClientConfigurationError
class ElasticsearchClientTestCase(TestCase):
def test_client_constructor_sanity_check(self):
"""
Tests sanity checks in ``ElasticsearchClient.__init__``.
"""
self.assertRaises(
ElasticsearchClientConfigurationError,
ElasticsearchClient,
hosts='not a list or tuple'
)
@override_settings(ELASTICSEARCH_HOSTS=None)
@patch('elasticsearch.Elasticsearch.ping')
def test_no_hosts_given_nor_configured(self, mock):
"""
Tests client behaviour when being called with no hosts specified and no
hosts defined in Django settings. It should fallback to the ES default
expected configuration (localhost, port 9200).
"""
# Delete setting.
del settings.ELASTICSEARCH_HOSTS
# Mock ES backend ping response to pass test.
mock.return_value = True
client = ElasticsearchClient()
self.assertEqual(client.hosts, [{'host': 'localhost', 'port': '9200'}])
@override_settings(
ELASTICSEARCH_HOSTS=[{'host': '127.0.0.1', 'port': '443'}])
@patch('elasticsearch.Elasticsearch.ping')
def test_no_hosts_given_and_configured(self, mock):
"""
Tests client behaviour when being called with no hosts specified and
hosts already defined in Django settings.
"""
# Mock ES backend ping response to pass test.
mock.return_value = True
client = ElasticsearchClient()
self.assertEqual(client.hosts, [{'host': '127.0.0.1', 'port': '443'}])
@override_settings(
ELASTICSEARCH_HOSTS=[{'host': '127.0.0.1', 'port': '9999'}])
def test_no_ping_response(self):
"""
Tests exception raised when backend doesn't respond to ping - specified
backend is unavailable.
"""
self.assertRaises(
ElasticsearchClientConfigurationError, ElasticsearchClient)
|
ovh/ip-reputation-monitoring | reputation/parsing/csv/blocklistde.py | Python | gpl-3.0 | 2,133 | 0.000469 |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2016, OVH SAS
#
# This file is part of ip-reputation-monitoring.
#
# ip-reputation-monitoring is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Blocklist CSV parser
"""
from datetime import datetime
from parsing.csv.csvparser import CSVParser
PARSER_NAME = 'BlockList'
def compute_weight(service):
"""
Map a service with a weight. All reported services have a
default weight of 10, except ssh failed attempts (1), manual
list addition (5) and the wtf category "all" (5).
"""
return {
'ssh': 1,
'all': 5,
'manually added': 5
}.get(service, 10)
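# Illustrative only (not part of the original file): a quick check of the
# mapping above. 'imap' is just a stand-in for any service without a special
# case, so it falls back to the default weight of 10.
assert compute_weight('ssh') == 1
assert compute_weight('all') == 5
assert compute_weight('manually added') == 5
assert compute_weight('imap') == 10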
class BlockListParser(CSVParser):
"""
Blocklist.de dedicated csv parser
"""
def __init__(self, path):
CSVParser.__init__(self, path, ':')
def compute_weight(self, data):
return compute_weight(self._get_service(data[3]))
def get_date(self, data):
timestamp = float(data[4].strip()[:10])
return datetime.utcfromtimestamp(timestamp)
def get_source(self, data):
return PARSER_NAME
def get_ip(self, data):
if len(data) != 6:
return None
return data[0]
def _get_service(self, cell):
""" Try to extract service associated to the issue from a c
|
ell """
return cell.strip().split(',')[0]
@staticmethod
def get_description():
""" Mandatory method for auto-registration """
return {
'name': PARSER_NAME,
'shortened': 'BLCK'
}
|
birkholz/homeboard | chores/migrations/0001_initial.py | Python | gpl-2.0 | 1,978 | 0.002022 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('home', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Assignment',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
],
options={
'abstract': False,
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Chore',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
('title', models.CharField(max_length=255)),
('description', models.TextField()),
('assigned_to', models.ManyToManyField(to=settings.AUTH_USER_MODEL, through='chores.Assignment')),
('home', models.ForeignKey(related_name='chores', to='home.Home')),
],
options={
'abstract': False,
},
bases=(models.Model,),
),
migrations.AddField(
model_name='assignment',
name='chore',
field=models.ForeignKey(to='chores.Chore'),
preserve_default=True,
),
migrations.AddField(
model_name='assignment',
name='user',
field=models.ForeignKey(to=settings.AUTH_USER_MODEL),
preserve_default=True,
),
]
|
django-settings/django-settings | myproject/myproject/local_settings.py | Python | unlicense | 301 | 0.003322 |
# Define settings that are specific to the local environment.
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'database.db',
}
}
INTERNAL_IPS = ('127.0.0.1',)
from custom_settings import INSTALLED_APPS
INSTALLED_APPS += (
# 'debug_toolbar',
)
|
diofant/diofant | diofant/tests/core/test_relational.py | Python | bsd-3-clause | 22,310 | 0.000359 |
import operator
import random
import pytest
from diofant import (And, Eq, Equality, FiniteSet, Float, Function, Ge,
GreaterThan, Gt, I, Implies, Integer, Interval, Le,
LessThan, Lt, Ne, Not, Or, Rational, Rel, Relational,
StrictGreaterThan, StrictLessThan, Symbol, Unequality,
Wild, Xor, ceiling, false, floor, nan, oo, pi, simplify,
sqrt, true, zoo)
from diofant.abc import t, w, x, y, z
from diofant.core.relational import _Inequality as Inequality
__all__ = ()
def test_rel_ne():
assert Relational(x, y, '!=') == Ne(x, y)
def test_rel_subs():
e = Relational(x, y, '==')
e = e.subs({x: z})
assert isinstance(e, Equality)
assert e.lhs == z
assert e.rhs == y
e = Relational(x, y, '>=')
e = e.subs({x: z})
assert isinstance(e, GreaterThan)
assert e.lhs == z
assert e.rhs == y
e = Relational(x, y, '<=')
e = e.subs({x: z})
assert isinstance(e, LessThan)
assert e.lhs == z
assert e.rhs == y
e = Relational(x, y, '>')
e = e.subs({x: z})
assert isinstance(e, StrictGreaterThan)
assert e.lhs == z
assert e.rhs == y
e = Relational(x, y, '<')
e = e.subs({x: z})
assert isinstance(e, StrictLessThan)
assert e.lhs == z
assert e.rhs == y
e = Eq(x, 0)
assert e.subs({x: 0}) is true
assert e.subs({x: 1}) is false
def test_wrappers():
e = x + x**2
res = Relational(y, e, '==')
assert Rel(y, x + x**2, '==') == res
assert Eq(y, x + x**2) == res
res = Relational(y, e, '<')
assert Lt(y, x + x**2) == res
res = Relational(y, e, '<=')
assert Le(y, x + x**2) == res
res = Relational(y, e, '>')
assert Gt(y, x + x**2) == res
res = Relational(y, e, '>=')
assert Ge(y, x + x**2) == res
res = Relational(y, e, '!=')
assert Ne(y, x + x**2) == res
def test_Eq():
assert Eq(x**2, 0) == Eq(x**2, 0)
assert Eq(x**2, 0) != Eq(x**2, 1)
pytest.raises(TypeError, lambda: Eq(x))
assert Eq(x, x) # issue sympy/sympy#5719
# issue sympy/sympy#6116
p = Symbol('p', positive=True)
assert Eq(p, 0) is false
def test_rel_Infinity():
# pylint: disable=comparison-with-itself
# NOTE: All of these are actually handled by diofant.core.Number, and do
# not create Relational objects.
assert (oo > oo) is false
assert (oo > -oo) is true
assert (oo > 1) is true
assert (oo < oo) is false
assert (oo < -oo) is false
assert (oo < 1) is false
assert (oo >= oo) is true
assert (oo >= -oo) is true
assert (oo >= 1) is true
assert (oo <= oo) is true
assert (oo <= -oo) is false
assert (oo <= 1) is false
assert (-oo > oo) is false
assert (-oo > -oo) is false
assert (-oo > 1) is false
assert (-oo < oo) is true
assert (-oo < -oo) is false
assert (-oo < 1) is true
assert (-oo >= oo) is false
assert (-oo >= -oo) is true
assert (-oo >= 1) is false
assert (-oo <= oo) is true
assert (-oo <= -oo) is true
assert (-oo <= 1) is true
def test_bool():
assert Eq(0, 0) is true
assert Eq(1, 0) is false
assert Ne(0, 0) is false
assert Ne(1, 0) is true
assert Lt(0, 1) is true
assert Lt(1, 0) is false
assert Le(0, 1) is true
assert Le(1, 0) is false
assert Le(0, 0) is true
assert Gt(1, 0) is true
assert Gt(0, 1) is false
assert Ge(1, 0) is true
assert Ge(0, 1) is false
assert Ge(1, 1) is true
assert Eq(I, 2) is false
assert Ne(I, 2) is true
pytest.raises(TypeError, lambda: Gt(I, 2))
pytest.raises(TypeError, lambda: Ge(I, 2))
pytest.raises(TypeError, lambda: Lt(I, 2))
pytest.raises(TypeError, lambda: Le(I, 2))
a = Float('.000000000000000000001')
b = Float('.0000000000000000000001')
assert Eq(pi + a, pi + b) is false
def test_rich_cmp():
assert (x < y) == Lt(x, y)
assert (x <= y) == Le(x, y)
assert (x > y) == Gt(x, y)
assert (x >= y) == Ge(x, y)
def test_doit():
p = Symbol('p', positive=True)
n = Symbol('n', negative=True)
np = Symbol('np', nonpositive=True)
nn = Symbol('nn', nonnegative=True)
assert Gt(p, 0).doit() is true
assert Gt(p, 1).doit() == Gt(p, 1)
assert Ge(p, 0).doit() is true
assert Le(p, 0).doit() is false
assert Lt(n, 0).doit() is true
assert Le(np, 0).doit() is true
assert Gt(nn, 0).doit() == Gt(nn, 0)
assert Lt(nn, 0).doit() is false
assert Eq(x, 0).doit() == Eq(x, 0)
def test_new_relational():
assert Eq(x, 0) == Relational(x, 0) # None ==> Equality
assert Eq(x, 0) == Relational(x, 0, '==')
assert Eq(x, 0) == Relational(x, 0, 'eq')
assert Eq(x, 0) == Equality(x, 0)
assert Eq(x, -1) == Relational(x, -1) # None ==> Equality
assert Eq(x, -1) == Relational(x, -1, '==')
assert Eq(x, -1) == Relational(x, -1, 'eq')
assert Eq(x, -1) == Equality(x, -1)
assert Eq(x, 0) != Relational(x, 1) # None ==> Equality
assert Eq(x, 0) != Relational(x, 1, '==')
assert Eq(x, 0) != Relational(x, 1, 'eq')
assert Eq(x, 0) != Equality(x, 1)
assert Eq(x, -1) != Relational(x, 1) # None ==> Equality
assert Eq(x, -1) != Relational(x, 1, '==')
assert Eq(x, -1) != Relational(x, 1, 'eq')
assert Eq(x, -1) != Equality(x, 1)
assert Ne(x, 0) == Relational(x, 0, '!=')
assert Ne(x, 0) == Relational(x, 0, '<>')
assert Ne(x, 0) == Relational(x, 0, 'ne')
assert Ne(x, 0) == Unequality(x, 0)
assert Ne(x, 0) != Relational(x, 1, '!=')
assert Ne(x, 0) != Relational(x, 1, '<>')
assert Ne(x, 0) != Relational(x, 1, 'ne')
assert Ne(x, 0) != Unequality(x, 1)
assert Ge(x, 0) == Relational(x, 0, '>=')
assert Ge(x, 0) == Relational(x, 0, 'ge')
assert Ge(x, 0) == GreaterThan(x, 0)
assert Ge(x, 1) != Relational(x, 0, '>=')
assert Ge(x, 1) != Relational(x, 0, 'ge')
assert Ge(x, 1) != GreaterThan(x, 0)
assert (x >= 1) == Relational(x, 1, '>=')
assert (x >= 1) == Relational(x, 1, 'ge')
assert (x >= 1) == GreaterThan(x, 1)
assert (x >= 0) != Relational(x, 1, '>=')
assert (x >= 0) != Relational(x, 1, 'ge')
assert (x >= 0) != GreaterThan(x, 1)
assert Le(x, 0) == Relational(x, 0, '<=')
assert Le(x, 0) == Relational(x, 0, 'le')
assert Le(x, 0) == LessThan(x, 0)
assert Le(x, 1) != Relational(x, 0, '<=')
assert Le(x, 1) != Relational(x, 0, 'le')
assert Le(x, 1) != LessThan(x, 0)
assert (x <= 1) == Relational(x, 1, '<=')
assert (x <= 1) == Relational(x, 1, 'le')
assert (x <= 1) == LessThan(x, 1)
assert (x <= 0) != Relational(x, 1, '<=')
assert (x <= 0) != Relational(x, 1, 'le')
assert (x <= 0) != LessThan(x, 1)
assert Gt(x, 0) == Relational(x, 0, '>')
assert Gt(x, 0) == Relational(x, 0, 'gt')
assert Gt(x, 0) == StrictGreaterThan(x, 0)
assert Gt(x, 1) != Relational(x, 0, '>')
assert Gt(x, 1) != Relational(x, 0, 'gt')
assert Gt(x, 1) != StrictGreaterThan(x, 0)
assert (x > 1) == Relational(x, 1, '>')
assert (x > 1) == Relational(x, 1, 'gt')
assert (x > 1) == StrictGreaterThan(x, 1)
assert (x > 0) != Relational(x, 1, '>')
assert (x > 0) != Relational(x, 1, 'gt')
assert (x > 0) != StrictGreaterThan(x, 1)
assert Lt(x, 0) == Relational(x, 0, '<')
assert Lt(x, 0) == Relational(x, 0, 'lt')
assert Lt(x, 0) == StrictLessThan(x, 0)
assert Lt(x, 1) != Relational(x, 0, '<')
assert Lt(x, 1) != Relational(x, 0, 'lt')
assert Lt(x, 1) != StrictLessThan(x, 0)
assert (x < 1) == Relational(x, 1, '<')
assert (x < 1) == Relational(x, 1, 'lt')
assert (x < 1) == StrictLessThan(x, 1)
assert (x < 0) != Relational(x, 1, '<')
assert (x < 0) != Relational(x, 1, 'lt')
assert (x < 0) != StrictLessThan(x, 1)
# finally, some fuzz testing
for _ in range(100):
while 1:
strtype, length = (chr, 65535) if random.randint(0, 1) else (chr, 255)
relation_type = strtype(random.randint(0, length))
if random.randint(0, 1):
relation_type += strtype(random.randint(0, length))
|
adel-boutros/qpid-dispatch
|
tests/system_tests_multi_tenancy.py
|
Python
|
apache-2.0
| 35,894
| 0.004987
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import unittest, os, json
from subprocess import PIPE, STDOUT
from proton import Message, PENDING, ACCEPTED, REJECTED, RELEASED, SSLDomain, SSLUnavailable, Timeout
from system_test import TestCase, Qdrouterd, main_module, DIR, TIMEOUT, Process
from proton.handlers import MessagingHandler
from proton.reactor import Container, DynamicNodeProperties
# PROTON-828:
try:
from proton import MODIFIED
except ImportError:
from proton import PN_STATUS_MODIFIED as MODIFIED
class RouterTest(TestCase):
inter_router_port = None
@classmethod
def setUpClass(cls):
"""Start a router"""
super(RouterTest, cls).setUpClass()
def router(name, connection):
config = [
('router', {'mode': 'interior', 'id': name}),
('listener', {'port': cls.tester.get_port(), 'stripAnnotations': 'no'}),
('listener', {'port': cls.tester.get_port(), 'stripAnnotations': 'no', 'multiTenant': 'yes'}),
('listener', {'port': cls.tester.get_port(), 'stripAnnotations': 'no', 'role': 'route-container'}),
('linkRoute', {'prefix': '0.0.0.0/link', 'dir': 'in', 'containerId': 'LRC'}),
('linkRoute', {'prefix': '0.0.0.0/link', 'dir': 'out', 'containerId': 'LRC'}),
('autoLink', {'addr': '0.0.0.0/queue.waypoint', 'containerId': 'ALC', 'dir': 'in'}),
('autoLink', {'addr': '0.0.0.0/queue.waypoint', 'containerId': 'ALC', 'dir': 'out'}),
('address', {'prefix': 'closest', 'distribution': 'closest'}),
('address', {'prefix': 'spread', 'distribution': 'balanced'}),
('address', {'prefix': 'multicast', 'distribution': 'multicast'}),
('address', {'prefix': '0.0.0.0/queue', 'waypoint': 'yes'}),
connection
]
config = Qdrouterd.Config(config)
cls.routers.append(cls.tester.qdrouterd(name, config, wait=True))
cls.routers = []
inter_router_port = cls.tester.get_port()
router('A', ('listener', {'role': 'inter-router', 'port': inter_router_port}))
router('B', ('connector', {'name': 'connectorToA', 'role': 'inter-router', 'port': inter_router_port, 'verifyHostName': 'no'}))
cls.routers[0].wait_router_connected('B')
cls.routers[1].wait_router_connected('A')
def test_01_one_router_targeted_sender_no_tenant(self):
test = MessageTransferTest(self.routers[0].addresses[0],
self.routers[0].addresses[0],
"anything/addr_01",
"anything/addr_01",
self.routers[0].addresses[0],
"M0anything/addr_01")
test.run()
self.assertEqual(None, test.error)
def test_02_one_router_targeted_sender_tenant_on_sender(self):
test = MessageTransferTest(self.routers[0].addresses[1],
self.routers[0].addresses[0],
"addr_02",
"0.0.0.0/addr_02",
self.routers[0].addresses[0],
"M00.0.0.0/addr_02")
test.run()
self.assertEqual(None, test.error)
def test_03_one_router_targeted_sender_tenant_on_receiver(self):
test = MessageTransferTest(self.routers[0].addresses[0],
self.routers[0].addresses[1],
"0.0.0.0/addr_03",
"addr_03",
self.routers[0].addresses[0],
"M00.0.0.0/addr_03")
test.run()
self.assertEqual(None, test.error)
def test_04_one_router_targeted_sender_tenant_on_both(self):
test = MessageTransferTest(self.routers[0].addresses[1],
self.routers[0].addresses[1],
"addr_04",
"addr_04",
self.routers[0].addresses[0],
"M00.0.0.0/addr_04")
test.run()
self.assertEqual(None, test.error)
def test_05_two_router_targeted_sender_no_tenant(self):
test = MessageTransferTest(self.routers[0].addresses[0],
self.routers[1].addresses[0],
"0.0.0.0/addr_05",
"0.0.0.0/addr_05",
self.routers[0].addresses[0],
"M00.0.0.0/addr_05")
test.run()
self.assertEqual(None, test.error)
def test_06_two_router_targeted_sender_tenant_on_sender(self):
test = MessageTransferTest(self.routers[0].addresses[1],
self.routers[1].addresses[0],
"addr_06",
"0.0.0.0/addr_06",
self.routers[0].addresses[0],
"M00.0.0.0/addr_06")
test.run()
self.assertEqual(None, test.error)
def test_07_two_router_targeted_sender_tenant_on_receiver(self):
test = MessageTransferTest(self.routers[0].addresses[0],
self.routers[1].addresses[1],
"0.0.0.0/addr_07",
"addr_07",
self.routers[0].addresses[0],
"M00.0.0.0/addr_07")
test.run()
self.assertEqual(None, test.error)
def test_08_two_router_targeted_sender_tenant_on_both(self):
test = MessageTransferTest(self.routers[0].addresses[1],
self.routers[1].addresses[1],
"addr_08",
"addr_08",
self.routers[0].addresses[0],
"M00.0.0.0/addr_08")
test.run()
self.assertEqual(None, test.error)
def test_09_one_router_anonymous_sender_no_tenant(self):
test = MessageTransferAnonTest(self.routers[0].addresses[0],
self.routers[0].addresses[0],
"anything/addr_09",
"anything/addr_09",
self.routers[0].addresses[0],
"M0anything/addr_09")
test.run()
self.assertEqual(None, test.error)
def test_10_one_router_anonymous_sender_tenant_on_sender(self):
test = MessageTransferAnonTest(self.routers[0].addresses[1],
self.routers[0].addresses[0],
"addr_10",
"0.0.0.0/addr_10",
self.routers[0].addresses[0],
"M00.0.0.0/addr_10")
test.run()
self.assertEqual(None, test.error)
def test_11_one_router_anonymous_sender_tenant_on_receiver(self):
test = MessageTransferAnonTest(s
|
davenquinn/Attitude
|
attitude/stereonet.py
|
Python
|
mit
| 7,069
| 0.014854
|
import numpy as N
from mplstereonet import stereonet_math
from scipy.stats import chi2
from numpy.testing import assert_array_almost_equal
from .geom.util import vector, unit_vector, dot
def quaternion(vector, angle):
"""
Unit quaternion for a vector and an angle
"""
return N.cos(angle/2)+vector*N.sin(angle/2)
def ellipse(n=1000, adaptive=False):
"""
Get a parameterized set of vectors defining
ellipse for a major and minor axis length.
Resulting vector bundle has major axes
along axes given.
"""
u = N.linspace(0,2*N.pi,n)
# Get a bundle of vectors defining
# a full rotation around the unit circle
return N.array([N.cos(u),N.sin(u)]).T
def sph2cart(lat,lon):
_ = stereonet_math.sph2cart(lat,lon)
#val = N.array(_).flatten()
val = N.roll(_,-1)
val[:-1] *= -1
return val
def scale_errors(cov_axes, confidence_level=0.95):
"""
Returns major axes of error ellipse or
hyperbola, rescaled using chi2 test statistic
"""
dof = len(cov_axes)
x2t = chi2.ppf(confidence_level,dof)
return N.sqrt(x2t*cov_axes)
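# Illustrative sketch, not part of the original module: a minimal check of
# what scale_errors() computes. The covariance values below are made-up
# numbers chosen only for demonstration.
def _example_scale_errors():
    cov_axes = N.array([0.5, 0.2, 0.01])
    widths = scale_errors(cov_axes, confidence_level=0.95)
    # scale_errors multiplies each variance by the chi-squared quantile for
    # len(cov_axes) degrees of freedom and takes the square root.
    assert_array_almost_equal(widths, N.sqrt(chi2.ppf(0.95, 3) * cov_axes))
    return widths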
def normal_errors(axes, covariance_matrix, **kwargs):
"""
Currently assumes upper hemisphere of stereonet
"""
level = kwargs.pop('level',1)
traditional_layout = kwargs.pop('traditional_layout',True)
d = N.diagonal(covariance_matrix)
ell = ellipse(**kwargs)
if axes[2,2] < 0:
axes *= -1
# Not sure where this factor comes from but it
# seems to make things work better
c1 = 2
axis_lengths = d[:2]
f = N.linalg.norm(
ell*axis_lengths,axis=1)
e0 = -ell.T*d[2]*c1
e = N.vstack((e0,f))
_ = dot(e.T,axes).T
if traditional_layout:
lon,lat = stereonet_math.cart2sph(_[2],_[0],-_[1])
else:
lon,lat = stereonet_math.cart2sph(-_[1],_[0],_[2])
return list(zip(lon,lat))
def test_ellipse():
    n = 1000
    ell = ellipse(n=n)
    u = N.linspace(0, 2*N.pi, n)
    arr = N.array([N.cos(u), N.sin(u)]).T
    assert_array_almost_equal(ell, arr)
def plane_errors(axes, covariance_matrix, sheet='upper', **kwargs):
"""
kwargs:
traditional_layout boolean [True]
Lay the stereonet out traditionally, with north at the pole of
the diagram. The default is a more natural and intuitive visualization
with vertical at the pole and the compass points of strike around the equator.
Thus, longitude at the equator represents strike and latitude represents
apparent dip at that azimuth.
"""
level = kwargs.pop('level',1)
traditional_layout = kwargs.pop('traditional_layout',True)
d = covariance_matrix
ell = ellipse(**kwargs)
bundle = dot(ell, d[:2])
res = d[2]*level*2
# Switch hemispheres if PCA is upside-down
# Normal vector is always correctly fit
#if traditional_layout:
#if axes[2,2] > 0:
if axes[2,2] > 0:
res *= -1
if sheet == 'upper':
bundle += res
elif sheet == 'lower':
bundle -= res
_ = dot(bundle,axes).T
if traditional_layout:
lon,lat = stereonet_math.cart2sph(_[2],_[0],_[1])
else:
lon,lat = stereonet_math.cart2sph(-_[1],_[0],_[2])
return list(zip(lon,lat))
def iterative_normal_errors(axes, covariance_matrix, **kwargs):
"""
Currently assumes upper hemisphere of stereonet
"""
level = kwargs.pop('level',1)
traditional_layout = kwargs.pop('traditional_layout',True)
n = kwargs.get('n', 100)
d = N.diagonal(covariance_matrix)
u = N.linspace(0, 2*N.pi, n)
if axes[2,2] < 0:
axes *= -1
# Not sure where this factor comes from but it
# seems to make things work better
c1 = 2
def sdot(a,b):
return sum([i*j for i,j in zip(a,b)])
def step_func(a):
e = [
-c1*d[2]*N.cos(a),
-c1*d[2]*N.sin(a),
N.linalg.norm([N.cos(a)*d[0],N.sin(a)*d[1]])
]
r = [sdot(e,i)
for i in axes.T]
if traditional_layout:
x,y,z = r[2],r[0],-r[1]
else:
x,y,z = -r[1],r[0],r[2]
r = N.sqrt(x**2 + y**2 + z**2)
lon = N.arctan2(y, x)
lat = N.arcsin(z/r)
return lon,lat
# Get a bundle of vectors defining
# a full rotation around the unit circle
vals = [step_func(i) for i in u]
return vals
def iterative_plane_errors(axes,covariance_matrix, **kwargs):
"""
An iterative version of `pca.plane_errors`,
which computes an error surface for a plane.
"""
sheet = kwargs.pop('sheet','upper')
level = kwargs.pop('level',1)
n = kwargs.pop('n',100)
cov = N.diagonal(covariance_matrix)
u = N.linspace(0, 2*N.pi, n)
scales = dict(upper=1,lower=-1,nominal=0)
c1 = scales[sheet]*2 # We double the scale of errors since they are symmetrical
c1 *= -1 # We assume upper hemisphere
if axes[2,2] < 0:
c1 *= -1
def sdot(a,b):
return sum([i*j for i,j in zip(a,b)])
def step_func(a):
e = [
N.cos(a)*cov[0],
N.sin(a)*cov[1],
c1*cov[2]]
d = [sdot(e,i)
for i in axes.T]
x,y,z = d[2],d[0],d[1]
r = N.sqrt(x**2 + y**2 + z**2)
lat = N.arcsin(z/r)
lon = N.arctan2(y, x)
return lon,lat
# Get a bundle of vectors defining
# a full rotation around the unit circle
return N.array([step_func(i)
for i in u])
def error_ellipse(axes, covariance_matrix, **kwargs):
level = kwargs.pop('level',1)
traditional_layout = kwargs.pop('traditional_layout',True)
d = N.sqrt(covariance_matrix)
ell = ellipse(**kwargs)
# Bundle of vectors surrounding nominal values
bundle = dot(ell, d[:2])
res = d[2]*level
# Switch hemispheres if PCA is upside-down
# Normal vector is always correctly fit
if axes[2,2] > 0:
res *= -1
normal = vector(0,0,1)
_ = normal + bundle
if traditional_layout:
lon,lat = stereonet_math.cart2sph(_[2],_[0],_[1])
else:
lon,lat = stereonet_math.cart2sph(-_[1],_[0],_[2])
return list(zip(lon,lat))
def error_coords(axes, covariance_matrix, **kwargs):
# Support for multiple levels of errors
# (not sure if this directly corresponds
# to sigma).
levels = kwargs.pop('levels',None)
do_ellipse = kwargs.pop('ellipse',True)
u = 'upper'
l = 'lower'
def _(half, level=1):
lonlat = plane_errors(axes, covariance_matrix,
half, level=level, **kwargs)
return N.degrees(lonlat).tolist()
def __(level):
data = dict(
upper=_(u, level),
lower=_(l, level))
if do_ellipse:
ell = error_ellipse(
axes, covariance_matrix,
level=level, **kwargs)
data['ellipse'] = N.degrees(ell).tolist()
return data
out = dict(nominal=_('nominal'))
if levels is None:
i = __(1)
else:
i = {l:__(l) for l in levels}
out.update(i)
return out
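# Illustrative note, not part of the original module: error_coords() returns
# a dict of coordinate lists in degrees. With the default levels=None it has
# the keys 'nominal', 'upper', 'lower' (and 'ellipse' unless ellipse=False);
# with e.g. levels=[1, 2] the per-level data is nested under out[1], out[2]:
#
#   out = error_coords(axes, covariance_matrix, levels=[1, 2])
#   out['nominal']        # list of [lon, lat] pairs for the nominal plane
#   out[1]['upper']       # upper error sheet at level 1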
|
MatthewWilkes/mw4068-packaging
|
src/melange/src/soc/tasks/helper/__init__.py
|
Python
|
apache-2.0
| 654
| 0.004587
|
#
# Copyright 2009 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains Melange Task API related helper modules."""
|
miyakz1192/neutron
|
neutron/tests/api/test_extra_dhcp_options.py
|
Python
|
apache-2.0
| 4,030
| 0.000248
|
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest_lib.common.utils import data_utils
from neutron.tests.api import base
from neutron.tests.tempest import test
class ExtraDHCPOptionsTestJSON(base.BaseNetworkTest):
"""
Tests the following operations with the Extra DHCP Options Neutron API
extension:
port create
port list
port show
port update
v2.0 of the Neutron API is assumed. It is also assumed that the Extra
DHCP Options extension is enabled in the [network-feature-enabled]
section of etc/tempest.conf
"""
@classmethod
def resource_setup(cls):
super(ExtraDHCPOptionsTestJSON, cls).resource_setup()
if not test.is_extension_enabled('extra_dhcp_opt', 'network'):
msg = "Extra DHCP Options extension not enabled."
raise cls.skipException(msg)
cls.network = cls.create_network()
cls.subnet = cls.create_subnet(cls.network)
cls.port = cls.create_port(cls.network)
cls.ip_tftp = ('123.123.123.123' if cls._ip_version == 4
else '2015::dead')
cls.ip_server = ('123.123.123.45' if cls._ip_version == 4
else '2015::badd')
cls.extra_dhcp_opts = [
{'opt_value': 'pxelinux.0', 'opt_name': 'bootfile-name'},
{'opt_value': cls.ip_tftp, 'opt_name': 'tftp-server'},
{'opt_value': cls.ip_server, 'opt_name': 'server-ip-address'}
]
@test.attr(type='smoke')
@test.idempotent_id('d2c17063-3767-4a24-be4f-a23dbfa133c9')
def test_create_list_port_with_extra_dhcp_options(self):
# Create a port with Extra DHCP Options
body = self.client.create_port(
network_id=self.network['id'],
extra_dhcp_opts=self.extra_dhcp_opts)
port_id = body['port']['id']
self.addCleanup(self.client.delete_port, port_id)
# Confirm port created has Extra DHCP Options
body = self.client.list_ports()
ports = body['ports']
port = [p for p in ports if p['id'] == port_id]
self.assertTrue(port)
self._confirm_extra_dhcp_options(port[0], self.extra_dhcp_opts)
@test.attr(type='smoke')
@test.idempotent_id('9a6aebf4-86ee-4f47-b07a-7f7232c55607')
def test_update_show_port_with_extra_dhcp_options(self):
# Update port with extra dhcp options
name = data_utils.rand_name('new-port-name')
body = self.client.update_port(
self.port['id'],
name=name,
extra_dhcp_opts=self.extra_dhcp_opts)
# Confirm extra dhcp options were added to the port
body = self.client.show_port(self.port['id'])
self._confirm_extra_dhcp_options(body['port'], self.extra_dhcp_opts)
def _confirm_extra_dhcp_options(self, port, extra_dhcp_opts):
retrieved = port['extra_dhcp_opts']
self.assertEqual(len(retrieved), len(extra_dhcp_opts))
for retrieved_option in retrieved:
for option in extra_dhcp_opts:
if (retrieved_option['opt_value'] == option['opt_value'] and
retrieved_option['opt_name'] == option['opt_name']):
break
else:
self.fail('Extra DHCP option not found in port %s' %
str(retrieved_option))
class ExtraDHCPOptionsIpV6TestJSON(ExtraDHCPOptionsTestJSON):
_ip_version = 6
|
lkylei/ten_thousand
|
roms/u-boot/tools/patman/gitutil.py
|
Python
|
gpl-2.0
| 18,813
| 0.002232
|
# Copyright (c) 2011 The Chromium OS Authors.
#
# SPDX-License-Identifier: GPL-2.0+
#
import command
import re
import os
import series
import subprocess
import sys
import terminal
import checkpatch
import settings
def CountCommitsToBranch():
"""Returns number of commits between HEAD and the tracking branch.
This looks back to the tracking branch and works out the number of commits
since then.
Return:
Number of patches that exist on top of the branch
"""
pipe = [['git', 'log', '--no-color', '--oneline', '--no-decorate',
'@{upstream}..'],
['wc', '-l']]
stdout = command.RunPipe(pipe, capture=True, oneline=True).stdout
patch_count = int(stdout)
return patch_count
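# Illustrative note, not part of the original file: the pipe above is the
# equivalent of running
#   git log --no-color --oneline --no-decorate '@{upstream}..' | wc -l
# in a shell, i.e. it counts the commits on the current branch that are not
# yet on its tracking branch.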
def GetUpstream(git_dir, branch):
"""Returns the name of the upstream for a branch
Args:
git_dir: Git directory containing repo
branch: Name of branch
Returns:
Name of upstream branch (e.g. 'upstream/master') or None if none
"""
try:
remote = command.OutputOneLine('git', '--git-dir', git_dir, 'config',
'branch.%s.remote' % branch)
merge = command.OutputOneLine('git', '--git-dir', git_dir, 'config',
'branch.%s.merge' % branch)
except:
return None
if remote == '.':
return merge
elif remote and merge:
leaf = merge.split('/')[-1]
return '%s/%s' % (remote, leaf)
else:
raise ValueError, ("Cannot determine upstream branch for branch "
"'%s' remote='%s', merge='%s'" % (branch, remote, merge))
def GetRangeInBranch(git_dir, branch, include_upstream=False):
"""Returns an expression for the commits in the given branch.
Args:
git_dir: Directory containing git repo
branch: Name of branch
Return:
Expression in the form 'upstream..branch' which can be used to
access the commits. If the branch does not exist, returns None.
"""
upstream = GetUpstream(git_dir, branch)
if not upstream:
return None
return '%s%s..%s' % (upstream, '~' if include_upstream else '', branch)
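# Illustrative note, not part of the original file: for a branch 'feature'
# tracking 'origin/master', GetRangeInBranch() returns 'origin/master..feature',
# or 'origin/master~..feature' when include_upstream=True; either string can
# be passed directly to `git log`.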
def CountCommitsInBranch(git_dir, branch, include_upstream=False):
"""Returns the number of commits in the given branch.
Args:
git_dir: Directory containing git repo
branch: Name of branch
Return:
Number of patches that exist on top of the branch, or None if the
branch does not exist.
"""
range_expr = GetRangeInBranch(git_dir, branch, include_upstream)
if not range_expr:
return None
pipe = [['git', '--git-dir', git_dir, 'log', '--oneline', '--no-decorate',
range_expr],
['wc', '-l']]
result = command.RunPipe(pipe, capture=True, oneline=True)
patch_count = int(result.stdout)
return patch_count
def CountCommits(commit_range):
"""Returns the number of commits in the given range.
Args:
commit_range: Range of commits to count (e.g. 'HEAD..base')
Return:
Number of patches that exist on top of the branch
"""
pipe = [['git', 'log', '--oneline', '--no-decorate', commit_range],
['wc', '-l']]
stdout = command.RunPipe(pipe, capture=True, oneline=True).stdout
patch_count = int(stdout)
return patch_count
def Checkout(commit_hash, git_dir=None, work_tree=None, force=False):
"""Checkout the selected commit for this build
Args:
commit_hash: Commit hash to check out
"""
pipe = ['git']
if git_dir:
pipe.extend(['--git-dir', git_dir])
if work_tree:
pipe.extend(['--work-tree', work_tree])
pipe.append('checkout')
if force:
pipe.append('-f')
pipe.append(commit_hash)
result = command.RunPipe([pipe], capture=True, raise_on_error=False)
if result.return_code != 0:
raise OSError, 'git checkout (%s): %s' % (pipe, result.stderr)
def Clone(git_dir, output_dir):
"""Checkout the selected commit for this build
Args:
commit_hash: Commit hash to check out
"""
pipe = ['git', 'clone', git_dir, '.']
result = command.RunPipe([pipe], capture=True, cwd=output_dir)
if result.return_code != 0:
raise OSError, 'git clone: %s' % result.stderr
def Fetch(git_dir=None, work_tree=None):
"""Fetch from the origin repo
Args:
commit_hash: Commit hash to check out
"""
pipe = ['git']
if git_dir:
pipe.extend(['--git-dir', git_dir])
if work_tree:
pipe.extend(['--work-tree', work_tree])
pipe.append('fetch')
result = command.RunPipe([pipe], capture=True)
if result.return_code != 0:
raise OSError, 'git fetch: %s' % result.stderr
def CreatePatches(start, count, series):
"""Create a series of patches from the top of the current branch.
The patch files are written to the current directory using
git format-patch.
Args:
start: Commit to start from: 0=HEAD, 1=next one, etc.
count: number of commits to include
Return:
Filename of cover letter
List of filenames of patch files
"""
if series.get('version'):
version = '%s ' % series['version']
cmd = ['git', 'format-patch', '-M', '--signoff']
if series.get('cover'):
cmd.append('--cover-letter')
prefix = series.GetPatchPrefix()
if prefix:
cmd += ['--subject-prefix=%s' % prefix]
cmd += ['HEAD~%d..HEAD~%d' % (start + count, start)]
stdout = command.RunList(cmd)
files = stdout.splitlines()
# We have an extra file if there is a cover letter
if series.get('cover'):
return files[0], files[1:]
else:
return None, files
def ApplyPatch(verbose, fname):
"""Apply a patch with git am to test it
TODO: Convert these to use command, with stderr option
Args:
fname: filename of patch file to apply
"""
col = terminal.Color()
cmd = ['git', 'am', fname]
pipe = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = pipe.communicate()
re_error = re.compile('^error: patch failed: (.+):(\d+)')
for line in stderr.splitlines():
if verbose:
print line
match = re_error.match(line)
if match:
print checkpatch.GetWarningMsg(col, 'warning', match.group(1),
int(match.group(2)), 'Patch failed')
return pipe.returncode == 0, stdout
def ApplyPatches(verbose, args, start_point):
"""Apply the patches with git am to make sure all is well
Args:
verbose: Print out 'git am' output verbatim
args: List of patch files to apply
start_point: Number of commits back from HEAD to start applying.
Normally this is len(args), but it can be larger if a start
offset was given.
"""
error_count = 0
col = terminal.Color()
# Figure out our current position
cmd = ['git', 'name-rev', 'HEAD', '--name-only']
pipe = subprocess.Popen(cmd, stdout=subprocess.PIPE)
stdout, stderr = pipe.communicate()
if pipe.returncode:
str = 'Could not find current commit name'
print col.Color(col.RED, str)
print stdout
return False
old_head = stdout.splitlines()[0]
# Checkout the required start point
cmd = ['git', 'checkout', 'HEAD~%d' % start_point]
pipe = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = pipe.communicate()
if pipe.returncode:
str = 'Could not move to commit before patch series'
print col.Color(col.RED, str)
print stdout, stderr
return False
# Apply all the patches
for fname in args:
ok, stdout = ApplyPatch(verbose, fname)
if not ok:
print col.Color(col.RED, 'git am returned errors for %s: will '
'skip this patch' % fname)
if verbose:
print stdout
error_count += 1
cmd = ['git', 'am', '--skip']
pipe = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
operasoftware/tlscommon
|
test_results.py
|
Python
|
apache-2.0
| 4,642
| 0.019604
|
# Copyright 2010-2012 Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''
Created on 31 March 2012
@author: yngve
'''
# List of status codes used in both web prober and the batch prober
RESULTC_CH_MINLEN_NOT_TESTED = "CMNT" # Client Hello Minimum length not tested
RESULTC_CH_MINLEN_PASSED_256 = "CMP1" # Client Hello Minimum length 256 passed
RESULTC_CH_MINLEN_FAILED_256 = "CMF1" # Client Hello Minimum length 256 failed
RESULTC_CH_MINLEN_FAILED_256_30 = "CM10" # Client Hello Minimum length 256 failed SSL v3
RESULTC_CH_MINLEN_FAILED_256_31 = "CM11" # Client Hello Minimum length 256 failed TLS 1.0
RESULTC_CH_MINLEN_FAILED_256_33 = "CM13" # Client Hello Minimum length 256 failed TLS 1.2
RESULTC_HIGHER_RECV_TEST = "HRVT" # Higher record versions than TLS 1.0 tested during handshake
RESULTC_HIGHER_RECV_NOTEST = "HRVN" # Higher record versions than TLS 1.0 not tested during handshake
RESULTC_RECV_ANY_FAILED = "HRAF" # Higher Record version tested during handshake, some failed
RESULTC_RECV_ANY_PASSED = "HRAP" # Higher Record version tested during handshake, all passed
RESULTC_RECV_32_FAILED = "HR2F" # Record version TLS 1.1 tested during handshake, failed
RESULTC_RECV_32_PASSED = "HR2P" # Record version TLS 1.1 tested during handshake, passed
RESULTC_RECV_33_FAILED = "HR3F" # Record version TLS 1.2 tested during handshake, failed
RESULTC_RECV_33_PASSED = "HR3P" # Record version TLS 1.2 tested during handshake, passed
TRESULTC_VALUES = (
(RESULTC_CH_MINLEN_NOT_TESTED, "Client Hello Minimum length not tested"),
(RESULTC_CH_MINLEN_PASSED_256, "Client Hello Minimum length 256 passed"),
(RESULTC_CH_MINLEN_FAILED_256, "Client Hello Minimum length 256 failed"),
(RESULTC_CH_MINLEN_FAILED_256_30,"Client Hello Minimum length 256 failed SSL v3"),
(RESULTC_CH_MINLEN_FAILED_256_31,"Client Hello Minimum length 256 failed TLS 1.0"),
(RESULTC_CH_MINLEN_FAILED_256_33,"Client Hello Minimum length 256 failed TLS 1.2"),
(RESULTC_HIGHER_RECV_TEST, "Higher record versions than TLS 1.0 tested during handshake"),
(RESULTC_HIGHER_RECV_NOTEST, "Higher record versions than TLS 1.0 not tested during handshake"),
(RESULTC_RECV_ANY_FAILED, "Higher Record version tested during handshake, some failed"),
(RESULTC_RECV_ANY_PASSED, "Higher Record version tested during handshake, all passed"),
(RESULTC_RECV_32_FAILED, "Record version TLS 1.1 tested during handshake, failed"),
(RESULTC_RECV_32_PASSED, "Record version TLS 1.1 tested during handshake, passed"),
(RESULTC_RECV_33_FAILED, "Record version TLS 1.2 tested during handshake, failed"),
(RESULTC_RECV_33_PASSED, "Record version TLS 1.2 tested during handshake, passed"),
)
TRESULTC_VALUES_dict = dict(TRESULTC_VALUES)
# Check for duplicates and missing status codes
__values_set = {}
for __result_var in dir():
if not __result_var.startswith("RESULTC_") or __result_var.startswith("RESULTC_VALUES"):
continue
if eval(__result_var) not in TRESULTC_VALUES_dict:
raise Exception("Entry %s was not present in RESULTC_VALUES list" % (__result_var,))
if eval(__result_var) in __values_set:
print "Double entry in RESULTC_* enum values: ", __result_var, ". Matches ", __values_set[ eval(__result_var)]
raise Exception("Double entry in RESULTC_* enum values: " + __result_var+ ". Matches "+ __values_set[ eval(__result_var)])
__values_set[eval(__result_var)] = __result_var
if any([len([__y for __y in TRESULTC_VALUES if __x[0] == __y[0]])>1 for __x in TRESULTC_VALUES]):
print "Double entry in RESULTC_* enum values"
raise Exception("Double entry in RESULTC_* enum values")
if any([len([__y for __y in TRESULTC_VALUES if __x != __y and __x[1] == __y[1]])>1 for __x in TRESULTC_VALUES]):
print "Double explanation entry in RESULTC_* enum values", str([__z for __z in [[(__x,__y) for __y in TRESULTC_VALUES if __x != __y and __x[1] == __y[1]] for __x in TRESULTC_VALUES] if len(__z) > 1])
raise Exception("Double explanation entry in RESULTC_* enum values" + str([__z for __z in [[(__x,__y) for __y in TRESULTC_VALUES if __x != __y and __x[1] == __y[1]] for __x in TRESULTC_VALUES] if len(__z) > 1]))
|
GamesCrafters/GamesmanClassic
|
src/py/games/tt2.py
|
Python
|
gpl-2.0
| 963
| 0.001038
|
import game
import server
class tt2(game.Game):
class TT2Process(server.GameProcess):
def memory_percent_usage(self):
return 0.0
def __init__(self, server, name):
game.Game.__init__(self, server, name)
self.process_class = self.TT2Process
def get_option(self, query):
if query['misere'] == 'yes':
return 1
def respond_to_unknown_request(self, req):
if req.command == 'getOptions':
options = [{'misere': 'yes',
'number': 1,
'width': 6,
'height': 3},
{'misere': 'no',
'number': 2,
'width': 6,
'height': 3},
{}]
req.respond(self.format_parsed(
{'status': 'ok',
'response': options}))
else:
raise NotImplementedError()
|
false-git/mail2entry
|
postentry.py
|
Python
|
gpl-2.0
| 621
| 0.011272
|
#! /usr/bin/env python
"""Post a new
|
MT entry"""
# username, password, blogid, publish
from settings import *
import types
import xmlrpclib
def post(content):
"""Post an entry to a blog. Return postid on success."""
content.check()
weblogContent = { 'title' : content.getTitle(),
'description' : content.getEntry() }
server = xmlrpclib.ServerProxy(uri)
# on success, result should be an integer representing a postid
result = server.metaWeblog.newPost(blogid, username, password,
weblogContent, publish)
return result
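# Illustrative sketch, not part of the original file: post() expects a content
# object exposing check(), getTitle() and getEntry(); `content` below is a
# hypothetical instance of such a class, and uri/blogid/username/password/
# publish come from the wildcard import of settings above.
#
#   postid = post(content)
#   print "created entry", postid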
|
Azure/azure-sdk-for-python
|
sdk/sql/azure-mgmt-sql/azure/mgmt/sql/operations/__init__.py
|
Python
|
mit
| 15,615
| 0.004163
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from ._recoverable_databases_operations import RecoverableDatabasesOperations
from ._data_masking_policies_operations import DataMaskingPoliciesOperations
from ._data_masking_rules_operations import DataMaskingRulesOperations
from ._geo_backup_policies_operations import GeoBackupPoliciesOperations
from ._databases_operations import DatabasesOperations
from ._elastic_pools_operations import ElasticPoolsOperations
from ._replication_links_operations import ReplicationLinksOperations
from ._server_communication_links_operations import ServerCommunicationLinksOperations
from ._service_objectives_operations import ServiceObjectivesOperations
from ._elastic_pool_activities_operations import ElasticPoolActivitiesOperations
from ._elastic_pool_database_activities_operations import ElasticPoolDatabaseActivitiesOperations
from ._server_usages_operations import ServerUsagesOperations
from ._extended_database_blob_auditing_policies_operations import ExtendedDatabaseBlobAuditingPoliciesOperations
from ._extended_server_blob_auditing_policies_operations import ExtendedServerBlobAuditingPoliciesOperations
from ._server_blob_auditing_policies_operations import ServerBlobAuditingPoliciesOperations
from ._database_blob_auditing_policies_operations import DatabaseBlobAuditingPoliciesOperations
from ._database_advisors_operations import DatabaseAdvisorsOperations
from ._database_automatic_tuning_operations import DatabaseAutomaticTuningOperations
from ._database_columns_operations import DatabaseColumnsOperations
from ._database_recommended_actions_operations import DatabaseRecommendedActionsOperations
from ._database_schemas_operations import DatabaseSchemasOperations
from ._database_security_alert_policies_operations import DatabaseSecurityAlertPoliciesOperations
from ._database_tables_operations import DatabaseTablesOperations
from ._database_vulnerability_assessment_rule_baselines_operations import DatabaseVulnerabilityAssessmentRuleBaselinesOperations
from ._database_vulnerability_assessments_operations import DatabaseVulnerabilityAssessmentsOperations
from ._database_vulnerability_assessment_scans_operations import DatabaseVulnerabilityAssessmentScansOperations
from ._data_warehouse_user_activities_operations import DataWarehouseUserActivitiesOperations
from ._deleted_servers_operations import DeletedServersOperations
from ._elastic_pool_operations_operations import ElasticPoolOperationsOperations
from ._encryption_protectors_operations import EncryptionProtectorsOperations
from ._failover_groups_operations import FailoverGroupsOperations
from ._firewall_rules_operations import FirewallRulesOperations
from ._instance_failover_groups_operations import InstanceFailoverGroupsOperations
from ._instance_pools_operations import InstancePoolsOperations
from ._job_agents_operations import JobAgentsOperations
from ._job_credentials_operations import JobCredentialsOperations
from ._job_executions_operations import JobExecutionsOperations
from ._jobs_operations import JobsOperations
from ._job_step_executions_operations import JobStepExecutionsOperations
from ._job_steps_operations import JobStepsOperations
from ._job_target_executions_operations import JobTargetExecutionsOperations
from ._job_target_groups_operations import JobTargetGroupsOperations
from ._job_versions_operations import JobVersionsOperations
from ._capabilities_operations import CapabilitiesOperations
from ._long_term_retention_policies_operations import LongTermRetentionPoliciesOperations
from ._maintenance_window_options_operations import MaintenanceWindowOptionsOperations
from ._maintenance_windows_operations import MaintenanceWindowsOperations
from ._managed_backup_short_term_retention_policies_operations import ManagedBackupShortTermRetentionPoliciesOperations
from ._managed_database_columns_operations import ManagedDatabaseColumnsOperations
from ._managed_database_queries_operations import ManagedDatabaseQueriesOperations
from ._managed_database_restore_details_operations import ManagedDatabaseRestoreDetailsOperations
from ._managed_databases_operations import ManagedDatabasesOperations
from ._managed_database_schemas_operations import ManagedDatabaseSchemasOperations
from ._managed_database_security_alert_policies_operations import ManagedDatabaseSecurityAlertPoliciesOperations
from ._managed_database_security_events_operations import ManagedDatabaseSecurityEventsOperations
from ._managed_database_sensitivity_labels_operations import ManagedDatabaseSensitivityLabelsOperations
from ._managed_database_recommended_sensitivity_labels_operations import ManagedDatabaseRecommendedSensitivityLabelsOperations
from ._managed_database_tables_operations import ManagedDatabaseTablesOperations
from ._managed_database_transparent_data_encryption_operations import ManagedDatabaseTransparentDataEncryptionOperations
from ._managed_database_vulnerability_assessment_rule_baselines_operations import ManagedDatabaseVulnerabilityAssessmentRuleBaselinesOperations
from ._managed_database_vulnerability_assessments_operations import ManagedDatabaseVulnerabilityAssessmentsOperations
from ._managed_database_vulnerability_assessment_scans_operations import ManagedDatabaseVulnerabilityAssessmentScansOperations
from ._managed_instance_administrators_operations import ManagedInstanceAdministratorsOperations
from ._managed_instance_azure_ad_only_authentications_operations import ManagedInstanceAzureADOnlyAuthenticationsOperations
from ._managed_instance_encryption_protectors_operations import ManagedInstanceEncryptionProtectorsOperations
from ._managed_instance_keys_operations import ManagedInstanceKeysOperations
from ._managed_instance_long_term_retention_policies_operations import ManagedInstanceLongTermRetentionPoliciesOperations
from ._managed_instance_operations_operations import ManagedInstanceOperationsOperations
from ._managed_instance_private_endpoint_connections_operations import ManagedInstancePrivateEndpointConnectionsOperations
from ._managed_instance_private_link_resources_operations import ManagedInstancePrivateLinkResourcesOperations
from ._managed_instance_tde_certificates_operations import ManagedInstanceTdeCertificatesOperations
from ._managed_instance_vulnerability_assessments_operations import ManagedInstanceVulnerabilityAssessmentsOperations
from ._managed_restorable_dropped_database_backup_short_term_retention_policies_operations import ManagedRestorableDroppedDatabaseBackupShortTermRetentionPoliciesOperations
from ._managed_server_security_alert_policies_operations import ManagedServerSecurityAlertPoliciesOperations
from ._operations import Operations
from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations
from ._private_link_resources_operations import PrivateLinkResourcesOperations
from ._recoverable_managed_databases_operations import RecoverableManagedDatabasesOperations
from ._restore_points_operations import RestorePointsOperations
from ._sensitivity_labels_operations import SensitivityLabelsOperations
from ._recommended_sensitivity_labels_operations import RecommendedSensitivityLabelsOperations
from ._server_advisors_operations import ServerAdvisorsOperations
from ._server_automatic_tuning_operations import ServerAutomaticTuningOperations
from ._server_azure_ad_administrators_operations import ServerAzureADAdministratorsOperations
from ._server_azure_ad_only_authentications_operations import ServerAzureADOnlyAuthenticationsOperations
from ._server_dev_ops_audit_settings_operations import ServerDevOpsAuditSettingsOperations
from ._server_dns_aliases_operations import ServerDnsAliasesOperations
from ._server_keys_operations import ServerKeysOperations
from ._server_operations_operations import ServerOperationsOperations
|
damdam-s/rma
|
product_warranty/models/return_instruction.py
|
Python
|
agpl-3.0
| 2,163
| 0
|
# -*- coding: utf-8 -*-
# ########################################################################
# #
# #
# ########################################################################
# #
# Copyright 2015 Vauxoo
# Copyright (C) 2009-2011 Akretion, Emmanuel Samyn, Benoît Guillot #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
##########################################################################
from openerp import fields, models
class ReturnInstruction(models.Model):
_name = "return.instruction"
_description = "Instructions for product return"
name = fields.Char('Title', required=True)
instructions = fields.Text(
'Instructions',
help="Instructions for product return")
is_default = fields.Boolean('Is default',
help="If is default, will be use "
"to set the default value in "
"supplier infos. Be careful to "
"have only one default")
|
gitsimon/spadup-lyra
|
frontend/import_handler.py
|
Python
|
mpl-2.0
| 1,952
| 0.002049
|
import ast
from frontend.context import Context
from frontend.stubs.stubs_paths import libraries
class ImportHandler:
"""Handler for importing other modules during the type inference"""
@staticmethod
def get_ast(path, module_name):
"""Get the AST of a python module
:param path: the path to the python module
:param module_name: the name of the python module
"""
try:
r = open(path)
except FileNotFoundError:
raise ImportError("No module named {}.".format(module_name))
tree = ast.parse(r.read())
r.close()
return tree
@staticmethod
def get_module_ast(module_name, base_folder):
"""Get the AST of a python module
:param module_name: the name of the python module
:param base_folder: the base folder containing the python module
"""
return ImportHandler.get_ast("{}/{}.py".format(base_folder, module_name), module_name)
@staticmethod
def get_builtin_ast(module_name):
"""Return the AST of a built-in module"""
return ImportHandler.get_ast(libraries[module_name], module_name)
@staticmethod
def infer_import(module_name, base_folder, infer_func, solver):
"""Infer the types of a python module"""
context = Context()
if ImportHandler.is_builtin(module_name):
solver.stubs_handler.infer_builtin_lib(module_name, context, solver,
solver.config.used_names, infer_func)
else:
t = ImportHandler.get_module_ast(module_name, base_folder)
solver.infer_stubs(context, infer_func)
for stmt in t.body:
infer_func(stmt, context, solver)
return context
@staticmethod
def is_builtin(module_name):
"""Check if the imported python module is builtin"""
return module_name in libraries
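# Illustrative sketch, not part of the original file: resolving a module's AST
# with ImportHandler. The module and folder names are hypothetical and assume
# a file base/mymodule.py exists on disk.
#
#   tree = ImportHandler.get_module_ast("mymodule", "base")
#   # equivalent to ast.parse(open("base/mymodule.py").read())
#   ImportHandler.is_builtin("mymodule")   # True only if listed in `libraries`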
|
nicko96/Chrome-Infra
|
appengine/chromium_cq_status/tests/login_test.py
|
Python
|
bsd-3-clause
| 563
| 0.003552
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from tests.testing_utils import testing
import highend
class TestLogin(testing.AppengineTestCase):
app_module = highend.app
def test_login(self):
response = self.test_app.get('/login')
self.assertEquals(302, response.status_code)
self.assertEquals(
('https://www.google.com/accounts/Login?' +
'continue=http%3A//testbed.example.com/'),
response.location)
|
jsfenfen/django-calaccess-raw-data
|
example/manage.py
|
Python
|
mit
| 304
| 0
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
virantha/verifytree
|
test/test_verifytree.py
|
Python
|
apache-2.0
| 275
| 0
|
import verifytree.VerifyTree as P
import pytest
import os
import logging
import smtplib
from mock import Mock
from mock import patch, call
from mock import MagicMock
from mock import PropertyMock
class Testverifytree:
def setup(self):
self.p = P.VerifyTree()
|
CLLKazan/iCQA
|
qa-engine/forum/migrations/0019_auto__del_likedcomment__del_comment__add_field_node_abs_parent__chg_fi.py
|
Python
|
gpl-3.0
| 27,368
| 0.008112
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting model 'LikedComment'
db.delete_table('forum_likedcomment')
# Deleting model 'Comment'
db.delete_table(u'comment')
# Adding field 'Node.abs_parent'
db.add_column('forum_node', 'abs_parent', self.gf('django.db.models.fields.related.ForeignKey')(related_name='all_children', null=True, to=orm['forum.Node']), keep_default=False)
# Changing field 'Question.last_activity_by'
db.alter_column(u'question', 'last_activity_by_id', self.gf('django.db.models.fields.related.ForeignKey')(null=True, to=orm['forum.User']))
def backwards(self, orm):
# Adding model 'LikedComment'
db.create_table('forum_likedcomment', (
('comment', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['forum.Comment'])),
('canceled', self.gf('django.db.models.fields.BooleanField')(default=False, blank=True)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['forum.User'])),
('added_at', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
))
db.send_create_signal('forum', ['LikedComment'])
# Adding model 'Comment'
db.create_table(u'comment', (
('comment', self.gf('django.db.models.fields.CharField')(max_length=300)),
('node', self.gf('django.db.models.fields.related.ForeignKey')(related_name='comments', null=True, to=orm['forum.Node'])),
('deleted', self.gf('django.db.models.fields.BooleanField')(default=False, blank=True)),
('added_at', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
('deleted_by', self.gf('django.db.models.fields.related.ForeignKey')(related_name='deleted_comments', null=True, to=orm['forum.User'], blank=True)),
('score', self.gf('django.db.models.fields.IntegerField')(default=0)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(related_name='comments', to=orm['forum.User'])),
('deleted_at', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
))
db.send_create_signal('forum', ['Comment'])
# Deleting field 'Node.abs_parent'
db.delete_column('forum_node', 'abs_parent_id')
# Changing field 'Question.last_activity_by'
db.alter_column(u'question', 'last_activity_by_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['forum.User']))
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'forum.activity': {
'Meta': {'object_name': 'Activity', 'db_table': "u'activity'"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'activity_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_auditted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['forum.User']"})
},
'forum.anonymousnode': {
'Meta': {'object_name': 'AnonymousNode', '_ormbases': ['forum.Node']},
'convertible_to': ('django.db.models.fields.CharField', [], {'default': "'node'", 'max_length': '16'}),
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['forum.Node']", 'unique': 'True', 'primary_key': 'True'}),
'validation_hash': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'anonymous_content'", 'to': "orm['forum.Node']"})
},
'forum.answer': {
'Meta': {'object_name': 'Answer', 'db_table': "u'answer'"},
'accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'accepted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'accepted_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['forum.User']", 'null': 'True'}),
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['forum.Node']", 'unique': 'True', 'primary_key': 'True'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'wikified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
|
battlesnake/OpenSCAD
|
scripts/macosx-sanity-check.py
|
Python
|
gpl-2.0
| 4,696
| 0.010009
|
#!/usr/bin/env python
#
# This is be used to verify that all the dependant libraries of a Mac OS X executable
# are present and that they are backwards compatible with at least 10.5.
# Run with an executable as parameter
# Will return 0 if the executable an all libraries are OK
# Returns != 0 and prints some textural description on error
#
# Author: Marius Kintel <marius@kintel.net>
#
# This script lives here:
# https://github.com/kintel/MacOSX-tools
#
import sys
import os
import subprocess
import re
DEBUG = False
def usage():
print >> sys.stderr, "Usage: " + sys.argv[0] + " <executable>"
sys.exit(1)
# Try to find the given library by searching in the typical locations
# Returns the full path to the library or None if the library is not found.
def lookup_library(file):
found = None
if not re.match("/", file):
if re.search("@executable_path", file):
abs = re.sub("^@executable_path", executable_path, file)
if os.path.exists(abs): found = abs
if DEBUG: print "Lib in @executable_path found: " + found
elif re.search("\.app/", file):
found = file
if DEBUG: print "App found: " + found
elif re.search("\.framework/", file):
found = os.path.join("/Library/Frameworks", file)
if DEBUG: print "Framework found: " + found
else:
for path in os.getenv("DYLD_LIBRARY_PATH").split(':'):
abs = os.path.join(path, file)
if os.path.exists(abs): found = abs
if DEBUG: print "Library found: " + found
else:
found = file
return found
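# Illustrative note, not part of the original script: lookup_library() turns
# the install names printed by otool into on-disk paths. A name containing
# '@executable_path' is resolved relative to the executable's directory,
# '.app/' paths are returned as-is, '.framework/' names are looked up under
# /Library/Frameworks, other relative names are searched along
# DYLD_LIBRARY_PATH, and absolute paths are returned unchanged.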
# Returns a list of dependent libraries, excluding system libs
def find_dependencies(file):
libs = []
args = ["otool", "-L", file]
if DEBUG: print "Executing " + " ".join(args)
p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output,err = p.communicate()
if p.returncode != 0:
print "Failed with return code " + str(
|
p.returncode) + ":"
print err
return None
deps = output.split('\n')
for dep in deps:
# print dep
dep = re.sub(".*:$", "", dep) # Take away header line
dep = re.sub("^\t", "", dep) # Remove initial tabs
dep = re.sub(" \(.*\)$", "", dep) # Remove trailing parentheses
if len(dep) > 0 and not re.search("/System/Library", dep) and not re.search("/usr/lib", dep):
libs.append(dep)
return libs
def validate_lib(lib):
p = subprocess.Popen(["otool", "-l", lib], stdout=subprocess.PIPE)
output = p.communicate()[0]
if p.returncode != 0: return False
if re.search("LC_DYLD_INFO_ONLY", output):
print "Error: Requires Snow Leopard: " + lib
return False
p = subprocess.Popen(["lipo", lib, "-verify_arch", "x86_64"], stdout=subprocess.PIPE)
output = p.communicate()[0]
if p.returncode != 0:
print "Error: x86_64 architecture not supported: " + lib
return False
p = subprocess.Popen(["lipo", lib, "-verify_arch", "i386"], stdout=subprocess.PIPE)
output = p.communicate()[0]
if p.returncode != 0:
print "Error: i386 architecture not supported: " + lib
return False
return True
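# Illustrative note, not part of the original script: validate_lib() flags
# libraries whose load commands include LC_DYLD_INFO_ONLY (hence the
# "Requires Snow Leopard" message) and uses `lipo -verify_arch` to require
# both x86_64 and i386 slices in the binary.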
if __name__ == '__main__':
error = False
if len(sys.argv) != 2: usage()
executable = sys.argv[1]
if DEBUG: print "Processing " + executable
executable_path = os.path.dirname(executable)
# processed is a dict {libname : [parents]} - each parent is dependant on libname
processed = {}
pending = [executable]
processed[executable] = []
while len(pending) > 0:
dep = pending.pop()
if DEBUG: print "Evaluating " + dep
deps = find_dependencies(dep)
assert(deps)
for d in deps:
absfile = lookup_library(d)
if not re.match(executable_path, absfile):
print "Error: External dependency " + d
sys.exit(1)
if absfile == None:
print "Not found: " + d
print " ..required by " + str(processed[dep])
error = True
continue
if absfile in processed:
processed[absfile].append(dep)
else:
processed[absfile] = [dep]
if DEBUG: print "Pending: " + absfile
pending.append(absfile)
for dep in processed:
if DEBUG: print "Validating: " + dep
# print " " + str(processed[dep])
if not validate_lib(dep):
print "..required by " + str(processed[dep])
error = True
if error: sys.exit(1)
else: sys.exit(0)
|
artekw/sensmon
|
sensnode/decoders/outnode.py
|
Python
|
mit
| 761
| 0.015769
|
#!/usr/bin/python2
# -*- coding: utf-8 -*-
import time
import datetime
import inspect
import simplejson as json
def outnode(data):
"""Outnode"""
a = int(data[2])
b = int(data[3])
c = int(data[4])
d = int(data[5])
e = int(data[6])
f = int(data[7])
g = int(data[8])
h = int(data[9])
i = int(data[10])
j = int(data[11])
name = inspect.stack()[0][3] # from the function name
timestamp = int(time.mktime(datetime.datetime.now().timetuple())) #unix time
template = ({
'name':name,
'temp': str(((256 * (f&3) + e) ^ 512) - 512),
'batvol':str((256 * j) + i),
'timestamp':timestamp
})
return dict((k,v) for (k,v) in template.iteritems())
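# Illustrative note, not part of the original file: the temperature field is a
# 10-bit two's-complement value whose low 8 bits arrive in e and whose top two
# bits arrive in f; `((256 * (f&3) + e) ^ 512) - 512` sign-extends it. For
# example f=3, e=246 gives a raw value of 1014, i.e. -10 after sign extension.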
|
Lilykos/invenio
|
invenio/legacy/bibedit/cli.py
|
Python
|
gpl-2.0
| 10,826
| 0.002402
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2008, 2009, 2010, 2011, 2012, 2013 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
from __future__ import print_function
# pylint: disable=C0103
"""
BibEdit CLI tool.
Usage: bibedit [options]
General options::
-h, --help print this help
-V, --version print version number
Options to inspect record history::
--list-revisions [recid] list all revisions of a record
--list-revisions-details [recid] list detailed revisions of a record
--get-revision [recid.revdate] print MARCXML of given record revision
--diff-revisions [recidA.revdateB] [recidC.revdateD] print MARCXML difference between
record A dated B and record C dated D
--revert-to-revision [recid.revdate] submit given record revision to
become current revision
--check-revisions [recid] check if revisions are not corrupted
(* stands for all records)
--fix-revisions [recid] fix revisions that are corrupted
(* stands for all records)
--clean-revisions [recid] clean duplicate revisions
(* stands for all records)
"""
__revision__ = "$Id$"
import sys
import zlib
from invenio.legacy.dbquery import run_sql
from intbitset import intbitset
from invenio.legacy.bibedit.utils import get_marcxml_of_revision_id, \
get_record_revision_ids, get_xml_comparison, record_locked_by_other_user, \
record_locked_by_queue, revision_format_valid_p, save_xml_record, \
split_revid, get_info_of_revision_id, get_record_revisions
from invenio.legacy.bibrecord import create_record, records_identical
def print_usage():
"""Print help."""
print(__doc__)
def print_version():
"""Print version information."""
print(__revision__)
def cli_clean_revisions(recid, dry_run=True, verbose=True):
"""Clean revisions of the given recid, by removing duplicate revisions
that do not change the content of the record."""
if recid == '*':
recids = intbitset(run_sql("SELECT DISTINCT id_bibrec FROM hstRECORD"))
else:
try:
recids = [int(recid)]
except ValueError:
print('ERROR: record ID must be integer, not %s.' % recid)
sys.exit(1)
for recid in recids:
all_revisions = run_sql("SELECT marcxml, job_id, job_name, job_person, job_date FROM hstRECORD WHERE id_bibrec=%s ORDER BY job_date ASC", (recid,))
previous_rec = {}
deleted_revisions = 0
for marcxml, job_id, job_name, job_person, job_date in all_revisions:
try:
current_rec = create_record(zlib.decompress(marcxml))[0]
except Exception:
print("ERROR: corrupted revisions found. Please run %s --fix-revisions '*'" % sys.argv[0], file=sys.stderr)
sys.exit(1)
if records_identical(current_rec, previous_rec):
deleted_revisions += 1
if not dry_run:
run_sql("DELETE FROM hstRECORD WHERE id_bibrec=%s AND job_id=%s AND job_name=%s AND job_person=%s AND job_date=%s", (recid, job_id, job_name, job_person, job_date))
previous_rec = current_rec
if verbose and deleted_revisions:
print("record %s: deleted %s duplicate revisions out of %s" % (recid, deleted_revisions, len(all_revisions)))
if verbose:
print("DONE")
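# A dry run is the default: cli_clean_revisions('*') only reports duplicate
# revisions; pass dry_run=False to actually delete them (sketch of intended use).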
def cli_list_revisions(recid, details=False):
"""Print list of all known record revisions (=RECID.REVDATE) for record
RECID.
"""
try:
recid = int(recid)
except ValueError:
print('ERROR: record ID must be integer, not %s.' % recid)
sys.exit(1)
record_rev_list = get_record_revision_ids(recid)
if not details:
out = '\n'.join(record_rev_list)
else:
out = "%s %s %s %s\n" % ("# Revision".ljust(22), "# Task ID".ljust(15),
"# Author".ljust(15), "# Job Details")
out += '\n'.join([get_info_of_revision_id(revid) for revid in record_rev_list])
if out:
print(out)
else:
print('ERROR: Record %s not found.' % recid)
def cli_get_revision(revid):
"""Return MARCXML for record revision REVID (=RECID.REVDATE) of a record."""
if not revision_format_valid_p(revid):
print('ERROR: revision %s is invalid; ' \
'must be NNN.YYYYMMDDhhmmss.' % revid)
sys.exit(1)
out = get_marcxml_of_revision_id(revid)
if out:
print(out)
else:
print('ERROR: Revision %s not found.' % revid)
def cli_diff_revisions(revid1, revid2):
"""Return diffs of MARCXML for record revisions REVID1, REVID2."""
for revid in [revid1, revid2]:
if not revision_format_valid_p(revid):
print('ERROR: revision %s is invalid; ' \
'must be NNN.YYYYMMDDhhmmss.' % revid)
sys.exit(1)
xml1 = get_marcxml_of_revision_id(revid1)
if not xml1:
print('ERROR: Revision %s not found. ' % revid1)
sys.exit(1)
xml2 = get_marcxml_of_revision_id(revid2)
if not xml2:
print('ERROR: Revision %s not found. ' % revid2)
sys.exit(1)
print(get_xml_comparison(revid1, revid2, xml1, xml2))
def cli_revert_to_revision(revid):
"""Submit specified record revision REVID upload, to replace current
version.
"""
if not revision_format_valid_p(revid):
print('ERROR: revision %s is invalid; ' \
'must be NNN.YYYYMMDDhhmmss.' % revid)
sys.exit(1)
xml_record = get_marcxml_of_revision_id(revid)
if xml_record == '':
print('ERROR: Revision %s does not exist. ' % revid)
sys.exit(1)
recid = split_revid(revid)[0]
if record_locked_by_other_user(recid, -1):
print('The record is currently being edited. ' \
'Please try again in a few minutes.')
sys.exit(1)
if record_locked_by_queue(recid):
print('The record is locked because of unfinished upload tasks. ' \
'Please try again in a few minutes.')
sys.exit(1)
save_xml_record(recid, 0, xml_record)
print('Your modifications have now been submitted. They will be ' \
'processed as soon as the task queue is empty.')
def check_rev(recid, verbose=True, fix=False):
revisions = get_record_revisions(recid)
for recid, job_date in revisions:
rev = '%s.%s' % (recid, job_date)
try:
get_marcxml_of_revision_id(rev)
if verbose:
print('%s: ok' % rev)
except zlib.error:
print('%s: invalid' % rev)
if fix:
fix_rev(recid, job_date, verbose)
def fix_rev(recid, job_date, verbose=True):
sql = 'DELETE FROM hstRECORD WHERE id_bibrec = %s AND job_date = "%s"'
run_sql(sql, (recid, job_date))
def cli_check_revisions(recid):
if recid == '*':
print('Checking all records')
recids = intbitset(run_sql("SELECT id FROM bibrec ORDER BY id"))
for index, rec in enumerate(recids):
if index % 1000 == 0 and index:
print(index, 'records processed')
check_rev(rec, verbose=False)
else:
check_rev(recid)
def cli_fix_revisions(recid):
if recid == '*':
print('Fixin
|
acq4/acq4
|
acq4/devices/AxoPatch200/AxoPatch200.py
|
Python
|
mit
| 25,005
| 0.012478
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import with_statement
import time
from collections import OrderedDict
import numpy as np
from pyqtgraph.WidgetGroup import WidgetGroup
from acq4.devices.DAQGeneric import DAQGeneric, DAQGenericTask, DAQGenericTaskGui, DataMapping
from acq4.util import Qt
from acq4.util.Mutex import Mutex
from acq4.util.debug import printExc
Ui_devGui = Qt.importTemplate('.devGuiTemplate')
class AP200DataMapping(DataMapping):
def __init__(self, dev, ivModes, chans=None, mode=None):
## mode can be provided:
## - during __init__
## - explicitly when calling map functions
## - implicitly when calling map functions (uses device's current mode)
self.dev = dev
self.mode = mode
self.ivModes = ivModes
self.gainSwitch = self.dev.getGainSwitchValue()
def setMode(self, mode):
self.mode = mode
def getGain(self, chan, mode, switch=None):
if switch == None:
switch = self.gainSwitch
if mode is None:
if self.mode is None:
mode = self.dev.getMode()
else:
mode = self.mode
if chan != 'command':
return self.dev.interpretGainSwitchValue(switch, mode)
else:
#global ivModes
ivMode = self.ivModes[mode]
if ivMode == 'vc':
return 50.0 # in VC mode, sensitivity is 20mV/V; scale is 1/20e-3 = 50
else:
return 5e8 # in IC mode, sensitivity is 2nA/V; scale is 1/2e-9 = 5e8
def mapToDaq(self, chan, data, mode=None):
gain = self.getGain(chan, mode)
return data * gain
def mapFromDaq(self, chan, data, mode=None):
gain = self.getGain(chan, mode)
return data / gain
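# Rough worked example (values taken from getGain above, names purely
# illustrative): in VC mode the command channel gain is 50 (20 mV/V
# sensitivity), so mapToDaq('command', 0.01) -> 0.5 V at the DAQ and
# mapFromDaq('command', 0.5) -> 0.01 V; in IC mode the gain is 5e8 (2 nA/V).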
class AxoPatch200(DAQGeneric):
sigShowModeDialog = Qt.Signal(object)
sigHideModeDialog = Qt.Signal()
#sigHoldingChanged = Qt.Signal(object) ## provided by DAQGeneric
sigModeChanged = Qt.Signal(object)
def __init__(self, dm, config, name):
# Generate config to use for DAQ
daqConfig = {}
for ch in ['GainChannel', 'LPFChannel', 'ModeChannel']:
if ch not in config:
continue
daqConfig[ch] = config[ch].copy()
#if 'GainChannel' in config:
# daqConfig['gain'] = {'type': 'ai', 'channel': config['GainChannel']}
#if 'LPFChannel' in config:
# daqConfig['LPF'] = {'type': 'ai', 'channel': config['LPFChannel'], 'units': 'Hz'}
if 'ScaledSignal' in config:
#daqConfig['primary'] = {'type': 'ai', 'channel': config['ScaledSignal']}
daqConfig['primary'] = config['ScaledSignal']
if config['ScaledSignal'].get('type', None) != 'ai':
raise Exception("AxoPatch200: ScaledSignal configuration must have type:'ai'")
if 'Command' in config:
#daqConfig['command'] = {'type': 'ao', 'channel': config['Command']}
daqConfig['command'] = config['Command']
if config['Command'].get('type', None) != 'ao':
raise Exception("AxoPatch200: ScaledSignal configuration must have type:'ao'")
## Note that both of these channels can be present, but we will only ever record from one at a time.
## Usually, we'll record from "I OUTPUT" in current clamp and "10 Vm OUTPUT" in voltage clamp.
if 'SecondaryVCSignal' in config:
self.hasSecondaryChannel = True
#daqConfig['secondary'] = {'type': 'ai', 'channel': config['SecondaryVCSignal']}
daqConfig['secondary'] = config['SecondaryVCSignal']
if config['SecondaryVCSignal'].get('type', None) != 'ai':
raise Exception("AxoPatch200: SecondaryVCSignal configuration must have type:'ai'")
elif 'SecondaryICSignal' in config:
self.hasSecondaryChannel = True
#daqConfig['secondary'] = {'type': 'ai', 'channel': config['SecondaryICSignal']}
daqConfig['secondary'] = config['SecondaryICSignal']
if config['SecondaryICSignal'].get('type', None) != 'ai':
raise Exception("AxoPatch200: SecondaryICSignal configuration must have type:'ai'")
else:
self.hasSecondaryChannel = False
self.version = config.get('version', '200B')
# Axopatch gain telegraph
# telegraph should not read below 2 V in CC mode
self.gain_tel = np.array([0.5, 1.0, 1.5, 2.0, 2.5, 3.0, 3.5, 4.0, 4.5, 5.0, 5.5, 6.0, 6.5])
self.gain_vm = np.array([0.5, 0.5, 0.5, 0.5, 1, 2, 5, 10, 20, 50, 100, 200, 500]) * 1e9 ## values in mv/pA
self.gain_im = np.array([0.05, 0.1, 0.2, 0.5, 1, 2, 5, 10, 20, 50, 100, 200, 500]) ## values in mV/mV
# Axopatch Lowpass Bessel Filter
self.lpf_tel = np.array([2.0, 4.0, 6.0, 8.0, 10.0])
self.lpf_freq = np.array([1.0, 2.0, 5.0, 10.0, 50.0])
if self.version == '200':
# telegraph voltage/output translation from the Axopatch 200 amplifier
self.mode_tel = np.array([6, 4, 2])
self.modeNames = OrderedDict([(0, 'V-Clamp'), (1, 'Track'), (2, 'I-Clamp')])
            self.ivModes = {'V-Clamp':'vc', 'Track':'ic', 'I-Clamp':'ic', 'vc':'vc', 'ic':'ic'}
|
self.modeAliases = {'ic': 'I-Clamp', 'i=0': 'Track', 'vc': 'V-Clamp'}
elif self.version == '200A':
# telegraph voltage/output translation from the Axopatch 200 amplifier
self.mode_tel = np.array([6, 4, 2, 1])
self.modeNames = OrderedDict([(0, 'V-Clamp'), (1, 'Track'), (2, 'I-Clamp Normal'), (3, 'I-Clamp Fast'), ])
self.ivModes = {'V-Clamp':'vc', 'Track':'vc', 'I-Clamp Fast':'ic', 'I-Clamp Normal':'ic', 'vc':'vc', 'ic':'ic'}
self.modeAliases = {'ic': 'I-Clamp Fast', 'i=0': 'Track', 'vc': 'V-Clamp'}
elif self.version == '200B':
# telegraph voltage/output translation from the Axopatch 200 amplifier
self.mode_tel = np.array([6, 4, 3, 2, 1])
self.modeNames = OrderedDict([(0, 'V-Clamp'), (2, 'I=0'), (4, 'I-Clamp Fast'), (3, 'I-Clamp Normal'), (1, 'Track'), ])
self.ivModes = {'V-Clamp':'vc', 'Track':'vc', 'I=0':'ic', 'I-Clamp Fast':'ic', 'I-Clamp Normal':'ic', 'vc':'vc', 'ic':'ic'}
self.modeAliases = {'ic': 'I-Clamp Fast', 'i=0': 'I=0', 'vc': 'V-Clamp'}
self.lpf_freq[-1] = 100.0 # 200B's highest LPF value is 100kHz instead of 50.
else:
raise Exception("AxoPatch200: version must be '200', '200A' or '200B' (got %r)" % self.version)
self.holding = {
'vc': config.get('vcHolding', -0.05),
'ic': config.get('icHolding', 0.0)
}
self.config = config
self.modeLock = Mutex(Mutex.Recursive) ## protects self.mdCanceled
self.devLock = Mutex(Mutex.Recursive) ## protects self.holding, possibly self.config, ..others, perhaps?
self.mdCanceled = False
DAQGeneric.__init__(self, dm, daqConfig, name)
self.modeDialog = Qt.QMessageBox()
self.modeDialog.hide()
self.modeDialog.setModal(False)
self.modeDialog.setWindowTitle("Mode Switch Request")
self.modeDialog.addButton(self.modeDialog.Cancel)
self.modeDialog.buttonClicked.connect(self.modeDialogClicked)
self.sigShowModeDialog.connect(self.showModeDialog)
self.sigHideModeDialog.connect(self.hideModeDialog)
try:
self.setHolding()
except:
printExc("Error while setting holding value:")
dm.declareInterface(name, ['clamp'], self)
def createTask(self, cmd, parentTask):
return AxoPatch200Task(self, cmd, parentTask)
def taskInterface(self, taskRunner):
return AxoPatchTaskGui(self, taskRunner, self.ivModes)
def deviceInterface(self,
|
jonathanmorgan/conv2wp
|
b2e/b2e_importer.py
|
Python
|
gpl-3.0
| 77,772
| 0.019454
|
'''
Copyright 2013 Jonathan Morgan
This file is part of http://github.com/jonathanmorgan/conv2wp.
conv2wp is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
conv2wp is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with http://github.com/jonathanmorgan/conv2wp. If not, see
<http://www.gnu.org/licenses/>.
'''
#===============================================================================#
# Imports.
#===============================================================================#
# base python imports
import datetime
import numbers
import re
# external modules
import MySQLdb
import bs4 # Beautiful Soup HTML parsing.
# conv2wp imports
from conv2wp.models import Author
from conv2wp.models import Batch
from conv2wp.models import Category
from conv2wp.models import Channel
from conv2wp.models import Comment
from conv2wp.models import Item
# python_utils imports
from python_utilities.strings.string_helper import StringHelper
#===============================================================================#
# Class definitions.
#===============================================================================#
class B2E_Importer():
#---------------------------------------------------------------------------#
# CONSTANTs-ish
#---------------------------------------------------------------------------#
STATUS_SUCCESS = "Success!"
STATUS_PREFIX_ERROR = "ERROR - "
B2E_POST_STATUS_PUBLISHED = "published"
B2E_POST_STATUS_DEPRECATED = "deprecated"
B2E_POST_STATUS_DRAFT = "draft"
RSS_DATE_STRFTIME_FORMAT = "%a, %d %b %Y %H:%M:%S"
# values that can be in user_idmode field
B2E_USER_DISPLAY_TYPE_NICKNAME = "nickname" # WHEN 'nickname' THEN u.user_nickname
B2E_USER_DISPLAY_TYPE_LOGIN = "login" # WHEN 'login' THEN u.user_login
B2E_USER_DISPLAY_TYPE_NAMEFL = "namefl" # WHEN 'namefl' THEN CONCAT(u.user_firstname, ' ', u.user_lastname)
B2E_USER_DISPLAY_TYPE_NAMELF = "namelf" # WHEN 'namelf' THEN CONCAT(u.user_lastname, ' ', u.user_firstname)
B2E_USER_DISPLAY_TYPE_FIRSTNAME = "firstname" # WHEN 'firstname' THEN u.user_firstname
B2E_USER_DISPLAY_TYPE_LASTNAME = "lastname" # WHEN 'lastname' THEN u.user_lastname
# replacing old URLs with new URLs.
BLOG_URL_OLD_HOST_AND_PATH_1 = "http://community.detroitnews.com/blogs/index.php/neighborhood/"
BLOG_URL_OLD_HOST_AND_PATH_2 = "http://community.detnews.com/blogs/index.php/neighborhood/"
BLOG_URL_NEW_HOST_AND_PATH = "http://blogs.detroitnews.com/goinghome/"
#---------------------------------------------------------------------------#
# instance variables
#---------------------------------------------------------------------------#
# database information
db_server = "localhost"
db_port = ""
db_database = ""
db_username = ""
db_password = ""
db_table_name_prefix = "evo_"
db_connection = None
db_cursor = None
# channel variables
channel_title = ""
channel_description = ""
channel_wxr_version = "1.2"
channel_generator = ""
channel_base_site_url = ""
channel_base_blog_url = ""
# configuration variables
time_zone = "-0500"
time_zone_offset = -5
store_excerpt = False
#---------------------------------------------------------------------------#
# class methods
#---------------------------------------------------------------------------#
@classmethod
def get_testing_instance( cls, password_IN = "" ):
# return reference
instance_OUT = None
# declare variables
status_message = ""
# create instance
b2e_importer = cls()
# initialize database
b2e_importer.db_database = "b2"
b2e_importer.db_username = "django_user"
b2e_importer.db_password = password_IN
# initialize channel information
b2e_importer.channel_title = "Going home: A journal on Detroit's neighborhoods"
b2e_importer.channel_description = "A Detroit News journal of the city's neighborhoods, starting with the Dobel St. area on the east side, just south of McNichols and east of Van Dyke. "
b2e_importer.channel_generator = "https://github.com/jonathanmorgan/conv2wp"
b2e_importer.channel_base_site_url = "http://detroitnews.com"
b2e_importer.channel_base_blog_url = "http://community.detroitnews.com/blogs/index.php/neighborhood"
# initialize time zone.
b2e_importer.time_zone = "-0500"
b2e_importer.time_zone_offset = -5
instance_OUT = b2e_importer
return instance_OUT
#-- END method get_testing_instance() --#
@classmethod
def test_class( cls, password_IN = "", slug_IN = "" ):
# return reference
instance_OUT = None
# declare variables
status_message = ""
# create instance
b2e_importer = cls.get_testing_instance( password_IN )
# run import for blog 14
status_message = b2e_importer.import_b2e( slug_IN, 14 )
# print the message
print( status_message )
# return instance
instance_OUT = b2e_importer
return instance_OUT
#-- END class method test_class() --#
@classmethod
    def find_bad_characters( cls, password_IN = "", blog_id_IN = -1, *args, **kwargs ):
'''
# get posts - if we have a blog ID, limit to that blog.
# For each post:
# - create Item, load with information from post.
# - get author user, add it to Authors.
        # - get comments for post, store them in Comments, associated to Item.
# - get categories for post, look up and associate them.
'''
# return reference
status_OUT = cls.STATUS_SUCCESS
# declare variables
b2e_importer = None
my_db_cursor = None
table_name_prefix = ""
sql_select_posts = ""
post_query_results = None
current_post = None
current_title = ""
current_body = ""
current_fail = False
fail_list = []
fail_count = 0
# create instance
b2e_importer = cls.get_testing_instance( password_IN )
# retrieve database cursor.
my_db_cursor = b2e_importer.get_database_cursor()
# get table prefix
table_name_prefix = b2e_importer.db_table_name_prefix
# create query to retrieve posts and author information.
sql_select_posts = "SELECT * FROM " + table_name_prefix + "posts ep"
sql_select_posts += " INNER JOIN " + table_name_prefix + "categories ec"
sql_select_posts += " ON ec.cat_ID = ep.post_main_cat_ID"
# got a blog ID?
if ( ( blog_id_IN ) and ( blog_id_IN != None ) and ( blog_id_IN != "" ) and ( isinstance( blog_id_IN, numbers.Integral ) == True ) and ( blog_id_IN > 0 ) ):
# we do - add where clause.
sql_select_posts += " WHERE ec.cat_blog_ID IN ( " + str( blog_id_IN ) + " )"
#-- END check to see if ID passed in. --#
# then, ORDER_BY.
sql_select_posts += " ORDER BY ep.post_datecreated ASC;"
# execute query
try:
# execute query and retrieve results
my_db_cursor.execute( sql_select_posts )
query_results = my_db_cursor.fetchall()
# loop over categories.
for current_post in query_results:
# initialize variables
current_fail = False
|
MasteringSpark/FirstStep
|
src/main/python/scikit/loss_function.py
|
Python
|
apache-2.0
| 918
| 0.002179
|
__author__ = 'asdf2014'
print(__doc__)
import numpy as np
import matplotlib.pyplot as plt
def modified_huber_loss(y_true, y_pred):
z = y_pred * y_true
loss = -4 * z
loss[z >= -1] = (1 - z[z >= -1]) ** 2
    loss[z >= 1.] = 0
return loss
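# Quick sanity check of the piecewise definition above: for margin
# z = y_pred * y_true = 0.5 the loss is (1 - 0.5)**2 = 0.25, for z >= 1 it is 0,
# and for z < -1 it follows the linear branch -4*z (e.g. z = -2 gives 8).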
xmin, xmax = -4, 4
xx = np.linspace(xmin, xmax, 100)
plt.plot([xmin, 0, 0, xmax], [1, 1, 0, 0], 'k-',
label="Zero-one loss")
plt.plot(xx, np.where(xx < 1, 1 - xx, 0), 'g-',
label="Hinge loss")
plt.plot(xx, -np.minimum(xx, 0), 'm-',
label="Perceptron loss")
plt.plot(xx, np.log2(1 + np.exp(-xx)), 'r-',
label="Log loss")
plt.plot(xx, np.where(xx < 1, 1 - xx, 0) ** 2, 'b-',
label="Squared hinge loss")
plt.plot(xx, modified_huber_loss(xx, 1), 'y--',
label="Modified Huber loss")
plt.ylim((0, 8))
plt.legend(loc="upper right")
plt.xlabel(r"Decision function $f(x)$")
plt.ylabel("$L(y, f(x))$")
plt.show()
|
felipenaselva/felipe.repository
|
plugin.video.streamhub/resources/lib/sources/en/watchfree.py
|
Python
|
gpl-2.0
| 8,466
| 0.012521
|
# -*- coding: utf-8 -*-
'''
Covenant Add-on
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,urllib,urlparse,base64
from resources.lib.modules import cleantitle
from resources.lib.modules import client
from resources.lib.modules import proxy
class source:
def __init__(self):
self.priority = 0
self.language = ['en']
self.domains = ['watchfree.to']
self.base_link = 'http://www.watchfree.ac'
self.base_link = 'https://watchfree.unblockall.xyz'
self.base_link = 'https://watchfree.unblocker.win'
self.moviesearch_link = '/?keyword=%s&search_section=1'
self.tvsearch_link = '/?keyword=%s&search_section=2'
def movie(self, imdb, title, localtitle, aliases, year):
try:
query = self.moviesearch_link % urllib.quote_plus(cleantitle.query(title))
query = urlparse.urljoin(self.base_link, query)
result = str(proxy.request(query, 'free movies'))
if 'page=2' in result or 'page%3D2' in result: result += str(proxy.request(query + '&page=2', 'free movies'))
result = client.parseDOM(result, 'div', attrs = {'class': 'item'})
title = 'watch' + cleantitle.get(title)
years = ['(%s)' % str(year), '(%s)' % str(int(year)+1), '(%s)' % str(int(year)-1)]
result = [(client.parseDOM(i, 'a', ret='href'), client.parseDOM(i, 'a', ret='title')) for i in result]
result = [(i[0][0], i[1][0]) for i in result if len(i[0]) > 0 and len(i[1]) > 0]
result = [i for i in result if any(x in i[1] for x in years)]
r = [(proxy.parse(i[0]), i[1]) for i in result]
match = [i[0] for i in r if title == cleantitle.get(i[1]) and '(%s)' % str(year) in i[1]]
match2 = [i[0] for i in r]
match2 = [x for y,x in enumerate(match2) if x not in match2[:y]]
if match2 == []: return
for i in match2[:5]:
try:
if len(match) > 0: url = match[0] ; break
r = proxy.request(urlparse.urljoin(self.base_link, i), 'free movies')
r = re.findall('(tt\d+)', r)
if imdb in r: url = i ; break
except:
pass
url = re.findall('(?://.+?|)(/.+)', url)[0]
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
return url
except:
return
def tvshow(self, imdb, tvdb, tvshowtitle, localtvshowtitle, aliases, year):
try:
query = self.tvsearch_link % urllib.quote_plus(cleantitle.query(tvshowtitle))
query = urlparse.urljoin(self.base_link, query)
result = str(proxy.request(query, 'free movies'))
if 'page=2' in result or 'page%3D2' in result: result += str(proxy.request(query + '&page=2', 'free movies'))
result = client.parseDOM(result, 'div', attrs = {'class': 'item'})
tvshowtitle = 'watch' + cleantitle.get(tvshowtitle)
years = ['(%s)' % str(year), '(%s)' % str(int(year)+1), '(%s)' % str(int(year)-1)]
result = [(client.parseDOM(i, 'a', ret='href'), client.parseDOM(i, 'a', ret='title')) for i in result]
result = [(i[0][0], i[1][0]) for i in result if len(i[0]) > 0 and len(i[1]) > 0]
result = [i for i in result if any(x in i[1] for x in years)]
r = [(proxy.parse(i[0]), i[1]) for i in result]
match = [i[0] for i in r if tvshowtitle == cleantitle.get(i[1]) and '(%s)' % str(year) in i[1]]
match2 = [i[0] for i in r]
match2 = [x for y,x in enumerate(match2) if x not in match2[:y]]
if match2 == []: return
for i in match2[:5]:
try:
if len(match) > 0: url = match[0] ; break
r = proxy.request(urlparse.urljoin(self.base_link, i), 'free movies')
r = re.findall('(tt\d+)', r)
if imdb in r: url = i ; break
except:
pass
url = re.findall('(?://.+?|)(/.+)', url)[0]
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
return url
except:
return
def episode(self, url, imdb, tvdb, title, premiered, season, episode):
try:
if url == None: return
url = urlparse.urljoin(self.base_link, url)
result = proxy.request(url, 'tv_episode_item')
result = client.parseDOM(result, 'div', attrs = {'class': 'tv_episode_item'})
title = cleantitle.get(title)
premiered = re.compile('(\d{4})-(\d{2})-(\d{2})').findall(premiered)[0]
premiered = '%s %01d %s' % (premiered[1].replace('01','January').replace('02','February').replace('03','March').replace('04','April').replace('05','May').replace('06','June').replace('07','July').replace('08','August').replace('09','September').replace('10','October').replace('11','November').replace('12','December'), int(premiered[2]), premiered[0])
result = [(client.parseDOM(i, 'a', ret='href'), client.parseDOM(i, 'span', attrs = {'class': 'tv_episode_name'}), client.parseDOM(i, 'span', attrs = {'class': 'tv_num_versions'})) for i in result]
result = [(i[0], i[1][0], i[2]) for i in result if len(i[1]) > 0] + [(i[0], None, i[2]) for i in result if len(i[1]) == 0]
result = [(i[0], i[1], i[2][0]) for i in result if len(i[2]) > 0] + [(i[0], i[1], None) for i in result if len(i[2]) == 0]
result = [(i[0][0], i[1], i[2]) for i in result if len(i[0]) > 0]
url = [i for i in result if title == cleantitle.get(i[1]) and premiered == i[2]][:1]
if len(url) == 0: url = [i for i in result if premiered == i[2]]
if len(url) == 0 or len(url) > 1: url = [i for i in result if 'season-%01d-episode-%01d' % (int(season), int(episode)) in i[0]]
url = url[0][0]
url = proxy.parse(url)
url = re.findall('(?://.+?|)(/.+)', url)[0]
            url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
return url
except:
return
def sources(self, url, hostDict, hostprDict):
try:
sources = []
if url == None: return sources
url = urlparse.urljoin(self.base_link, url)
result = proxy.request(url, 'link_ite')
links = client.parseDOM(result, 'table', attrs = {'class': 'link_ite.+?'})
            for i in links:
try:
url = client.parseDOM(i, 'a', ret='href')
url = [x for x in url if 'gtfo' in x][-1]
url = proxy.parse(url)
url = urlparse.parse_qs(urlparse.urlparse(url).query)['gtfo'][0]
url = base64.b64decode(url)
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
host = re.findall('([\w]+[.][\w]+)$', urlparse.urlparse(url.strip().lower()).netloc)[0]
if not host in hostDict: raise Exception()
host = host.encode('utf-8')
quality = client.parseDOM(i, 'div', attrs = {'class': 'quality'})
if any(x in ['[CAM]', '[TS]'] for x in quality): quality = 'CAM'
else: quality = 'SD'
quality = quality.encode('utf-8')
source
|
cryos/tomviz
|
acquisition/tomviz/__init__.py
|
Python
|
bsd-3-clause
| 2,177
| 0
|
import logging
import logging.handlers
import os
import sys
LOG_FORMAT = '[%(asctime)s] %(levelname)s: %(message)s'
MAX_LOG_SIZE = 1024 * 1024 * 10
LOG_BACKUP_COUNT = 5
LOG_PATH = log_path = os.path.join(os.path.expanduser('~'), '.tomviz', 'logs')
LOG_PATHS = {
'stderr': '%s/stderr.log' % LOG_PATH,
'stdout': '%s/stdout.log' % LOG_PATH,
'debug': '%s/debug.log' % LOG_PATH
}
try:
os.makedirs(LOG_PATH)
except os.error:
pass
class LoggerWriter:
def __init__(self, logger, level):
self._logger = logger
self._level = level
def write(self, message):
if message != '\n':
self._logger.log(self._level, message.rstrip('\n'))
def flush(self):
pass
def setup_std_loggers():
stdout_logger = logging.getLogger('stdout')
stdout_logger.setLevel(logging.INFO)
stderr_logger = logging.getLogger('stderr')
stderr_logger.setLevel(logging.ERROR)
stderr_log_writer = LoggerWriter(stderr_logger, logging.ERROR)
stdout_log_writer = LoggerWriter(stdout_logger, logging.INFO)
file_handler = logging.handlers.RotatingFileHandler(
LOG_PATHS['stderr'], maxBytes=MAX_LOG_SIZE,
backupCount=LOG_BACKUP_COUNT)
formatter = logging.Formatter(LOG_FORMAT)
file_handler.setFormatter(formatter)
stderr_logger.addHandler(file_handler)
file_handler = logging.handlers.RotatingFileHandler(
LOG_PATHS['stdout'], maxBytes=MAX_LOG_SIZE,
backupCount=LOG_BACKUP_COUNT)
file_handler.setFormatter(formatter)
stdout_logger.addHandler(file_handler)
    sys.stderr = stderr_log_writer
sys.stdout = stdout_log_writer
def setup_loggers(debug=False):
logger = logging.getLogger('tomviz')
    logger.setLevel(logging.DEBUG if debug else logging.INFO)
stream_handler = logging.StreamHandler()
file_handler = logging.handlers.RotatingFileHandler(
LOG_PATHS['debug'], maxBytes=MAX_LOG_SIZE,
backupCount=LOG_BACKUP_COUNT)
formatter = logging.Formatter(LOG_FORMAT)
stream_handler.setFormatter(formatter)
file_handler.setFormatter(formatter)
logger.addHandler(stream_handler)
logger.addHandler(file_handler)
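# Sketch of intended use: call setup_std_loggers() once at startup so print()
# output and tracebacks land in ~/.tomviz/logs/{stdout,stderr}.log, then
# setup_loggers(debug=True) for a 'tomviz' logger that also writes debug.log.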
|
aragos/tichu-tournament
|
python/openpyxl/styles/styleable.py
|
Python
|
mit
| 2,984
| 0.002011
|
from __future__ import absolute_import
# Copyright (c) 2010-2016 openpyxl
from warnings import warn
from .numbers import BUILTIN_FORMATS, BUILTIN_FORMATS_REVERSE
from .proxy import StyleProxy
from .cell_style import StyleArray
class StyleDescriptor(object):
def __init__(self, collection, key):
self.collection = collection
self.key = key
def __set__(self, instance, value):
coll = getattr(instance.parent.parent, self.collection)
if not getattr(instance, "_style"):
instance._style = StyleArray()
setattr(instance._style, self.key, coll.add(value))
def __get__(self, instance, cls):
coll = getattr(instance.parent.parent, self.collection)
if not getattr(instance, "_style"):
instance._style = StyleArray()
idx = getattr(instance._style, self.key)
return StyleProxy(coll[idx])
class NumberFormatDescriptor(object):
key = "numFmtId"
collection = '_number_formats'
def __set__(self, instance, value):
coll = getattr(instance.parent.parent, self.collection)
if value in BUILTIN_FORMATS_REVERSE:
idx = BUILTIN_FORMATS_REVERSE[value]
else:
idx = coll.add(value) + 164
if not getattr(instance, "_style"):
instance._style = StyleArray()
setattr(instance._style, self.key, idx)
    def __get__(self, instance, cls):
if not getattr(instance, "_style"):
instance._style = StyleArray()
idx = getattr(instance._style, self.key)
if idx < 164:
return BUILTIN_FORMATS.get(idx, "General")
coll = getattr(instance.parent.parent, self.collection)
return coll[idx - 164]
class StyleableObject(object):
"""
Base class for styleble objects implementing proxy and lookup functions
"""
font = StyleDescriptor('_fonts', "fontId")
fill = StyleDescriptor('_fills', "fillId")
border = StyleDescriptor('_borders', "borderId")
number_format = NumberFormatDescriptor()
protection = StyleDescriptor('_protections', "protectionId")
alignment = StyleDescriptor('_alignments', "alignmentId")
__slots__ = ('parent', '_style')
def __init__(self, sheet, style_array=None):
self.parent = sheet
if style_array is not None:
style_array = StyleArray(style_array)
self._style = style_array
@property
def style_id(self):
if self._style is None:
self._style = StyleArray()
return self.parent.parent._cell_styles.add(self._style)
@property
def has_style(self):
if self._style is None:
return False
return any(self._style)
@property
def pivotButton(self):
if self._style is None:
return False
return bool(self._style[6])
@property
def quotePrefix(self):
if self._style is None:
return False
return bool(self._style[7])
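# Illustrative behaviour (attribute names as defined above): reading
# obj.number_format returns a builtin format string such as 'General' when the
# stored index is below 164, otherwise the custom format at index - 164;
# assigning a non-builtin format stores it via coll.add(value) + 164.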
|
pilliq/balance
|
tests/test_loaders.py
|
Python
|
mit
| 1,764
| 0.004535
|
# AMDG
import unittest
from datetime import datetime
from balance import BasicLoader, RepayLoader
from base_test import BaseTest
class LoaderTests(BaseTest, unittest.TestCase):
def test_basic_loader(self):
loader = BasicLoader('tests/data/basic_loader')
entries, errors = loader.load(return_errors=True)
        self.assertEquals(1, len(entries))
entry = entries[0]
self.assertEquals(-5.00, entry.amount)
self.assertEquals(2, len(errors))
self.assertEquals(errors[0]['entry'], '\n')
self.assertTrue(errors[0]['error'].message.startswith('Not a valid entry'))
self.assertEquals(errors[1]['entry'], 'this is a bad line:\n')
self.assertTrue(errors[1]['error'].message.startswith('Not a valid entry'))
def test_repay_loader(self):
loader = RepayLoader('tests/data/repay_loader')
entries, errors = loader.load(return_errors=True)
self.assertEquals(4, len(entries))
entry = entries.pop()
self.assertEquals(-11.00, entry.amount)
self.assertEquals('repay', entry.category)
self.assertEquals('#2', entry.description)
self.assertEquals('Joe', entry.vendor)
self.assertEquals('cash', entry.method)
self.assertEquals(datetime(2014,10,3), entry.date)
for e in entries:
self.assertTrue(e.method in RepayLoader.methods)
self.assertEquals(2, len(errors))
self.assertEquals(errors[0]['entry'], '#hello\n')
self.assertTrue(errors[0]['error'].message.startswith('Not a valid entry'))
self.assertEquals(errors[1]['entry'], 'bad line\n')
self.assertTrue(errors[1]['error'].message.startswith('Not a valid entry'))
if __name__ == '__main__':
unittest.main()
|
ashutosh-mishra/youtube-dl
|
youtube_dl/extractor/dailymotion.py
|
Python
|
unlicense
| 8,993
| 0.003114
|
import re
import json
import itertools
from .common import InfoExtractor
from .subtitles import SubtitlesInfoExtractor
from ..utils import (
compat_urllib_request,
compat_str,
get_element_by_attribute,
get_element_by_id,
orderedSet,
ExtractorError,
)
class DailymotionBaseInfoExtractor(InfoExtractor):
@staticmethod
def _build_request(url):
"""Build a request with the family filter disabled"""
request = compat_urllib_request.Request(url)
request.add_header('Cookie', 'family_filter=off')
request.add_header('Cookie', 'ff=off')
return request
class DailymotionIE(DailymotionBaseInfoExtractor, SubtitlesInfoExtractor):
"""Information Extractor for Dailymotion"""
_VALID_URL = r'(?i)(?:https?://)?(?:www\.)?dailymotion\.[a-z]{2,3}/(?:embed/)?video/([^/]+)'
IE_NAME = u'dailymotion'
_FORMATS = [
(u'stream_h264_ld_url', u'ld'),
(u'stream_h264_url', u'standard'),
(u'stream_h264_hq_url', u'hq'),
(u'stream_h264_hd_url', u'hd'),
(u'stream_h264_hd1080_url', u'hd180'),
]
_TESTS = [
{
u'url': u'http://www.dailymotion.com/video/x33vw9_tutoriel-de-youtubeur-dl-des-video_tech',
u'file': u'x33vw9.mp4',
u'md5': u'392c4b85a60a90dc4792da41ce3144eb',
u'info_dict': {
u"uploader": u"Amphora Alex and Van .",
u"title": u"Tutoriel de Youtubeur\"DL DES VIDEO DE YOUTUBE\""
}
},
# Vevo video
{
u'url': u'http://www.dailymotion.com/video/x149uew_katy-perry-roar-official_musi',
u'file': u'USUV71301934.mp4',
u'info_dict': {
u'title': u'Roar (Official)',
u'uploader': u'Katy Perry',
u'upload_date': u'20130905',
},
u'params': {
u'skip_download': True,
},
u'skip': u'VEVO is only available in some countries',
},
# age-restricted video
{
u'url': u'http://www.dailymotion.com/video/xyh2zz_leanna-decker-cyber-girl-of-the-year-desires-nude-playboy-plus_redband',
u'file': u'xyh2zz.mp4',
u'md5': u'0d667a7b9cebecc3c89ee93099c4159d',
u'info_dict': {
u'title': 'Leanna Decker - Cyber Girl Of The Year Desires Nude [Playboy Plus]',
u'uploader': 'HotWaves1012',
u'age_limit': 18,
}
}
]
def _real_extract(self, url):
# Extract id and simplified title from URL
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group(1).split('_')[0].split('?')[0]
url = 'http://www.dailymotion.com/video/%s' % video_id
# Retrieve video webpage to extract further information
request = self._build_request(url)
webpage = self._download_webpage(request, video_id)
# Extract URL, uploader and title from webpage
self.report_extraction(video_id)
# It may just embed a vevo video:
m_vevo = re.search(
r'<link rel="video_src" href="[^"]*?vevo.com[^"]*?videoId=(?P<id>[\w]*)',
webpage)
if m_vevo is not None:
vevo_id = m_vevo.group('id')
self.to_screen(u'Vevo video detected: %s' % vevo_id)
return self.url_result(u'vevo:%s' % vevo_id, ie='Vevo')
video_uploader = self._search_regex([r'(?im)<span class="owner[^\"]+?">[^<]+?<a [^>]+?>([^<]+?)</a>',
# Looking for official user
r'<(?:span|a) .*?rel="author".*?>([^<]+?)</'],
webpage, 'video uploader', fatal=False)
age_limit = self._rta_search(webpage)
video_upload_date = None
mobj = re.search(r'<div class="[^"]*uploaded_cont[^"]*" title="[^"]*">([0-9]{2})-([0-9]{2})-([0-9]{4})</div>', webpage)
if mobj is not None:
video_upload_date = mobj.group(3) + mobj.group(2) + mobj.group(1)
embed_url = 'http://www.dailymotion.com/embed/video/%s' % video_id
embed_page = self._download_webpage(embed_url, video_id,
u'Downloading embed page')
info = self._search_regex(r'var info = ({.*?}),$', embed_page,
'video info', flags=re.MULTILINE)
info = json.loads(info)
        if info.get('error') is not None:
msg = 'Couldn\'t get video, Dailymotion says: %s' % info['error']['title']
raise ExtractorError(msg, expected=True)
formats = []
for (key, format_id) in self._FORMATS:
video_url = info.get(key)
if video_url is not None:
m_size = re.search(r'H264-(\d+)x(\d+)', video_url)
if m_size is not None:
width, height = m_size.group(1), m_size.group(2)
else:
width, height = None, None
formats.append({
'url': video_url,
'ext': 'mp4',
'format_id': format_id,
'width': width,
'height': height,
})
if not formats:
raise ExtractorError(u'Unable to extract video URL')
# subtitles
video_subtitles = self.extract_subtitles(video_id, webpage)
if self._downloader.params.get('listsubtitles', False):
self._list_available_subtitles(video_id, webpage)
return
return {
'id': video_id,
'formats': formats,
'uploader': video_uploader,
'upload_date': video_upload_date,
'title': self._og_search_title(webpage),
'subtitles': video_subtitles,
'thumbnail': info['thumbnail_url'],
'age_limit': age_limit,
}
def _get_available_subtitles(self, video_id, webpage):
try:
sub_list = self._download_webpage(
'https://api.dailymotion.com/video/%s/subtitles?fields=id,language,url' % video_id,
video_id, note=False)
except ExtractorError as err:
self._downloader.report_warning(u'unable to download video subtitles: %s' % compat_str(err))
return {}
info = json.loads(sub_list)
if (info['total'] > 0):
sub_lang_list = dict((l['language'], l['url']) for l in info['list'])
return sub_lang_list
self._downloader.report_warning(u'video doesn\'t have subtitles')
return {}
class DailymotionPlaylistIE(DailymotionBaseInfoExtractor):
IE_NAME = u'dailymotion:playlist'
_VALID_URL = r'(?:https?://)?(?:www\.)?dailymotion\.[a-z]{2,3}/playlist/(?P<id>.+?)/'
_MORE_PAGES_INDICATOR = r'<div class="next">.*?<a.*?href="/playlist/.+?".*?>.*?</a>.*?</div>'
_PAGE_TEMPLATE = 'https://www.dailymotion.com/playlist/%s/%s'
def _extract_entries(self, id):
video_ids = []
for pagenum in itertools.count(1):
request = self._build_request(self._PAGE_TEMPLATE % (id, pagenum))
webpage = self._download_webpage(request,
id, u'Downloading page %s' % pagenum)
playlist_el = get_element_by_attribute(u'class', u'row video_list', webpage)
video_ids.extend(re.findall(r'data-id="(.+?)"', playlist_el))
if re.search(self._MORE_PAGES_INDICATOR, webpage, re.DOTALL) is None:
break
return [self.url_result('http://www.dailymotion.com/video/%s' % video_id, 'Dailymotion')
for video_id in orderedSet(video_ids)]
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
playlist_id = mobj.group('id')
webpage = self._download_webpage(url, playlist_id)
return {'_type': 'playlist',
'id': playlist_id,
'title': get_element_by_id(u'playlist_name', webpage),
'entries': self._extract_entries(playlist_id),
}
class Dailymot
|
BJEBN/Geometric-Analysis
|
Scripts/Old Scripts/Extract_Non_Duplicate_Nodes.py
|
Python
|
gpl-3.0
| 2,434
| 0.013969
|
#==================================
#Author Bjorn Burr Nyberg
#University of Bergen
#Contact bjorn.nyberg@uni.no
#Copyright 2013
#==================================
'''This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.'''
#==================================
#Definition of inputs and outputs
#==================================
##[SAFARI]=group
##Polygon=vector
##Precision=number 2
##Output=output vector
#Algorithm body
#==================================
from qgis.core import *
from PyQt4.QtCore import *
from itertools import chain
import processing as st
keepNodes = set([])
layer = st.getobject(Polygon)
fields = QgsFields()
fields.append( QgsField( "id", QVariant.Int ))
crs = layer.crs()
writer = QgsVectorFileWriter(Output, "CP1250", fields, 1,layer.crs(), "ESRI Shapefile")
fet = QgsFeature()
Total = layer.featureCount()
progress.setText('Extracting Nodes')
for enum,feature in enumerate(layer.getFeatures()):
progress.setPercentage(int((100 * enum)/Total))
geomType = feature.geometry()
if geomType.type() == QGis.Polygon:
if geomType.isMultipart():
geom = geomType.asMultiPolygon()
geom = list(chain(*chain(*geom)))
else:
geom = geomType.asPolygon()
geom = list(chain(*geom))
elif geomType.type() == QGis.Line:
if geomType.isMultipart():
geom = geomType.asMultiPolyline()
geom = list(chain(*geom))
else:
geom = geomType.asPolyline()
for points in geom:
if (round(points.x(),Precision),round(points.y(),Precision)) not in keepNodes:
pnt = QgsGeometry.fromPoint(QgsPoint(points.x(),points.y()))
fet.setGeometry(pnt)
writer.addFeature(fet)
keepNodes.update([(round(points.x(),Precision),round(points.y(),Precision))])
del writer
|
OCA/l10n-brazil
|
l10n_br_hr/tests/__init__.py
|
Python
|
agpl-3.0
| 71
| 0
|
from . import test_l10n_br_hr
from . import test_hr_employee_dependent
|
jdgwartney/boundary-api-cli
|
tests/unit/boundary/cli_test.py
|
Python
|
apache-2.0
| 2,812
| 0.001067
|
#!/usr/bin/env python
#
# Copyright 2015 BMC Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import random
import string
import subprocess
import sys
import re
from cli_test_parameters import CLITestParameters
class CLITest:
def __init__(self):
pass
@staticmethod
def check_description(test_case, cli):
parameters = CLITestParameters()
test_case.assertEqual(parameters.get_value(cli.__class__.__name__, 'description'), cli.get_description())
@staticmethod
def check_curl(test_case, cli, output):
parameters = CLITestParameters()
p = re.compile(r'-u ".*?"\s')
a = p.findall(output)
output = output.replace(a[0], '')
test_case.assertEqual(parameters.get_value(cli.__class__.__name__, 'curl').encode('utf-8'), output.encode('utf-8'))
@staticmethod
def get_cli_name_from_class(i):
name = i.__class__.__name__
m = re.findall("([A-Z][a-z]+)", name)
        m = [a.lower() for a in m]
cli_name = str.join('-', m)
return cli_name
@staticmethod
def check_cli_help(test_case, cli):
parameters = CLITestParameters()
name = cli.__class__.__name__
expected_output = parameters.get_cli_help(name)
m = re.findall("([A-Z][a-z]+)", name)
m = [a.lower() for a in m]
command = str.join('-', m)
try:
output = subprocess.check_output([command, '-h'])
test_case.assertEqual(expected_output, output)
except subprocess.CalledProcessError as e:
sys.stderr.write("{0}: {1}\n".format(e.output, e.returncode))
@staticmethod
def get_cli_output(cli, args):
output = None
try:
command = CLITest.get_cli_name_from_class(cli)
args.insert(0, command)
output = subprocess.check_output(args=args)
except subprocess.CalledProcessError as e:
sys.stderr.write("{0}: {1}\n".format(e.output, e.returncode))
return output
@staticmethod
def random_string(n):
return ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(n))
@staticmethod
def is_int(s):
try:
int(s)
return True
except ValueError:
return False
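# Illustrative mapping performed by get_cli_name_from_class: a CLI class named,
# say, MetricList (hypothetical) becomes the command name 'metric-list' -- the
# CamelCase name is split on capitals, lowercased and joined with '-'.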
|
BRCDcomm/pynos
|
pynos/versions/ver_6/ver_6_0_1/yang/brocade_maps_ext.py
|
Python
|
apache-2.0
| 12,855
| 0.0021
|
#!/usr/bin/env python
import xml.etree.ElementTree as ET
class brocade_maps_ext(object):
"""Auto generated class.
"""
def __init__(self, **kwargs):
self._callback = kwargs.pop('callback')
def maps_get_all_policy_input_rbridge_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_all_policy = ET.Element("maps_get_all_policy")
config = maps_get_all_policy
input = ET.SubElement(maps_get_all_policy, "input")
rbridge_id = ET.SubElement(input, "rbridge-id")
rbridge_id.text = kwargs.pop('rbridge_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_all_policy_output_policy_policyname(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_all_policy = ET.Element("maps_get_all_policy")
config = maps_get_all_policy
output = ET.SubElement(maps_get_all_policy, "output")
policy = ET.SubElement(output, "policy")
policyname = ET.SubElement(policy, "policyname")
policyname.text = kwargs.pop('policyname')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_input_rbridge_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
input = ET.SubElement(maps_get_rules, "input")
rbridge_id = ET.SubElement(input, "rbridge-id")
rbridge_id.text = kwargs.pop('rbridge_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_rbridgeid(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
rbridgeid = ET.SubElement(rules, "rbridgeid")
rbridgeid.text = kwargs.pop('rbridgeid')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_rulename(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
rulename = ET.SubElement(rules, "rulename")
rulename.text = kwargs.pop('rulename')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_groupname(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
groupname = ET.SubElement(rules, "groupname")
groupname.text = kwargs.pop('groupname')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_monitor(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
monitor = ET.SubElement(rules, "monitor")
monitor.text = kwargs.pop('monitor')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_op(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
op = ET.SubElement(rules, "op")
op.text = kwargs.pop('op')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_value(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
value = ET.SubElement(rules, "value")
value.text = kwargs.pop('value')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_action(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
action = ET.SubElement(rules, "action")
action.text = kwargs.pop('action')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_timebase(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
timebase = ET.SubElement(rules, "timebase")
timebase.text = kwargs.pop('timebase')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_policyname(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
policyname = ET.SubElement(rules, "policyname")
policyname.text = kwargs.pop('policyname')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_all_policy_input_rbridge_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_all_policy = ET.Element("maps_get_all_policy")
config = maps_get_all_policy
input = ET.SubElement(maps_get_all_policy, "input")
rbridge_id = ET.SubElement(input, "rbridge-id")
rbridge_id.text = kwargs.pop('rbridge_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_all_policy_output_policy_policyname(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_all_policy = ET.Element("maps_get_all_policy")
config = maps_get_all_policy
output = ET.SubElement(maps_get_all_policy, "output")
        policy = ET.SubElement(output, "policy")
        policyname = ET.SubElement(policy, "policyname")
policyname.text = kwargs.pop('policyname')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_input_rbridge_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
input = ET.SubElement(maps_get_rules, "input")
rbridge_id = ET.SubElement(input, "rbridge-id")
rbridge_id.text = kwargs.pop('rbridge_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_rbridgeid(self, **kwargs):
"
|
iw3hxn/LibrERP
|
report_aeroo/wizard/report_print_by_action.py
|
Python
|
agpl-3.0
| 2,640
| 0.006061
|
##############################################################################
#
# Copyright (c) 2008-2012 Alistek Ltd (http://www.alistek.com) All Rights Reserved.
# General contacts <info@alistek.com>
#
# WARNING: This program as such is intended to be used by professional
#  programmers who take the whole responsibility of assessing all potential
#  consequences resulting from its eventual inadequacies and bugs
#  End users who are looking for a ready-to-use solution with commercial
#  guarantees and support are strongly advised to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This module is GPLv3 or newer and incompatible
# with OpenERP SA "AGPL + Private Use License"!
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from osv import osv
from osv import fields
class report_print_by_action(osv.osv_memory):
_name = 'aeroo.print_by_action'
def to_print(self, cr, uid, ids, context=None):
this = self.browse(cr, uid, ids[0], context=context)
        report_xml = self.pool.get(context['active_model']).browse(cr, uid, context['active_id'], context=context)
print_ids = eval("[%s]" % this.object_ids, {})
data = {'model':report_xml.model, 'ids':print_ids, 'id':print_ids[0], 'report_type': 'aeroo'}
return {
'type': 'ir.actions.report.xml',
'report_name': report_xml.report_name,
'datas': data,
'context':context
}
_columns = {
'name':fields.text('Object Model', readonly=True),
'object_ids':fields.char('Object IDs', size=250, required=True, help="Comma separated records ID"),
}
def _get_model(self, cr, uid, context):
return self.pool.get(context['active_model']).read(cr, uid, context['active_id'], ['model'], context=context)['model']
_defaults = {
'name': _get_model,
}
report_print_by_action()
|
SolusOS-discontinued/RepoHub
|
buildfarm/views.py
|
Python
|
mit
| 5,488
| 0.030977
|
# Patchless XMLRPC Service for Django
# Kind of hacky, and stolen from Crast on irc.freenode.net:#django
# Self documents as well, so if you call it from outside of an XML-RPC Client
# it tells you about itself and its methods
#
# Brendan W. McAdams <brendan.mcadams@thewintergrp.com>
# SimpleXMLRPCDispatcher lets us register xml-rpc calls w/o
# running a full XMLRPC Server. It's up to us to dispatch data
from django.http import HttpResponse
from django.shortcuts import render, get_object_or_404
from buildfarm.models import Package, Queue
from repository.models import Repository, PisiPackage
from source.models import SourcePackage
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
import xmlrpclib
from django.template.loader import render_to_string
from django.utils import simplejson
from django.template import Context, Template
from django import forms
from django.utils import simplejson
from django.db import transaction
from django.shortcuts import redirect
from django.contrib.admin.views.decorators import staff_member_required
from django.contrib import messages
from buildfarm.tasks import build_all_in_queue
class NewQueueForm (forms.ModelForm):
class Meta:
model = Queue
fields = ( 'name', 'builder', 'source_repo', 'binman', 'sandboxed')
def site_index (request):
queues = Queue.objects.all ()
context = { 'queues': queues, 'navhint': 'queue', 'not_reload': 'true', 'form' : NewQueueForm() }
return render (request, "buildfarm/site_index.html", context)
def package_progress_json (request, queue_id):
rdict = {}
q = Queue.objects.get(id=queue_id)
packages = Package.objects.filter(queue=q)
pct =float ( float(q.current) / q.length ) * 100
rdict = { 'percent' : pct, 'total': q.length, 'current': q.current, 'name_current': q.current_package_name }
json = simplejson.dumps(rdict, ensure_ascii=False)
return HttpResponse( json, content_type='application/json')
@staff_member_required
def delete_from_queue (request, package_id):
pkg = get_object_or_404 (Package, id=package_id)
q_id = pkg.queue.id
pkg.delete ()
return redirect ('/buildfarm/queue/%d/' % q_id)
@staff_member_required
def delete_queue (request, queue_id):
queue = get_object_or_404 (Queue, id=queue_id)
queue.delete ()
return redirect ('/manage/')
@staff_member_required
def new_queue (request):
if request.method == 'POST':
# New submission
form = NewQueueForm (request.POST)
rdict = { 'html': "<b>Fail</b>", 'tags': 'fail' }
        context = Context ({'form': form})
if form.is_valid ():
rdict = { 'html': "The new queue has been set up", 'tags': 'success' }
model = form.save (commit=False)
model.current = 0
model.length = 0
model.current_package_name = ""
model.save ()
else:
html = render_to_string ('buildfarm/new_queue.html', {'form_queue': form})
            rdict = { 'html': html, 'tags': 'fail' }
json = simplejson.dumps(rdict, ensure_ascii=False)
print json
# And send it off.
return HttpResponse( json, content_type='application/json')
else:
form = NewQueueForm ()
context = {'form': form }
return render (request, 'buildfarm/new_queue.html', context)
def queue_index(request, queue_id=None):
q = get_object_or_404 (Queue, id=queue_id)
packages = Package.objects.filter(queue=q).order_by('build_status')
paginator = Paginator (packages, 15)
pkg_count = q.length
if (pkg_count > 0):
pct =float ( float(q.current) / q.length ) * 100
else:
pct = 0
page = request.GET.get("page")
try:
packages = paginator.page(page)
except PageNotAnInteger:
packages = paginator.page (1)
except EmptyPage:
packages = paginator.page (paginator.num_pages)
context = {'navhint': 'queue', 'queue': q, 'package_list': packages, 'total_packages': q.length, 'current_package': q.current, 'total_pct': pct, 'current_package_name': q.current_package_name}
return render (request, "buildfarm/index.html", context)
@staff_member_required
def build_queue (request, queue_id):
queue = Queue.objects.get (id=queue_id)
messages.info (request, "Starting build of \"%s\" queue" % queue.name)
build_all_in_queue.delay (queue_id)
return redirect ('/manage/')
@staff_member_required
def populate_queue (request, queue_id):
q = Queue.objects.get(id=queue_id)
packages = SourcePackage.objects.filter (repository=q.source_repo)
failList = list ()
for package in packages:
binaries = PisiPackage.objects.filter(source_name=package.name)
if len(binaries) == 0:
# We have no binaries
print "New package for source: %s" % (package.name)
failList.append (package)
else:
for package2 in binaries:
if package2.release != package.release:
print "Newer release for: %s" % package2.name
failList.append (package)
break
try:
binary = Package.objects.get(queue=q, name=package.name)
failList.remove (package)
except:
pass
with transaction.commit_on_success():
for fail in failList:
pkg = Package ()
pkg.name = fail.name
pkg.version = fail.version
pkg.build_status = "pending"
pkg.queue = q
pkg.spec_uri = fail.source_uri
pkg.save ()
return redirect ("/buildfarm/queue/%d" % q.id)
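
# --- Illustrative sketch (editor's note, not part of the original module) ---
# populate_queue() above queues a source package when it has no binaries yet,
# or when any binary lags behind the source release, unless it is already
# queued. The helper below restates that rule over plain values; the argument
# shapes are assumptions, not the real ORM objects.
def needs_build(source_release, binary_releases, already_queued):
    if already_queued:
        return False
    if not binary_releases:
        # No binaries have been built yet for this source package
        return True
    # Any out-of-date binary forces a rebuild
    return any(release != source_release for release in binary_releases)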
|
AndrewKLeech/Pip-Boy
|
Game.py
|
Python
|
mit
| 42,161
| 0.0347
|
import tkinter as tk
from tkinter import *
import spotipy
import webbrowser
from PIL import Image, ImageTk
import os
from twitter import *
from io import BytesIO
import urllib.request
import urllib.parse
import PIL.Image
from PIL import ImageTk
import simplejson
song1 = "spotify:artist:58lV9VcRSjABbAbfWS6skp"
song2 = 'spotify:artist:0PFtn5NtBbbUNbU9EAmIWF'
song3 = 'spotify:artist:5INjqkS1o8h1imAzPqGZBb'
song4 = 'spotify:artist:1HwM5zlC5qNWhJtM00yXzG'
song5 = 'spotify:artist:4tZwfgrHOc3mvqYlEYSvVi'
song6 = 'spotify:artist:3AA28KZvwAUcZuOKwyblJQ'
song7 = 'spotify:artist:5T0MSzX9RC5NA6gAI6irSn'
song8 = 'spotify:artist:0SwO7SWeDHJijQ3XNS7xEE'
song9 = 'spotify:artist:1dWEYMPtNmvSVaDNLgB6NV'
# Put in token, token_key, con_secret, con_secret_key
t = Twitter(
auth=OAuth('705153959368007680-F5OUf8pvmOlXku1b7gpJPSAToqzV4Fb', 'bEGLkUJBziLc17EuKLTAMio8ChmFxP9aHYADwRXnxDsoC',
'gYDgR8lcTGcVZS9ucuEIYsMuj', '1dwHsLDN2go3aleQ8Q2vcKRfLETc51ipsP8310ayizL2p3Ycii'))
numberOfTweets = 3
class SetUp(tk.Tk): #inheriting
def __init__(self, *args, **kwargs): #method, initialisng
tk.Tk.__init__(self, *args, **kwargs)
tk.Tk.wm_iconbitmap(self, default="favicon.ico")
container = tk.Frame(self) #container for holding everything
container.pack(side = "top", fill = None, expand = False)
container.pack_propagate(0) # don't shrink
container.grid_rowconfigure(0, weight = 1)
container.grid_columnconfigure(0, weight = 1)
self.frames = {} #dictionary of frames
for F in (StartPage, RadioPage, MapPage, DataPage, InvPage, StatsPage): #loop through the number of pages
frame = F(container, self)
self.frames[F] = frame
frame.grid(row = 0, column = 0, sticky = "nsew") #alignment plus stretch
self.show_frame(StartPage)
def show_frame(self, cont):
frame = self.frames[cont]
frame.tkraise() #raised to the front
def music(self, uri):
spotify = spotipy.Spotify()
results = spotify.artist_top_tracks(uri)
#getting the track and audio link to top song
for track in results['tracks'][:1]:
text2 = track['preview_url']
return text2
def showTweets(self, x, num):
# display a number of new tweets and usernames
for i in range(0, num):
line1 = (x[i]['user']['screen_name'])
line2 = (x[i]['text'])
#w = Label(self, text=line1 + "\n" + line2 + "\n\n")
#w.pack()
self.label = Label(self,text=line1 + "\n" + line2 + "\n\n", width = 100)
self.label.place(x = 215, y = 0)
self.label.pack()
def getTweets(self):
x = t.statuses.home_timeline(screen_name="AndrewKLeech")
return x
def tweet(self):
text = entryWidget.get().strip()
if text == "":
print("Empty")
else:
t.statuses.update(status=text)
entryWidget.delete(0,END)
print("working")
def get_map(self,lat,lng):
latString = str(lat)
lngString = str(lng)
#Map url from google maps, has marker and colors included
url = ("https://maps.googleapis.com/maps/api/staticmap?center="+latString+","+lngString+"&size=450x250&zoom=16&style=feature:road.local%7Celement:geometry%7Ccolor:0x00ff00%7Cweight:1%7Cvisibility:on&style=feature:landscape%7Celement:geometry.fill%7Ccolor:0x000000%7Cvisibility:on&style=feature:landscape%7Celement:geometry.fill%7Ccolor:0x000000%7Cvisibility:on&style=feature:administrative%7Celement:labels%7Cweight:3.9%7Cvisibility:on%7Cinverse_lightness:true&style=feature:poi%7Cvisibility:simplified&markers=color:blue%7Clabel:H%7C"+latString+","+lngString+"&markers=size:tiny%7Ccolor:green%7CDelta+Junction,AK\&sensor=false")
buffer = BytesIO(urllib.request.urlopen(url).read())
pil_image = PIL.Image.open(buffer)
tk_image = ImageTk.PhotoImage(pil_image)
# put the image in program
mapLabel = Label(image=tk_image)
mapLabel.pack()
mainloop()
def get_coordinates(self,from_sensor=False):
if entryWidget2.get().strip() == "":
print("Empty")
mapLabel.pack_forget()
else:
query=entryWidget2.get().strip()
print("working")
query = query.encode('utf-8')
params = {
'address': query,
'sensor': "true" if from_sensor else "false"
}
#url used for google geocodeing api
googleGeocodeUrl = 'http://maps.googleapis.com/maps/api/geocode/json?'
url = googleGeocodeUrl + urllib.parse.urlencode(params)
json_response = urllib.request.urlopen(url)
response = simplejson.loads(json_response.read())
if response['results']:
location = response['results'][0]['geometry']['location']
latitude, longitude = location['lat'], location['lng']
print(query, latitude, longitude)
else:
latitude, longitude = None, None
print(query, "<no results>")
self.get_map(latitude, longitude)
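
    # Illustrative sketch (editor's note): get_coordinates() above reads
    # response['results'][0]['geometry']['location'] from the Google geocoding
    # reply, which (values invented) looks roughly like:
    #   {"results": [{"geometry": {"location": {"lat": 54.59, "lng": -5.93}}}],
    #    "status": "OK"}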
def game(self):
w, h = 500, 500
# Pack pygame in `embed`.
root = tk.Tk()
embed = tk.Frame(root, width=w, height=h)
embed.pack()
# Tell pygame's SDL window which window ID to use
os.environ['SDL_WINDOWID'] = str(embed.winfo_id())
# Show the window so it's assigned an ID.
root.update()
# Game for Pip-Boy
# Imports
import pygame
import random
# Initialise PyGame
pygame.init()
# Set display width and height
display_width = 500
display_height = 500
# Create a gameDisplay using display_width and display_height
gameDisplay = pygame.display.set_mode((display_width, display_height))
        # Set the caption of the window to Tank War!
pygame.display.set_caption('Tank War!')
# Create colours using RGB values
black = (0, 0, 0)
green = (0, 150, 0)
lightGreen = (0, 255, 0)
# Create fonts
smallFont = pygame.font.SysFont(None, 25)
mediumFont = pygame.font.SysFont(None, 50)
largeFont = pygame.font.SysFont(None, 75)
# Initialise the clock for FPS
clock = pygame.time.Clock()
# Tank part dimensions
tankWidth = 40
tankHeight = 20
turretWidth = 5
wheelWidth = 5
# Ground height
ground = .85 * display_height
# Load sounds
fireSound = pygame.mixer.Sound("fireSound.wav")
cannon = pygame.mixer.Sound("cannon.wav")
def text_objects(text, color, size="smallFont"): # Function returns text for blitting
if size == "smallFont":
textSurface = smallFont.render(text, True, color)
if size == "mediumFont":
textSurface = mediumFont.render(text, True, color)
if size == "largeFont":
textSurface = largeFont.render(text, True, color)
return textSurface, textSurface.get_rect()
        def text_to_button(msg, color, buttonx, buttony, buttonwidth, buttonheight,
                           size="smallFont"):  # Blits text to button
textSurface, textRect = text_objects(msg, color, size)
textRect.center = ((buttonx + buttonwidth / 2), buttony + (buttonheight / 2))
gameDisplay.blit(textSurface, textRect)
def message_to_screen(msg, color, y_displace=0, size="smallFont"): # Blits the text returned from text_objects
textSurface, textRect = text_objects(msg, color, size)
textRect.center = (int(display_width / 2), int(display_height / 2) + y_displace)
gameDisplay.blit(textSurface, textRect)
def tank(x, y, turretPosition): # Draws the tank and turret
# Casting x and y to be ints
x = int(x)
y = int(y)
# Set possible turret positions
turrets = [(x - 27, y - 2),
(x - 26, y - 5),
(x - 25, y - 8),
(x - 23, y - 12),
(x - 2
|
vmagamedov/hiku
|
hiku/federation/denormalize.py
|
Python
|
bsd-3-clause
| 314
| 0
|
from collections import deque
from hiku.denormalize.graphql import DenormalizeGraphQL
class DenormalizeEntityGraphQL(DenormalizeGraphQL):
    def __init__(self, graph, result, root_type_name):
        super().__init__(graph, result, root_type_name)
self._type = deque([graph.__types__[root_type_name]])
|
svaarala/duktape
|
tests/perf/test-string-array-concat.py
|
Python
|
mit
| 174
| 0.011494
|
def test():
for i in xrange(int(5e3)):
t = []
for j in xrange(int(1e4)):
#t[j] = 'x'
t.append('x')
t = ''.join(t)
test()
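
# Editor's note (illustrative sketch, not part of the original benchmark):
# the list-append + ''.join() pattern above is the usual linear-time idiom.
# A naive concatenation variant of the same loop would look like this and is
# typically much slower for large strings:
def test_concat():
    for i in xrange(int(5e3)):
        t = ''
        for j in xrange(int(1e4)):
            t += 'x'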
|
kbussell/django-auditlog
|
src/auditlog/diff.py
|
Python
|
mit
| 5,123
| 0.002733
|
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.db.models import Model, NOT_PROVIDED, DateTimeField
from django.utils import timezone
from django.utils.encoding import smart_text
def track_field(field):
"""
Returns whether the given field should be tracked by Auditlog.
Untracked fields are many-to-many relations and relations to the Auditlog LogEntry model.
:param field: The field to check.
:type field: Field
:return: Whether the given field should be tracked.
:rtype: bool
"""
from auditlog.models import LogEntry
# Do not track many to many relations
if field.many_to_many:
return False
# Do not track relations to LogEntry
if getattr(field, 'remote_field', None) is not None and field.remote_field.model == LogEntry:
return False
# 1.8 check
elif getattr(field, 'rel', None) is not None and field.rel.to == LogEntry:
return False
return True
def get_fields_in_model(instance):
"""
Returns the list of fields in the given model instance. Checks whether to use the official _meta API or use the raw
data. This method excludes many to many fields.
:param instance: The model instance to get the fields for
:type instance: Model
:return: The list of fields for the given model (instance)
:rtype: list
"""
assert isinstance(instance, Model)
# Check if the Django 1.8 _meta API is available
use_api = hasattr(instance._meta, 'get_fields') and callable(instance._meta.get_fields)
if use_api:
return [f for f in instance._meta.get_fields() if track_field(f)]
return instance._meta.fields
def get_field_value(obj, field):
"""
Gets the value of a given model instance field.
:param obj: The model instance.
:type obj: Model
:param field: The field you want to find the value of.
:type field: Any
:return: The value of the field as a string.
:rtype: str
"""
if isinstance(field, DateTimeField):
# DateTimeFields are timezone-aware, so we need to convert the field
        # to its naive form before we can accurately compare them for changes.
try:
value = field.to_python(getattr(obj, field.name, None))
if value is not None and settings.USE_TZ and not timezone.is_naive(value):
value = timezone.make_naive(value, timezone=timezone.utc)
except ObjectDoesNotExist:
            value = field.default if field.default is not NOT_PROVIDED else None
else:
try:
value = smart_text(getattr(obj, field.name, None))
except ObjectDoesNotExist:
value = field.default if field.default is not NOT_PROVIDED else None
return value
def model_instance_diff(old, new):
"""
Calculates the differences between two model instances. One of the instances may be ``None`` (i.e., a newly
created model or deleted model). This will cause all fields with a value to have changed (from ``None``).
:param old: The old state of the model instance.
:type old: Model
:param new: The new state of the model instance.
:type new: Model
:return: A dictionary with the names of the changed fields as keys and a two tuple of the old and new field values
as value.
:rtype: dict
"""
from auditlog.registry import auditlog
if not(old is None or isinstance(old, Model)):
raise TypeError("The supplied old instance is not a valid model instance.")
if not(new is None or isinstance(new, Model)):
raise TypeError("The supplied new instance is not a valid model instance.")
diff = {}
if old is not None and new is not None:
fields = set(old._meta.fields + new._meta.fields)
model_fields = auditlog.get_model_fields(new._meta.model)
elif old is not None:
fields = set(get_fields_in_model(old))
model_fields = auditlog.get_model_fields(old._meta.model)
elif new is not None:
fields = set(get_fields_in_model(new))
model_fields = auditlog.get_model_fields(new._meta.model)
else:
fields = set()
model_fields = None
# Check if fields must be filtered
if model_fields and (model_fields['include_fields'] or model_fields['exclude_fields']) and fields:
filtered_fields = []
if model_fields['include_fields']:
filtered_fields = [field for field in fields
if field.name in model_fields['include_fields']]
else:
filtered_fields = fields
if model_fields['exclude_fields']:
filtered_fields = [field for field in filtered_fields
if field.name not in model_fields['exclude_fields']]
fields = filtered_fields
for field in fields:
old_value = get_field_value(old, field)
new_value = get_field_value(new, field)
if old_value != new_value:
diff[field.name] = (smart_text(old_value), smart_text(new_value))
if len(diff) == 0:
diff = None
return diff
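
# --- Illustrative usage sketch (editor's note, not part of auditlog) ---
# Diffing two unsaved instances of a hypothetical Poll model; the values are
# invented and only the shape of the result matters:
#
#   old = Poll(question="Old question")
#   new = Poll(question="New question")
#   model_instance_diff(old, new)
#   -> {'question': ('Old question', 'New question')}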
|
cpennington/edx-platform
|
common/djangoapps/student/middleware.py
|
Python
|
agpl-3.0
| 1,580
| 0.001266
|
"""
Middleware that checks user standing for the purpose of keeping users with
disabled accounts from accessing the site.
"""
from django.conf import settings
from django.http import HttpResponseForbidden
from django.utils.deprecation import MiddlewareMixin
from django.utils.translation import ugettext as _
from openedx.core.djangolib.markup import HTML, Text
from student.models import UserStanding
class UserStandingMiddleware(MiddlewareMixin):
"""
Checks a user's standing on request. Returns a 403 if the user's
status is 'disabled'.
"""
def process_request(self, request):
user = request.user
try:
user_account = UserStanding.objects.get(user=user.id)
# because user is a unique field in UserStanding, there will either be
# one or zero user_accounts associated with a UserStanding
except UserStanding.DoesNotExist:
pass
else:
if user_account.account_status == UserStanding.ACCOUNT_DISABLED:
                msg = Text(_(
                    'Your account has been disabled. If you believe '
                    'this was done in error, please contact us at '
'{support_email}'
)).format(
support_email=HTML(u'<a href="mailto:{address}?subject={subject_line}">{address}</a>').format(
address=settings.DEFAULT_FEEDBACK_EMAIL,
subject_line=_('Disabled Account'),
),
)
return HttpResponseForbidden(msg)
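
# --- Illustrative configuration sketch (editor's note) ---
# The middleware only takes effect once it is listed in the Django settings.
# The dotted path below is inferred from this module's location and may
# differ in an actual deployment:
#
#   MIDDLEWARE = [
#       ...,
#       'student.middleware.UserStandingMiddleware',
#   ]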
|
Shaswat27/sympy
|
sympy/printing/lambdarepr.py
|
Python
|
bsd-3-clause
| 8,389
| 0.006556
|
from __future__ import print_function, division
from .str import StrPrinter
from sympy.utilities import default_sort_key
class LambdaPrinter(StrPrinter):
"""
This printer converts expressions into strings that can be used by
lambdify.
"""
def _print_MatrixBase(self, expr):
return "%s(%s)" % (expr.__class__.__name__,
self._print((expr.tolist())))
_print_SparseMatrix = \
_print_MutableSparseMatrix = \
_print_ImmutableSparseMatrix = \
_print_Matrix = \
_print_DenseMatrix = \
_print_MutableDenseMatrix = \
_print_ImmutableMatrix = \
_print_ImmutableDenseMatrix = \
_print_MatrixBase
def _print_Piecewise(self, expr):
result = []
i = 0
for arg in expr.args:
e = arg.expr
c = arg.cond
result.append('((')
result.append(self._print(e))
result.append(') if (')
result.append(self._print(c))
result.append(') else (')
i += 1
result = result[:-1]
result.append(') else None)')
result.append(')'*(2*i - 2))
return ''.join(result)
def _print_Sum(self, expr):
loops = (
'for {i} in range({a}, {b}+1)'.format(
i=self._print(i),
a=self._print(a),
b=self._print(b))
for i, a, b in expr.limits)
return '(builtins.sum({function} {loops}))'.format(
function=self._print(expr.function),
loops=' '.join(loops))
def _print_And(self, expr):
result = ['(']
for arg in sorted(expr.args, key=default_sort_key):
result.extend(['(', self._print(arg), ')'])
result.append(' and ')
result = result[:-1]
result.append(')')
return ''.join(result)
def _print_Or(self, expr):
result = ['(']
for arg in sorted(expr.args, key=default_sort_key):
result.extend(['(', self._print(arg), ')'])
result.append(' or ')
result = result[:-1]
result.append(')')
return ''.join(result)
def _print_Not(self, expr):
result = ['(', 'not (', self._print(expr.args[0]), '))']
return ''.join(result)
def _print_BooleanTrue(self, expr):
return "True"
def _print_BooleanFalse(self, expr):
return "False"
def _print_ITE(self, expr):
result = [
'((', self._print(expr.args[1]),
') if (', self._print(expr.args[0]),
') else (', self._print(expr.args[2]), '))'
]
return ''.join(result)
class NumPyPrinter(LambdaPrinter):
"""
    Numpy printer which handles vectorized piecewise functions,
logical operators, etc.
"""
_default_settings = {
"order": "none",
"full_prec": "auto",
}
def _print_seq(self, seq, delimiter=', '):
"General sequence printer: converts to tuple"
# Print tuples here instead of lists because numba supports
# tuples in nopython mode.
        return '({},)'.format(delimiter.join(self._print(item) for item in seq))
def _print_MatMul(self, expr):
"Matrix multiplication printer"
return '({0})'.format(').dot('.join(self._print(i) for i in expr.args))
def _print_Piecewise(self, expr):
"Piecewise function printer"
exprs = '[{0}]'.format(','.join(self._print(arg.expr) for arg in expr.args))
conds = '[{0}]'.format(','.join(self._print(arg.cond) for arg in expr.args))
# If [default_value, True] is a (expr, cond) sequence in a Piecewise object
# it will behave the same as passing the 'default' kwarg to select()
# *as long as* it is the last element in expr.args.
# If this is not the case, it may be triggered prematurely.
return 'select({0}, {1}, default=nan)'.format(conds, exprs)
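
    # Illustrative sketch (editor's note, not original code): for
    # Piecewise((x, x > 0), (0, True)) the method above emits a string roughly
    # of the form
    #     select([greater(x, 0),True], [x,0], default=nan)
    # i.e. the conditions and expressions become the two list arguments of
    # numpy.select().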
def _print_Relational(self, expr):
"Relational printer for Equality and Unequality"
op = {
'==' :'equal',
'!=' :'not_equal',
'<' :'less',
'<=' :'less_equal',
'>' :'greater',
'>=' :'greater_equal',
}
if expr.rel_op in op:
lhs = self._print(expr.lhs)
rhs = self._print(expr.rhs)
return '{op}({lhs}, {rhs})'.format(op=op[expr.rel_op],
lhs=lhs,
rhs=rhs)
return super(NumPyPrinter, self)._print_Relational(expr)
def _print_And(self, expr):
"Logical And printer"
# We have to override LambdaPrinter because it uses Python 'and' keyword.
# If LambdaPrinter didn't define it, we could use StrPrinter's
# version of the function and add 'logical_and' to NUMPY_TRANSLATIONS.
return '{0}({1})'.format('logical_and', ','.join(self._print(i) for i in expr.args))
def _print_Or(self, expr):
"Logical Or printer"
# We have to override LambdaPrinter because it uses Python 'or' keyword.
# If LambdaPrinter didn't define it, we could use StrPrinter's
# version of the function and add 'logical_or' to NUMPY_TRANSLATIONS.
return '{0}({1})'.format('logical_or', ','.join(self._print(i) for i in expr.args))
def _print_Not(self, expr):
"Logical Not printer"
# We have to override LambdaPrinter because it uses Python 'not' keyword.
# If LambdaPrinter didn't define it, we would still have to define our
# own because StrPrinter doesn't define it.
return '{0}({1})'.format('logical_not', ','.join(self._print(i) for i in expr.args))
# numexpr works by altering the string passed to numexpr.evaluate
# rather than by populating a namespace. Thus a special printer...
class NumExprPrinter(LambdaPrinter):
# key, value pairs correspond to sympy name and numexpr name
# functions not appearing in this dict will raise a TypeError
_numexpr_functions = {
'sin' : 'sin',
'cos' : 'cos',
'tan' : 'tan',
'asin': 'arcsin',
'acos': 'arccos',
'atan': 'arctan',
'atan2' : 'arctan2',
'sinh' : 'sinh',
'cosh' : 'cosh',
'tanh' : 'tanh',
'asinh': 'arcsinh',
'acosh': 'arccosh',
'atanh': 'arctanh',
'ln' : 'log',
'log': 'log',
'exp': 'exp',
'sqrt' : 'sqrt',
'Abs' : 'abs',
'conjugate' : 'conj',
'im' : 'imag',
're' : 'real',
'where' : 'where',
'complex' : 'complex',
'contains' : 'contains',
}
def _print_ImaginaryUnit(self, expr):
return '1j'
def _print_seq(self, seq, delimiter=', '):
# simplified _print_seq taken from pretty.py
s = [self._print(item) for item in seq]
if s:
return delimiter.join(s)
else:
return ""
def _print_Function(self, e):
func_name = e.func.__name__
nstr = self._numexpr_functions.get(func_name, None)
if nstr is None:
# check for implemented_function
if hasattr(e, '_imp_'):
return "(%s)" % self._print(e._imp_(*e.args))
else:
raise TypeError("numexpr does not support function '%s'" %
func_name)
return "%s(%s)" % (nstr, self._print_seq(e.args))
def blacklisted(self, expr):
raise TypeError("numexpr cannot be used with %s" %
expr.__class__.__name__)
# blacklist all Matrix printing
_print_SparseMatrix = \
_print_MutableSparseMatrix = \
_print_ImmutableSparseMatrix = \
_print_Matrix = \
_print_DenseMatrix = \
_print_MutableDenseMatrix = \
_print_ImmutableMatrix = \
_print_ImmutableDenseMatrix = \
blacklisted
# blacklist some python expressions
_print_list = \
_print_tuple = \
_print_Tuple = \
_print_dict = \
_print_Dict = \
blacklisted
def doprint(self, expr):
lstr = super(NumExprPrin
|
spcui/tp-qemu
|
qemu/tests/migration.py
|
Python
|
gpl-2.0
| 9,706
| 0.000309
|
import logging
import time
import types
from autotest.client.shared import error
from virttest import utils_misc, utils_test, aexpect
def run(test, params, env):
"""
KVM migration test:
1) Get a live VM and clone it.
2) Verify that the source VM supports migration. If it does, proceed with
the test.
3) Send a migration command to the source VM and wait until it's finished.
4) Kill off the source VM.
    5) Log into the destination VM after the migration is finished.
    6) Compare the output of a reference command executed on the source with
       the output of the same command on the destination machine.
:param test: QEMU test object.
:param params: Dictionary with test parameters.
:param env: Dictionary with the test environment.
"""
def guest_stress_start(guest_stress_test):
"""
        Start a stress test in the guest; could be 'iozone', 'dd' or 'stress'.
        :param guest_stress_test: the type of stress test to run.
"""
from tests import autotest_control
timeout = 0
if guest_stress_test == "autotest":
test_type = params.get("test_type")
func = autotest_control.run_autotest_control
new_params = params.copy()
new_params["test_control_file"] = "%s.control" % test_type
args = (test, new_params, env)
timeout = 60
elif guest_stress_test == "dd":
vm = env.get_vm(env, params.get("main_vm"))
vm.verify_alive()
session = vm.wait_for_login(timeout=login_timeout)
func = session.cmd_output
args = ("for((;;)) do dd if=/dev/zero of=/tmp/test bs=5M "
"count=100; rm -f /tmp/test; done",
login_timeout, logging.info)
logging.info("Start %s test in guest", guest_stress_test)
bg = utils_test.BackgroundTest(func, args)
params["guest_stress_test_pid"] = bg
bg.start()
if timeout:
logging.info("sleep %ds waiting guest test start.", timeout)
time.sleep(timeout)
if not bg.is_alive():
raise error.TestFail("Failed to start guest test!")
def guest_stress_deamon():
"""
        This daemon keeps watching the status of the stress test in the guest. If
        the stress program finishes before migration, it will be restarted.
"""
while True:
bg = params.get("guest_stress_test_pid")
action = params.get("action")
if action == "run":
logging.debug("Check if guest stress is still running")
guest_stress_test = params.get("guest_stress_test")
if bg and not bg.is_alive():
logging.debug("Stress process finished, restart it")
guest_stress_start(guest_stress_test)
time.sleep(30)
else:
logging.debug("Stress still on")
else:
if bg and bg.is_alive():
try:
stress_stop_cmd = params.get("stress_stop_cmd")
vm = env.get_vm(env, params.get("main_vm"))
vm.verify_alive()
session = vm.wait_for_login()
if stress_stop_cmd:
logging.warn("Killing background stress process "
"with cmd '%s', you would see some "
"error message in client test result,"
"it's harmless.", stress_stop_cmd)
session.cmd(stress_stop_cmd)
bg.join(10)
except Exception:
pass
break
time.sleep(10)
def get_functions(func_names, locals_dict):
"""
Find sub function(s) in this function with the given name(s).
"""
if not func_names:
return []
funcs = []
for f in func_names.split():
f = locals_dict.get(f)
if isinstance(f, types.FunctionType):
funcs.append(f)
return funcs
def mig_set_speed():
mig_speed = params.get("mig_speed", "1G")
return vm.monitor.migrate_set_speed(mig_speed)
login_timeout = int(params.get("login_timeout", 360))
mig_timeout = float(params.get("mig_timeout", "3600"))
mig_protocol = params.get("migration_protocol", "tcp")
mig_cancel_delay = int(params.get("mig_cancel") == "yes") * 2
mig_exec_cmd_src = params.get("migration_exec_cmd_src")
mig_exec_cmd_dst = params.get("migration_exec_cmd_dst")
if mig_exec_cmd_src and "gzip" in mig_exec_cmd_src:
mig_exec_file = params.get("migration_exec_file", "/var/tmp/exec")
mig_exec_file += "-%s" % utils_misc.generate_random_string(8)
mig_exec_cmd_src = mig_exec_cmd_src % mig_exec_file
mig_exec_cmd_dst = mig_exec_cmd_dst % mig_exec_file
offline = params.get("offline", "no") == "yes"
check = params.get("vmstate_check", "no") == "yes"
living_guest_os = params.get("migration_living_guest", "yes") == "yes"
deamon_thread = None
vm = env.get_vm(params["main_vm"])
vm.verify_alive()
if living_guest_os:
session = vm.wait_for_login(timeout=login_timeout)
# Get the output of migration_test_command
test_command = params.get("migration_test_command")
reference_output = session.cmd_output(test_command)
# Start some process in the background (and leave the session open)
background_command = params.get("migration_bg_command", "")
session.sendline(background_command)
time.sleep(5)
# Start another session with the guest and make sure the background
# process is running
session2 = vm.wait_for_login(timeout=login_timeout)
try:
check_command = params.get("migration_bg_check_command", "")
session2.cmd(check_command, timeout=30)
session2.close()
# run some functions before migrate start.
pre_migrate = get_functions(params.get("pre_migrate"), locals())
for func in pre_migrate:
func()
# Start stress test in guest.
guest_stress_test = params.get("guest_stress_test")
if guest_stress_test:
guest_stress_start(guest_stress_test)
params["action"] = "run"
deamon_thread = utils_test.BackgroundTest(
guest_stress_deamon, ())
deamon_thread.start()
# Migrate the VM
ping_pong = params.get("ping_pong", 1)
for i in xrange(int(ping_pong)):
if i % 2 == 0:
logging.info("Round %s ping..." % str(i / 2))
else:
logging.info("Round %s pong..." % str(i / 2))
vm.migrate(mig_timeout, mig_protocol, mig_cancel_delay,
offline, check,
migration_exec_cmd_src=mig_exec_cmd_src,
migration_exec_cmd_dst=mig_exec_cmd_dst)
            # Set daemon thread action to stop after migration
params["action"] = "stop"
# run some functions after migrate finish.
post_migrate = get_functions(params.get("post_migrate"), locals())
for func in post_migrate:
func()
# Log into the guest again
logging.info("Logging into guest after migration...")
            session2 = vm.wait_for_login(timeout=30)
logging.info("Logged in after migration")
# Make sure the background process is still running
session2.cmd(check_command, timeout=30)
# Get the output of migration_test_command
output = session2.cmd_output(test_command)
# Compare output to reference output
if output != reference_output:
logging.info("Command output before migration differs from "
| |
william-richard/moto
|
tests/test_cloudformation/fixtures/kms_key.py
|
Python
|
apache-2.0
| 1,645
| 0.000608
|
from __future__ import unicode_literals
template = {
    "AWSTemplateFormatVersion": "2010-09-09",
    "Description": "AWS CloudFormation Sample Template to create a KMS Key. The Fn::GetAtt is used to retrieve the ARN",
    "Resources": {
        "myKey": {
            "Type": "AWS::KMS::Key",
            "Properties": {
                "Description": "Sample KmsKey",
                "EnableKeyRotation": False,
                "Enabled": True,
                "KeyPolicy": {
"Version": "2012-10-17",
"Id": "key-default-1",
"Statement": [
{
"Sid": "Enable IAM User Permissions",
"Effect": "Allow",
"Principal": {
"AWS": {
"Fn::Join": [
"",
[
"arn:aws:iam::",
{"Ref": "AWS::AccountId"},
":root",
],
]
}
},
"Action": "kms:*",
"Resource": "*",
}
],
},
},
}
},
"Outputs": {
"KeyArn": {
"Description": "Generated Key Arn",
"Value": {"Fn::GetAtt": ["myKey", "Arn"]},
}
},
}
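
# Illustrative usage sketch (editor's note, not part of the fixture): in a
# test the template is typically serialized and handed to CloudFormation,
# for example:
#
#   import json, boto3
#   cf = boto3.client("cloudformation", region_name="us-east-1")
#   cf.create_stack(StackName="test-kms", TemplateBody=json.dumps(template))
#
# after which the key ARN is exposed through the "KeyArn" stack output.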
|
civalin/cmdlr
|
src/cmdlr/analyzers/cartoonmad.py
|
Python
|
mit
| 3,587
| 0.000561
|
"""The www.cartoonmad.com analyzer.
[Entry examples]
- http://www.cartoonmad.com/comic/5640.html
- https://www.cartoonmad.com/comic/5640.html
"""
import re
from urllib.parse import parse_qsl
from cmdlr.analyzer import BaseAnalyzer
from cmdlr.autil import fetch
class Analyzer(BaseAnalyzer):
"""The www.cartoonmad.com analyzer.
[Entry examples]
- http://www.cartoonmad.com/comic/5640.html
- https://www.cartoonmad.com/comic/5640.html
"""
entry_patterns = [
re.compile(
r'^https?://(?:www.)?cartoonmad.com/comic/(\d+)(?:\.html)?$'
),
]
def entry_normalizer(self, url):
"""Normalize all possible entry url to single one form."""
match = self.entry_patterns[0].search(url)
id = match.group(1)
return 'https://www.cartoonmad.com/comic/{}.html'.format(id)
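
    # Illustrative examples (editor's note): every accepted entry form
    # collapses to the single canonical https form, e.g. both
    #   'http://cartoonmad.com/comic/5640'
    #   'http://www.cartoonmad.com/comic/5640.html'
    # normalize to 'https://www.cartoonmad.com/comic/5640.html'.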
@staticmethod
def __extract_name(fetch_result):
return fetch_result.soup.title.string.split(' - ')[0]
@staticmethod
def __extract_volumes(fetch_result):
a_tags = (fetch_result.soup
.find('legend', string=re.compile('漫畫線上觀看'))
.parent
.find_all(href=re.compile(r'^/comic/')))
return {a.string: fetch_result.absurl(a.get('href'))
for a in a_tags}
@staticmethod
def __extract_finished(fetch_result):
return (True
if fetch_result.soup.find('img', src='/image/chap9.gif')
else False)
@staticmethod
def __extract_description(fetch_result):
return (fetch_result.soup
.find('fieldset', id='info').td.get_text().strip())
@staticmethod
def __extract_authors(fetch_result):
return [fetch_result.soup
.find(string=re.compile('作者:'))
.string.split(':')[1].strip()]
async def get_comic_info(self, url, request, **unused):
"""Get comic info."""
fetch_result = await fetch(url, request, encoding='big5')
return {
'name': self.__extract_name(fetch_result),
'volumes': self.__extract_volumes(fetch_result),
'description': self.__extract_description(fetch_result),
'authors': self.__extract_authors(fetch_result),
'finished': self.__extract_finished(fetch_result),
}
@staticmethod
def __get_imgurl_func(soup, absurl):
# print(soup.find('img', src=re.compile('comicpic.asp')))
src = soup.find('img', src=re.compile(r'comicpic.asp'))['src']
        abspath, qs_string = absurl(src).split('?', maxsplit=1)
qs = dict(parse_qsl(qs_string))
file_parts = qs['file'].split('/')
file_parts[-1] = '{:0>3}'
qs['file'] = '/'.join(file_parts)
qs_tpl = '&'.join(['{}={}'.format(key, value)
for key, value in qs.items()])
abspath_tpl = '{}?{}'.format(abspath, qs_tpl)
def get_imgurl(page_number):
return abspath_tpl.format(page_number)
return get_imgurl
async def save_volume_images(self, url, request, save_image, **unused):
"""Get all images in one volume."""
soup, absurl = await fetch(url, request, encoding='big5')
get_img_url = self.__get_imgurl_func(soup, absurl)
page_count = len(soup.find_all('option', value=True))
for page_num in range(1, page_count + 1):
save_image(
page_num,
url=get_img_url(page_num),
headers={'Referer': url},
)
|
braynebuddy/PyBrayne
|
act_twitter.py
|
Python
|
gpl-3.0
| 3,157
| 0.009819
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# TODO prog_base.py - A starting template for Python scripts
#
# Copyright 2013 Robert B. Hawkins
#
"""
SYNOPSIS
TODO prog_base [-h,--help] [-v,--verbose] [--version]
DESCRIPTION
TODO This describes how to use this script. This docstring
will be printed by the script if there is an error or
if the user requests help (-h or --help).
EXAMPLES
TODO: Show some examples of how to use this script.
EXIT STATUS
TODO: List exit codes
AUTHOR
Rob Hawkins <webwords@txhawkins.net>
LICENSE
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
VERSION
1.0.0
"""
__author__ = "Rob Hawkins <webwords@txhawkins.net>"
__version__ = "1.0.0"
__date__ = "2013.12.01"
# Version   Date        Notes
# ------- ---------- -------------------------------------------------------
# 1.0.0 2013.12.01 Starting script template
#
import sys, os, traceback, argparse
import time
import re
#from pexpect import run, spawn
def test ():
global options, args
# TODO: Do something more interesting here...
print 'Hello from the test() function!'
def main ():
global options, args
# TODO: Do something more interesting here...
print 'Hello world!'
if __name__ == '__main__':
try:
start_time = time.time()
#parser = argparse.ArgumentParser(description="This is the program description", usage=globals()['__doc__'])
parser = argparse.ArgumentParser(description='This is the program description')
parser.add_argument('--version', action='version', version='%(prog)s v'+__version__)
parser.add_argument ('-v', '--verbose', action='store_true', help='produce verbose output')
parser.add_argument ('-t', '--test', action='store_true', help='run test suite')
args = parser.parse_args()
#if len(args) < 1:
# parser.error ('missing argument')
if args.verbose: print time.asctime()
if args.test:
test()
else:
main()
if args.verbose: print time.asctime()
if args.verbose: print 'TOTAL TIME IN MINUTES:',
if args.verbose: print (time.time() - start_time) / 60.0
sys.exit(0)
except KeyboardInterrupt, e: # Ctrl-C
raise e
except SystemExit, e: # sys.exit()
raise e
except Exception, e:
print 'ERROR, UNEXPECTED EXCEPTION'
print str(e)
traceback.print_exc()
os._exit(1)
|
unt-libraries/django-invite
|
invite/migrations/0002_abstract_invitation.py
|
Python
|
bsd-3-clause
| 1,634
| 0.002448
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import uuid
class Migration(migrations.Migration):
dependencies = [
('auth', '0001_initial'),
('invite', '0001_initial'),
]
operations = [
migrations.DeleteModel('PasswordResetInvitation'),
migrations.CreateModel(
name='PasswordResetInvitation',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('activation_code', models.CharField(default=uuid.uuid4, help_text=b'unique id, generated on email submission', unique=True, max_length=36, editable=False)),
('first_name', models.CharField(max_length=36)),
('last_name', models.CharField(max_length=36)),
('username', models.CharField(max_length=36)),
('email', models.EmailField(help_text=b"the potential member's email address", max_length=41)),
('custom_msg', models.TextField(blank=True)),
('date_invited', models.DateField(help_text=b'the day on which the superuser invited the potential member', auto_now=True)),
('is_super_user', models.BooleanField(default=False)),
('groups', models.ManyToManyField(to='auth.Group')),
('permissions', models.ManyToManyField(to='auth.Permission')),
],
options={
'ordering': ['date_invited'],
|
'abstract': False,
|
},
bases=(models.Model,),
),
]
|
Terhands/saskdance
|
app/main.py
|
Python
|
gpl-3.0
| 373
| 0.010724
|
import os
import webapp2
from app import routes
webapp2_config = {'webapp2_extras.sessions':
                   {'secret_key': 'hfgskahjfgd736987qygukr3279rtigu',
'webapp2_extras.jinja2': {'template_path': os.path.join(os.path.abspath(os.path.dirname(__file__)), 'templates')}}}
application = webapp2.WSGIApplication(debug=True, config=webapp2_config)
routes.add_routes(application)
|
petrlosa/ella
|
ella/core/feeds.py
|
Python
|
bsd-3-clause
| 4,034
| 0.001735
|
from mimetypes import guess_type
from django.contrib.syndication.views import Feed
from django.utils.feedgenerator import Atom1Feed
from django.http import Http404
from django.template import TemplateDoesNotExist, RequestContext, NodeList
from ella.core.models import Listing, Category
from ella.core.conf import core_settings
from ella.core.managers import ListingHandler
from ella.photos.models import Format, FormatedPhoto
class RSSTopCategoryListings(Feed):
format_name = None
def __init__(self, *args, **kwargs):
super(RSSTopCategoryListings, self).__init__(*args, **kwargs)
if core_settings.RSS_ENCLOSURE_PHOTO_FORMAT:
self.format_name = core_settings.RSS_ENCLOSURE_PHOTO_FORMAT
if self.format_name is not None:
self.format = Format.objects.get_for_name(self.format_name)
else:
self.format = None
def get_object(self, request, category=''):
bits = category.split('/')
try:
cat = Category.objects.get_by_tree_path(u'/'.join(bits))
except Category.DoesNotExist:
raise Http404()
self.box_context = RequestContext(request)
return cat
def items(self, obj):
qset = Listing.objects.get_queryset_wrapper(category=obj, children=ListingHandler.ALL)
return qset.get_listings(count=core_settings.RSS_NUM_IN_FEED)
# Feed metadata
###########################################################################
def title(self, obj):
return obj.app_data.get('syndication', {}).get('title', obj.title)
def link(self, obj):
return obj.get_absolute_url()
def description(self, obj):
return obj.app_data.get('syndication', {}).get('description', obj.description)
# Item metadata
###########################################################################
def item_guid(self, item):
return str(item.publishable.pk)
def item_pubdate(self, item):
return item.publish_from
def item_title(self, item):
return item.publishable.title
def item_link(self, item):
return item.get_absolute_url()
def item_description(self, item):
if not core_settings.RSS_DESCRIPTION_BOX_TYPE:
return item.publishable.description
        p = item.publishable
box = p.box_class(p, core_settings.RSS_DESCRIPTION_BOX_TYPE, NodeList())
try:
desc = box.render(self.box_context)
except TemplateDoesNotExist:
desc = None
if not desc:
desc = item.publishable.description
return desc
def item_author_name(self, item):
return ', '.join(map(unicode, item.publishable.authors.all()))
# Enclosure - Photo
    ###########################################################################
def item_enclosure_url(self, item):
if not hasattr(item, '__enclosure_url'):
if hasattr(item.publishable, 'feed_enclosure'):
item.__enclosure_url = item.publishable.feed_enclosure()['url']
elif self.format is not None and item.publishable.photo_id:
item.__enclosure_url = FormatedPhoto.objects.get_photo_in_format(item.publishable.photo_id, self.format)['url']
else:
item.__enclosure_url = None
return item.__enclosure_url
def item_enclosure_mime_type(self, item):
enc_url = self.item_enclosure_url(item)
if enc_url:
return guess_type(enc_url)[0]
def item_enclosure_length(self, item):
# make sure get_photo_in_format was called
if hasattr(item.publishable, 'feed_enclosure'):
return item.publishable.feed_enclosure()['size']
elif self.format:
fp, created = FormatedPhoto.objects.get_or_create(photo=item.publishable.photo_id, format=self.format)
return fp.image.size
class AtomTopCategoryListings(RSSTopCategoryListings):
feed_type = Atom1Feed
subtitle = RSSTopCategoryListings.description
|
quinox/weblate
|
weblate/accounts/models.py
|
Python
|
gpl-3.0
| 26,496
| 0
|
# -*- coding: utf-8 -*-
#
# Copyright © 2012 - 2015 Michal Čihař <michal@cihar.com>
#
# This file is part of Weblate <http://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import os
import sys
import binascii
from smtplib import SMTPException
from django.db import models
from django.dispatch import receiver
from django.conf import settings
from django.contrib.auth.signals import user_logged_in
from django.db.models.signals import post_save, post_migrate
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import Group, User, Permission
from django.utils import translation as django_translation
from django.template.loader import render_to_string
from django.core.mail import EmailMultiAlternatives, get_connection
from django.utils.translation import LANGUAGE_SESSION_KEY
from social.apps.django_app.default.models import UserSocialAuth
from weblate.lang.models import Language
from weblate.trans.site import get_site_url, get_site_domain
from weblate.accounts.avatar import get_user_display
from weblate.trans.util import report_error
from weblate.trans.signals import user_pre_delete
from weblate import VERSION
from weblate.logger import LOGGER
from weblate.appsettings import ANONYMOUS_USER_NAME, SITE_TITLE
def send_mails(mails):
    """Sends multiple mails in a single connection."""
try:
connection = get_connection()
connection.send_messages(mails)
except SMTPException as error:
LOGGER.error('Failed to send email: %s', error)
report_error(error, sys.exc_info())
def get_author_name(user, email=True):
"""Returns formatted author name with email."""
# Get full name from database
    full_name = user.first_name
# Use username if full name is empty
if full_name == '':
full_name = user.username
# Add email if we are asked for it
if not email:
return full_name
return '%s <%s>' % (full_name, user.email)
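
# Illustrative examples (editor's note, values invented):
#   get_author_name(user)              -> 'Jane Doe <jane@example.com>'
#   get_author_name(user, email=False) -> 'Jane Doe'
# When the full name is empty the username is used instead.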
def notify_merge_failure(subproject, error, status):
'''
Notification on merge failure.
'''
subscriptions = Profile.objects.subscribed_merge_failure(
subproject.project,
)
users = set()
mails = []
for subscription in subscriptions:
mails.append(
subscription.notify_merge_failure(subproject, error, status)
)
users.add(subscription.user_id)
for owner in subproject.project.owners.all():
mails.append(
owner.profile.notify_merge_failure(
subproject, error, status
)
)
# Notify admins
mails.append(
get_notification_email(
'en',
'ADMINS',
'merge_failure',
subproject,
{
'subproject': subproject,
'status': status,
'error': error,
}
)
)
send_mails(mails)
def notify_new_string(translation):
'''
Notification on new string to translate.
'''
mails = []
subscriptions = Profile.objects.subscribed_new_string(
translation.subproject.project, translation.language
)
for subscription in subscriptions:
mails.append(
subscription.notify_new_string(translation)
)
send_mails(mails)
def notify_new_language(subproject, language, user):
'''
Notify subscribed users about new language requests
'''
mails = []
subscriptions = Profile.objects.subscribed_new_language(
subproject.project,
user
)
users = set()
for subscription in subscriptions:
mails.append(
subscription.notify_new_language(subproject, language, user)
)
users.add(subscription.user_id)
for owner in subproject.project.owners.all():
mails.append(
owner.profile.notify_new_language(
subproject, language, user
)
)
# Notify admins
mails.append(
get_notification_email(
'en',
'ADMINS',
'new_language',
subproject,
{
'language': language,
'user': user,
},
user=user,
)
)
send_mails(mails)
def notify_new_translation(unit, oldunit, user):
'''
Notify subscribed users about new translation
'''
mails = []
subscriptions = Profile.objects.subscribed_any_translation(
unit.translation.subproject.project,
unit.translation.language,
user
)
for subscription in subscriptions:
mails.append(
subscription.notify_any_translation(unit, oldunit)
)
send_mails(mails)
def notify_new_contributor(unit, user):
'''
Notify about new contributor.
'''
mails = []
subscriptions = Profile.objects.subscribed_new_contributor(
unit.translation.subproject.project,
unit.translation.language,
user
)
for subscription in subscriptions:
mails.append(
subscription.notify_new_contributor(
unit.translation, user
)
)
send_mails(mails)
def notify_new_suggestion(unit, suggestion, user):
'''
Notify about new suggestion.
'''
mails = []
subscriptions = Profile.objects.subscribed_new_suggestion(
unit.translation.subproject.project,
unit.translation.language,
user
)
for subscription in subscriptions:
mails.append(
subscription.notify_new_suggestion(
unit.translation,
suggestion,
unit
)
)
send_mails(mails)
def notify_new_comment(unit, comment, user, report_source_bugs):
'''
Notify about new comment.
'''
mails = []
subscriptions = Profile.objects.subscribed_new_comment(
unit.translation.subproject.project,
comment.language,
user
)
for subscription in subscriptions:
mails.append(
subscription.notify_new_comment(unit, comment, user)
)
# Notify upstream
if comment.language is None and report_source_bugs != '':
send_notification_email(
'en',
report_source_bugs,
'new_comment',
unit.translation,
{
'unit': unit,
'comment': comment,
'subproject': unit.translation.subproject,
},
user=user,
)
send_mails(mails)
def get_notification_email(language, email, notification,
translation_obj=None, context=None, headers=None,
user=None, info=None):
'''
Renders notification email.
'''
cur_language = django_translation.get_language()
context = context or {}
headers = headers or {}
references = None
if 'unit' in context:
unit = context['unit']
references = '{0}/{1}/{2}/{3}'.format(
unit.translation.subproject.project.slug,
unit.translation.subproject.slug,
unit.translation.language.code,
unit.id
)
if references is not None:
references = '<{0}@{1}>'.format(references, get_site_domain())
headers['In-Reply-To'] = references
headers['References'] = references
try:
if info is None:
info = translation_obj.__unicode__()
LOGGER.info(
'sending notification %s on %s to %s',
notification,
info,
emai
|
markmcclain/astara
|
akanda/rug/api/configuration.py
|
Python
|
apache-2.0
| 6,984
| 0.000143
|
# Copyright 2014 DreamHost, LLC
#
# Author: DreamHost, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import re
import netaddr
from oslo.config import cfg
from akanda.rug.openstack.common import jsonutils
LOG = logging.getLogger(__name__)
DEFAULT_AS = 64512
OPTIONS = [
cfg.StrOpt('provider_rules_path'),
cfg.IntOpt('asn', default=DEFAULT_AS),
cfg.IntOpt('neighbor_asn', default=DEFAULT_AS),
]
cfg.CONF.register_opts(OPTIONS)
EXTERNAL_NET = 'external'
INTERNAL_NET = 'internal'
MANAGEMENT_NET = 'management'
SERVICE_STATIC = 'static'
SERVICE_DHCP = 'dhcp'
SERVICE_RA = 'ra'
def build_config(client, router, interfaces):
provider_rules = load_provider_rules(cfg.CONF.provider_rules_path)
networks = generate_network_config(client, router, interfaces)
gateway = get_default_v4_gateway(client, router, networks)
return {
'asn': cfg.CONF.asn,
'neighbor_asn': cfg.CONF.neighbor_asn,
'default_v4_gateway': gateway,
'networks': networks,
'labels': provider_rules.get('labels', {}),
'floating_ips': generate_floating_config(router),
'tenant_id': router.tenant_id,
'hostname': router.name
}
def get_default_v4_gateway(client, router, networks):
"""Find the IPv4 default gateway for the router.
"""
LOG.debug('networks = %r', networks)
LOG.debug('external interface = %s', router.external_port.mac_address)
# Now find the subnet that our external IP is on, and return its
# gateway.
for n in networks:
if n['network_type'] == EXTERNAL_NET:
v4_addresses = [
addr
for addr in (netaddr.IPAddress(ip.partition('/')[0])
for ip in n['interface']['addresses'])
if addr.version == 4
]
for s in n['subnets']:
subnet = netaddr.IPNetwork(s['cidr'])
if subnet.version != 4:
continue
LOG.debug(
'%s: checking if subnet %s should have the default route',
router.id, s['cidr'])
for addr in v4_addresses:
if addr in subnet:
LOG.debug(
'%s: found gateway %s for subnet %s on network %s',
router.id,
s['gateway_ip'],
s['cidr'],
n['network_id'],
)
return s['gateway_ip']
# Sometimes we are asked to build a configuration for the server
# when the external interface is still marked as "down". We can
# report that case, but we don't treat it as an error here because
# we'll be asked to do it again when the interface comes up.
LOG.info('%s: no default gateway was found', router.id)
return ''
def load_provider_rules(path):
try:
return jsonutils.load(open(path))
except: # pragma nocover
LOG.exception('unable to open provider rules: %s' % path)
def generate_network_config(client, router, interfaces):
iface_map = dict((i['lladdr'], i['ifname']) for i in interfaces)
retval = [
_network_config(
client,
router.external_port,
iface_map[router.external_port.mac_address],
EXTERNAL_NET),
_management_network_config(
router.management_port,
iface_map[router.management_port.mac_address],
interfaces,
)]
retval.extend(
_network_config(
client,
p,
iface_map[p.mac_address],
INTERNAL_NET,
client.get_network_ports(p.network_id))
for p in router.internal_ports)
return retval
def _management_network_config(port, ifname, interfaces):
for iface in interfaces:
if iface['ifname'] == ifname:
return _make_network_config_dict(
iface, MANAGEMENT_NET, port.network_id)
def _network_config(client, port, ifname, network_type, network_ports=[]):
subnets = client.get_network_subnets(port.network_id)
subnets_dict = dict((s.id, s) for s in subnets)
return _make_network_config_dict(
_interface_config(ifname, port, subnets_dict),
network_type,
port.network_id,
subnets_dict=subnets_dict,
network_ports=network_ports)
def _make_network_config_dict(interface, network_type, network_id,
v4_conf=SERVICE_STATIC, v6_conf=SERVICE_STATIC,
subnets_dict={}, network_ports=[]):
return {'interface': interface,
'network_id': network_id,
'v4_conf_service': v4_conf,
'v6_conf_service': v6_conf,
'network_type': network_type,
'subnets': [_subnet_config(s) for s in subnets_dict.values()],
'allocations': _allocation_config(network_ports, subnets_dict)}
def _interface_config(ifname, port, subnets_dict):
def fmt(fixed):
return '%s/%s' % (fixed.ip_address,
subnets_dict[fixed.subnet_id].cidr.prefixlen)
return {'ifname': ifname,
'addresses': [fmt(fixed) for fixed in port.fixed_ips]}
def _subnet_config(subnet):
return {
'cidr': str(subnet.cidr),
'dhcp_enabled': subnet.enable_dhcp and subnet.ipv6_ra_mode != 'slaac',
'dns_nameservers': subnet.dns_nameservers,
'host_routes': subnet.host_routes,
'gateway_ip': (str(subnet.gateway_ip)
if subnet.gateway_ip is not None
else ''),
}
def _allocation_config(ports, subnets_dict):
r = re.compile('[:.]')
    allocations = []
for port in ports:
addrs = {
str(fixed.ip_address): subnets_dict[fixed.subnet_id].enable_dhcp
for fixed in port.fixed_ips
}
if not addrs:
continue
allocations.append(
{
'ip_addresses': addrs,
'device_id': port.device_id,
'hostname': '%s.local' % r.sub('-', sorted(addrs.keys())[0]),
'mac_address': port.mac_address
}
)
    return allocations
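
# Illustrative example (editor's note): the '[:.]' substitution above turns an
# address into a hostname-safe label, e.g. '192.168.1.5' -> '192-168-1-5',
# so the allocation hostname becomes '192-168-1-5.local'.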
def generate_floating_config(router):
return [
{'floating_ip': str(fip.floating_ip), 'fixed_ip': str(fip.fixed_ip)}
for fip in router.floating_ips
]
|
ethanhlc/streamlink
|
src/streamlink/stream/stream.py
|
Python
|
bsd-2-clause
| 915
| 0
|
import io
import json
class Stream(object):
__shortname__ = "stream"
"""
This is a base class that should be inherited when implementing
different stream types. Should only be created by plugins.
"""
def __init__(self, session):
self.session = session
def __repr__(self):
return "<Stream()>"
def __json__(self):
        return dict(type=type(self).shortname())
def open(self):
"""
Attempts to open a connection to the stream.
Returns a file-like object that can be used to read the stream data.
Raises :exc:`StreamError` on failure.
"""
raise NotImplementedError
@property
def json(self):
obj = self.__json__()
return json.dumps(obj)
@classmethod
def shortname(cls):
return cls.__shortname__
class StreamIO(io.IOBase):
    pass
__all__ = ["Stream", "StreamIO"]
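
# --- Illustrative sketch (editor's note, not part of streamlink) ---
# Plugins subclass Stream and implement open(); the toy subclass below just
# serves static bytes to show the expected shape of an implementation.
class _EchoStream(Stream):
    __shortname__ = "echo"

    def __init__(self, session, data=b"hello"):
        super(_EchoStream, self).__init__(session)
        self.data = data

    def open(self):
        # Return a file-like object over the static payload
        return io.BytesIO(self.data)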
|
Nitaco/ansible
|
lib/ansible/module_utils/network/ios/ios.py
|
Python
|
gpl-3.0
| 5,949
| 0.002858
|
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# (c) 2016 Red Hat Inc.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import json
from ansible.module_utils._text import to_text
from ansible.module_utils.basic import env_fallback, return_values
from ansible.module_utils.network.common.utils import to_list, ComplexList
from ansible.module_utils.connection import Connection, ConnectionError
_DEVICE_CONFIGS = {}
ios_provider_spec = {
'host': dict(),
'port': dict(type='int'),
'username': dict(fallback=(env_fallback, ['ANSIBLE_NET_USERNAME'])),
'password': dict(fallback=(env_fallback, ['ANSIBLE_NET_PASSWORD']), no_log=True),
'ssh_keyfile': dict(fallback=(env_fallback, ['ANSIBLE_NET_SSH_KEYFILE']), type='path'),
'authorize': dict(fallback=(env_fallback, ['ANSIBLE_NET_AUTHORIZE']), type='bool'),
'auth_pass': dict(fallback=(env_fallback, ['ANSIBLE_NET_AUTH_PASS']), no_log=True),
'timeout': dict(type='int')
}
ios_argument_spec = {
'provider': dict(type='dict', options=ios_provider_spec),
}
ios_top_spec = {
'host': dict(removed_in_version=2.9),
'port': dict(removed_in_version=2.9, type='int'),
'username': dict(removed_in_version=2.9),
'password': dict(removed_in_version=2.9, no_log=True),
'ssh_keyfile': dict(removed_in_version=2.9, type='path'),
    'authorize': dict(fallback=(env_fallback, ['ANSIBLE_NET_AUTHORIZE']), type='bool'),
'auth_pass': dict(removed_in_version=2.9, no_log=True),
'timeout': dict(removed_in_version=2.9, type='int')
}
ios_argument_spec.update(ios_top_spec)
def get_provider_argspec():
return ios_provider_spec
def get_connection(module):
    if hasattr(module, '_ios_connection'):
return module._ios_connection
capabilities = get_capabilities(module)
network_api = capabilities.get('network_api')
if network_api == 'cliconf':
module._ios_connection = Connection(module._socket_path)
else:
module.fail_json(msg='Invalid connection type %s' % network_api)
return module._ios_connection
def get_capabilities(module):
if hasattr(module, '_ios_capabilities'):
return module._ios_capabilities
capabilities = Connection(module._socket_path).get_capabilities()
module._ios_capabilities = json.loads(capabilities)
return module._ios_capabilities
def check_args(module, warnings):
pass
def get_defaults_flag(module):
connection = get_connection(module)
out = connection.get('show running-config ?')
out = to_text(out, errors='surrogate_then_replace')
commands = set()
for line in out.splitlines():
if line.strip():
commands.add(line.strip().split()[0])
if 'all' in commands:
return ['all']
else:
return ['full']
def get_config(module, flags=None):
flag_str = ' '.join(to_list(flags))
try:
return _DEVICE_CONFIGS[flag_str]
except KeyError:
connection = get_connection(module)
out = connection.get_config(filter=flags)
cfg = to_text(out, errors='surrogate_then_replace').strip()
_DEVICE_CONFIGS[flag_str] = cfg
return cfg
def to_commands(module, commands):
spec = {
'command': dict(key=True),
'prompt': dict(),
'answer': dict()
}
transform = ComplexList(spec, module)
return transform(commands)
def run_commands(module, commands, check_rc=True):
responses = list()
connection = get_connection(module)
for cmd in to_list(commands):
if isinstance(cmd, dict):
command = cmd['command']
prompt = cmd['prompt']
answer = cmd['answer']
else:
command = cmd
prompt = None
answer = None
try:
out = connection.get(command, prompt, answer)
except ConnectionError as exc:
if check_rc:
module.fail_json(msg=to_text(exc))
else:
out = exc
try:
out = to_text(out, errors='surrogate_or_strict')
except UnicodeError:
module.fail_json(msg=u'Failed to decode output from %s: %s' % (cmd, to_text(out)))
responses.append(out)
return responses
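
# Illustrative sketch (editor's note): run_commands() accepts plain strings or
# dicts carrying prompt/answer for interactive commands; the module and values
# below are invented for illustration only.
#
#   run_commands(module, [
#       'show version',
#       {'command': 'clear counters GigabitEthernet0/1',
#        'prompt': r'\[confirm\]', 'answer': 'y'},
#   ])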
def load_config(module, commands):
connection = get_connection(module)
try:
return connection.edit_config(commands)
except ConnectionError as exc:
module.fail_json(msg=to_text(exc))
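# --- Illustrative usage sketch (added for clarity; not part of the original module) ---
# A hypothetical ios_* Ansible module would typically wire the helpers above
# together roughly as below; the argument handling and the command used are
# assumptions for illustration only.
def _example_module_flow(module):
    """Hedged sketch: gather config and run a show command via the helpers above."""
    warnings = list()
    check_args(module, warnings)
    flags = get_defaults_flag(module)           # e.g. ['all'] or ['full']
    running = get_config(module, flags=flags)   # cached per flag string in _DEVICE_CONFIGS
    output = run_commands(module, ['show version'])
    return dict(changed=False, config=running, stdout=output, warnings=warnings)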
|
Azure/azure-sdk-for-python
|
sdk/recoveryservices/azure-mgmt-recoveryservicesbackup/azure/mgmt/recoveryservicesbackup/activestamp/operations/_jobs_operations.py
|
Python
|
mit
| 5,627
| 0.004087
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_export_request(
vault_name: str,
resource_group_name: str,
subscription_id: str,
*,
filter: Optional[str] = None,
**kwargs: Any
) -> HttpRequest:
api_version = "2021-12-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{vaultName}/backupJobsExport')
path_format_arguments = {
"vaultName": _SERIALIZER.url("vault_name", vault_name, 'str'),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
if filter is not None:
query_parameters['$filter'] = _SERIALIZER.query("filter", filter, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="POST",
url=url,
        params=query_parameters,
headers=header_parameters,
**kwargs
)
class JobsOperations(object):
"""JobsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.recoveryservicesbackup.activestamp.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def export(
self,
vault_name: str,
resource_group_name: str,
filter: Optional[str] = None,
**kwargs: Any
) -> None:
"""Triggers export of jobs specified by filters and returns an OperationID to track.
:param vault_name: The name of the recovery services vault.
:type vault_name: str
:param resource_group_name: The name of the resource group where the recovery services vault is
present.
:type resource_group_name: str
:param filter: OData filter options.
:type filter: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_export_request(
vault_name=vault_name,
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
filter=filter,
template_url=self.export.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
export.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{vaultName}/backupJobsExport'} # type: ignore
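# --- Illustrative usage sketch (added for clarity; not part of the generated file) ---
# JobsOperations is normally reached through the generated service client rather
# than instantiated directly. The client and credential names below are
# assumptions for illustration only:
#
#     from azure.identity import DefaultAzureCredential
#     from azure.mgmt.recoveryservicesbackup.activestamp import RecoveryServicesBackupClient
#
#     client = RecoveryServicesBackupClient(DefaultAzureCredential(), "<subscription-id>")
#     client.jobs.export("myVault", "myResourceGroup", filter="status eq 'Completed'")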
|
somat/samber
|
elf.py
|
Python
|
mit
| 861
| 0.003484
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from elftools.elf.elffile import ELFFile
from elftools.common.exceptions import ELFError
from elftools.elf.segments import NoteSegment
class ReadELF(object):
def __init__(self, file):
self.elffile = ELFFile(file)
def get_build(self):
for segment in self.elffile.iter_segments():
            if isinstance(segment, NoteSegment):
for note in segment.iter_notes():
print note
def main():
if(len(sys.argv) < 2):
print "Missing argument"
sys.exit(1)
with open(sys.argv[1], 'rb') as file:
try:
readelf = ReadELF(file)
readelf.get_build()
        except ELFError as err:
sys.stderr.write('ELF error: %s\n' % err)
sys.exit(1)
if __name__ == '__main__':
main()
|
cyaninc/django-mysql-pymysql
|
src/mysql_pymysql/schema.py
|
Python
|
bsd-3-clause
| 2,073
| 0.00193
|
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from django.db.models import NOT_PROVIDED
class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
sql_rename_table = "RENAME TABLE %(old_table)s TO %(new_table)s"
sql_alter_column_null = "MODIFY %(column)s %(type)s NULL"
sql_alter_column_not_null = "MODIFY %(column)s %(type)s NOT NULL"
sql_alter_column_type = "MODIFY %(column)s %(type)s"
sql_rename_column = "ALTER TABLE %(table)s CHANGE %(old_column)s %(new_column)s %(type)s"
    sql_delete_unique = "ALTER TABLE %(table)s DROP INDEX %(name)s"
sql_create_fk = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s FOREIGN KEY (%(column)s) REFERENCES %(to_table)s (%(to_column)s)"
sql_delete_fk = "ALTER TABLE %(table)s DROP FOREIGN KEY %(name)s"
sql_delete_index = "DROP INDEX %(name)s ON %(table)s"
sql_delete_pk = "ALTER TABLE %(table)s DROP PRIMARY KEY"
    alter_string_set_null = 'MODIFY %(column)s %(type)s NULL;'
alter_string_drop_null = 'MODIFY %(column)s %(type)s NOT NULL;'
sql_create_pk = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s PRIMARY KEY (%(columns)s)"
sql_delete_pk = "ALTER TABLE %(table)s DROP PRIMARY KEY"
def quote_value(self, value):
return self.connection.escape(value)
def skip_default(self, field):
"""
MySQL doesn't accept default values for longtext and longblob
and implicitly treats these columns as nullable.
"""
return field.db_type(self.connection) in {'longtext', 'longblob'}
def add_field(self, model, field):
super(DatabaseSchemaEditor, self).add_field(model, field)
# Simulate the effect of a one-off default.
if self.skip_default(field) and field.default not in {None, NOT_PROVIDED}:
effective_default = self.effective_default(field)
self.execute('UPDATE %(table)s SET %(column)s = %%s' % {
'table': self.quote_name(model._meta.db_table),
'column': self.quote_name(field.column),
}, [effective_default])
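# --- Illustrative usage sketch (added for clarity; not part of the original module) ---
# Django migrations obtain this editor through the backend, so it is rarely used
# directly. A direct, hypothetical invocation might look like this; the model and
# field names are assumptions for illustration only:
#
#     from django.db import connection
#
#     with connection.schema_editor() as editor:
#         editor.add_field(MyModel, MyModel._meta.get_field("new_column"))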
|
MahdiZareie/PyShop
|
shop/models.py
|
Python
|
mit
| 691
| 0.001447
|
from django.db import models
from customer.models import Customer
from product.models import Product
from django.utils import timezone
# Create your models here.
class OrderStatus:
IN_BASKET = 0
PAYED = 1
class Order(models.Model):
    customer = models.ForeignKey(Customer)
product = models.ForeignKey(Product)
quantity = models.IntegerField(default=1)
status = models.SmallIntegerField()
created_at = models.DateTimeField()
def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
if self.pk is None:
self.created_at = timezone.now()
        super().save(force_insert, force_update, using, update_fields)
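# --- Illustrative usage sketch (added for clarity; not part of the original module) ---
# The save() override above stamps created_at only on the first save (pk is None).
# The lookups below are assumptions for illustration only:
#
#     order = Order(customer=Customer.objects.first(),
#                   product=Product.objects.first(),
#                   quantity=2,
#                   status=OrderStatus.IN_BASKET)
#     order.save()  # created_at is set here because pk is still None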
|
marado/stars-to-addresses
|
stars-to-addresses.py
|
Python
|
gpl-3.0
| 5,571
| 0.009514
|
# -*- coding: utf-8 -*-
"""
Go to Google Bookmarks: https://www.google.com/bookmarks/
On the bottom left, click "Export bookmarks": https://www.google.com/bookmarks/bookmarks.html?hl=en
After downloading the html file, run this script on it to get the addresses
This script is based on https://gist.github.com/endolith/3896948
"""
import sys
try:
from lxml.html import document_fromstring
except ImportError:
print "You need to install lxml.html"
sys.exit()
try:
from geopy.geocoders import Nominatim
except ImportError:
print "You need to install geopy"
sys.exit()
try:
import simplekml
except ImportError:
print "You need to install simplekml"
sys.exit()
try:
import json
except ImportError:
print "You need to install json"
sys.exit()
try:
from urllib2 import urlopen
except ImportError:
print "You need to install urllib2"
sys.exit()
try:
import re
except ImportError:
print "You need to install re"
sys.exit()
try:
import time
except ImportError:
print "You need to install time"
sys.exit()
filename = r'GoogleBookmarks.html'
def main():
with open(filename) as bookmarks_file:
data = bookmarks_file.read()
geolocator = Nominatim()
kml = simplekml.Kml()
lst = list()
# Hacky and doesn't work for all of the stars:
lat_re = re.compile('markers:[^\]]*latlng[^}]*lat:([^,]*)')
lon_re = re.compile('markers:[^\]]*latlng[^}]*lng:([^}]*)')
coords_in_url = re.compile('\?q=(-?\d{,3}\.\d*),\s*(-?\d{,3}\.\d*)')
doc = document_fromstring(data)
for element, attribute, url, pos in doc.body.iterlinks():
if 'maps.google' in url:
description = element.text or ''
print description.encode('UTF8')
if coords_in_url.search(url):
# Coordinates are in URL itself
latitude = coords_in_url.search(url).groups()[0]
longitude = coords_in_url.search(url).groups()[1]
else:
# Load map and find coordinates in source of page
try:
url = url.encode('ascii', 'xmlcharrefreplace')
sock = urlopen(url.replace(' ','+').encode('UTF8'))
except Exception, e:
print 'Connection problem:'
print repr(e)
print 'Waiting 3 minutes and trying again'
time.sleep(180)
sock = urlopen(url.replace(' ','+').encode('UTF8'))
content = sock.read()
sock.close()
time.sleep(5) # Don't annoy server
try:
latitude = lat_re.findall(content)[0]
longitude = lon_re.findall(content)[0]
except IndexError:
latitude = ""
longitude = ""
try:
lines = content.split('\n') # --> ['Line 1', 'Line 2', 'Line 3']
for line in lines:
if re.search('cacheResponse\(', line):
splitline = line.split('(')[1].split(')')[0] + '"]'
null = None
values = eval(splitline)
print values[8][0][1]
longitude = str(values[0][0][1])
latitude = str(values[0][0][2])
continue
if latitude == "":
# let's try something different....
for line in lines:
if re.search('APP_INITIALIZATION_STATE', line):
splitline = line.split('[')[-1].split(']')[0].split(',')
longitude = str(splitline[1])
latitude = str(splitline[2])
continue
except IndexError:
print '[Coordinates not found]'
continue
print
print latitude, longitude
try:
if latitude != "":
location = geolocator.reverse(latitude+", "+longitude)
print(location.address)
else:
print '[Invalid coordinates]'
except ValueError:
print '[Invalid coordinates]'
print
if latitude != "":
kml.newpoint(name=description, coords=[(float(longitude), float(latitude))])
else:
kml.newpoint(name=description)
lst.append({'latitude': latitude,
'longitude': longitude,
'name': description,
'url': url.encode(encoding='utf-8', errors='replace'),
'address': location.address.encode(encoding='utf-8', errors='replace') if location else 'error'})
        # this is here because there's a tendency for this script to fail part way through...
# so at least you can get a partial result
kml.save("GoogleBookmarks.kml")
with open('GoogleBookmarks.json', mode='w') as listdump:
listdump.write(json.dumps(lst))
sys.stdout.flush()
kml.save("GoogleBookmarks.kml")
with open('GoogleBookmarks.json', mode='w') as listdump:
listdump.write(json.dumps(lst))
if __name__ == '__main__':
main()
|
obi-two/Rebelion
|
data/scripts/templates/object/weapon/melee/sword/crafted_saber/shared_sword_lightsaber_s11_training.py
|
Python
|
mit
| 490
| 0.044898
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Weapon()
	result.template = "object/weapon/melee/sword/crafted_saber/shared_sword_lightsaber_s11_training.iff"
result.attribute_template_id = 10
result.stfName("weapon_name","sword_lightsaber_type11")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
Sutto/cloud-custodian
|
tools/c7n_azure/tests_azure/test_filters_marked_for_op.py
|
Python
|
apache-2.0
| 3,258
| 0.002762
|
# Copyright 2019 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import datetime
from mock import Mock
from . import tools_tags as tools
from .azure_common import BaseTest
from c7n_azure.filters import TagActionFilter
from c7n_azure.utils import now
from c7n.filters.offhours import Time
class TagsTest(BaseTest):
def test_tag_schema_validate(self):
self.assertTrue(
self.load_policy(
tools.get_policy(filters=[
{'type': 'marked-for-op', 'op': 'delete', 'tag': 'custom'},
]), validate=True))
def _get_filter(self, data):
return TagActionFilter(data=data, manager=Mock)
def _test_filter_scenario(self, resources, expected_count, filter_definition={'op': 'stop'}):
f = self._get_filter(filter_definition)
result = f.process(resources)
self.assertEqual(expected_count, len(result))
def test_tag_filter(self):
date = now().strftime('%Y-%m-%d')
date_future = (now() + datetime.timedelta(days=1)).strftime('%Y-%m-%d')
resources = [tools.get_resource({'custodian_status': 'TTL: stop@{0}'.format(date)}),
tools.get_resource({'custodian_status': 'TTL: stop@{0}'.format(date_future)})]
self._test_filter_scenario(resources, 1)
def test_custom_tag_filter(self):
date = now().strftime('%Y-%m-%d')
resources = [tools.get_resource({'custom_status': 'TTL: stop@{0}'.format(date)})]
filter_definition = {'op': 'stop', 'tag': 'custom_status'}
self._test_filter_scenario(resources, 1, filter_definition)
def test_improper_tag_format(self):
resources = [tools.get_resource({'custodian_status': 'missingcolon}'}),
tools.get_resource({'custodian_status': 'missing: atsign'})]
self._test_filter_scenario(resources, 0)
def test_different_op_returns_no_resource(self):
date = now().strftime('%Y-%m-%d')
        resources = [tools.get_resource({'custodian_status': 'TTL: delete@{0}'.format(date)})]
self._test_filter_scenario(resources, 0)
def test_misformatted_date_string(self):
date = "notadate"
resources = [tools.get_resource({'custodian_status': 'TTL: stop@{0}'.format(date)})]
self._test_filter_scenario(resources, 0)
def test_timezone_in_datestring(self):
tz = Time.get_tz('America/Santiago')
date = (now(tz) - datetime.timedelta(hours=1)).strftime('%Y/%m/%d %H%M %Z')
resources = [tools.get_resource({'custodian_status': 'TTL: stop@{0}'.format(date)})]
self._test_filter_scenario(resources, 1)
|
raqet/acquisition-client
|
testing/test-iscsi.py
|
Python
|
gpl-3.0
| 1,527
| 0.061559
|
#!/usr/bin/python3
import os
import sys
import subprocess
import unittest
def testequal(a,b):
if (a==b):
print ("SUCCESS")
else:
print ("FAIL")
def getPortal():
output=subprocess.check_output(["iscsi-ls","iscsi://localhost:3260"])
print (output)
target=output[7:-25]
#Test if iSCSI portal is created (last part is uid)
return target
def getLun(target,lun):
command=["iscsi-inq",
"iscsi://localhost:3260/%s/%d" % (target,lun)]
try:
output=subprocess.check_output(command,stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
output=e.output
# print (output)
return output
def getLunCapacity(target,lun):
command=["iscsi-readcapacity16",
"iscsi://localhost:3260/%s/%d" % (target,lun)]
try:
output=subprocess.check_output(command,stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
# print e
output=e.output
# print output
Size=0
for line in str(output).split("\n"):
# print line
if (line[:11]=="Total size:"):
Size=int(line[11:])
return Size
class iSCSI_luns(unittest.TestCase):
def test_portal(self):
        self.assertEqual(target[:-12],"iqn.2003-01.org.linux-iscsi.testingclient-hostname:sn.")
def test_lun0(self):
self.assertEqual(getLunCapacity(target,0),51200)
def test_lun1(self):
self.assertEqual(getLunCapacity(target,1),0)
def test_lun2(self):
self.assertEqual(getLunCapacity(target,2),66560)
if __name__ == '__main__':
global target
target=getPortal()
# getLun(target,0)
# getLun(target,1)
# getLun(target,2)
unittest.main()
|
rdmorganiser/rdmo
|
rdmo/conditions/viewsets.py
|
Python
|
apache-2.0
| 2,141
| 0.000934
|
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework.decorators import action
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet
from rdmo.core.exports import XMLResponse
from rdmo.core.permissions import HasModelPermission
from rdmo.core.views import ChoicesViewSet
from rdmo.core.viewsets import CopyModelMixin
from .models import Condition
from .renderers import ConditionRenderer
from .serializers.export import ConditionExportSerializer
from .serializers.v1 import ConditionIndexSerializer, ConditionSerializer
class ConditionViewSet(CopyModelMixin, ModelViewSet):
permission_classes = (HasModelPermission, )
queryset = Condition.objects.select_related('source', 'target_option') \
.prefetch_related('optionsets', 'questionsets', 'questions', 'tasks')
serializer_class = ConditionSerializer
filter_backends = (DjangoFilterBackend,)
filterset_fields = (
'uri',
'key',
'source',
'relation',
'target_text',
'target_option'
)
@action(detail=False)
def index(self, request):
queryset = Condition.objects.select_related('source', 'target_option')
serializer = ConditionIndexSerializer(queryset, many=True)
return Response(serializer.data)
@action(detail=False, permission_classes=[HasModelPermission])
def export(self, request):
serializer = ConditionExportSerializer(self.get_queryset(), many=True)
xml = ConditionRenderer().render(serializer.data)
return XMLResponse(xml, name='conditions')
@action(detail=True, url_path='export', permission_classes=[HasModelPermission])
    def detail_export(self, request, pk=None):
serializer = ConditionExportSerializer(self.get_object())
xml = ConditionRenderer().render([serializer.data])
return XMLResponse(xml, name=self.get_object().key)
class RelationViewSet(ChoicesViewSet):
    permission_classes = (IsAuthenticated, )
queryset = Condition.RELATION_CHOICES
|
felix-dumit/campusbot
|
yowsup2/yowsup/layers/protocol_contacts/protocolentities/test_iq_sync_get.py
|
Python
|
mit
| 810
| 0.011111
|
from yowsup.layers.protocol_contacts.protocolentities.iq_sync_get import GetSyncIqProtocolEntity
from yowsup.structs import ProtocolTreeNode
from yowsup.layers.protocol_contacts.protocolentities.test_iq_sync import SyncIqProtocolEntityTest
class GetSyncIqProtocolEntityTest(SyncIqProtocolEntityTest):
def setUp(self):
super(GetSyncIqProtocolEntityTest, self).setUp()
self.ProtocolEntity = GetSyncIqProtocolEntity
users = [
ProtocolTreeNode("user", data = "abc"),
ProtocolTreeNode("user", data = "xyz")
]
        syncNode = self.node.getChild("sync")
syncNode.setAttribute("mode", GetSyncIqProtocolEntity.MODE_DELTA)
syncNode.setAttribute("context", GetSyncIqProtocolEntity.CONTEXT_INTERACTIVE)
syncNode.addChildren(users)
|
MechanisM/musicdb
|
musicdb/classical/fuse_urls.py
|
Python
|
agpl-3.0
| 173
| 0.00578
|
from django.conf.urls.defaults import *
import views
urlpatterns = patterns('',
    url(r'^$', views.fuse_index),
url(r'^/(?P<dir_name>[^/]+)$', views.fuse_artist),
)
|
papedaniel/oioioi
|
oioioi/zeus/utils.py
|
Python
|
gpl-3.0
| 1,047
| 0.000955
|
from oioioi.base.permissions import make_request_condition
from oioioi.base.utils import request_cached
from oioioi.problems.models import Problem
from oioioi.testrun.utils import testrun_problem_instances
from oioioi.zeus.models import ZeusProblemData
def is_zeus_problem(problem):
try:
return bool(problem.zeusproblemdata)
except ZeusProblemData.DoesNotExist:
return False
def filter_zeus_problem_instances(problem_instances):
    # Not returning new query_set because `instances` may have some cache in it
problems = frozenset(Problem.objects
.filter(pk__in=[p.problem.pk for p in problem_instances])
.exclude(zeusproblemdata=None))
return [pi for pi in problem_instances if pi.problem in problems]
@request_cached
def zeus_testrun_problem_instances(request):
return filter_zeus_problem_instances(testrun_problem_instances(request))
@make_request_condition
@request_cached
def has_any_zeus_testrun_problem(request):
return len(zeus_testrun_problem_instances(request)) > 0
|
justathoughtor2/atomicApe
|
cygwin/lib/python2.7/ctypes/util.py
|
Python
|
gpl-3.0
| 9,752
| 0.002051
|
######################################################################
# This file should be kept compatible with Python 2.3, see PEP 291. #
######################################################################
import sys, os
# find_library(name) returns the pathname of a library, or None.
if os.name == "nt":
def _get_build_version():
"""Return the version of MSVC that was used to build Python.
For Python 2.3 and up, the version number is included in
sys.version. For earlier versions, assume the compiler is MSVC 6.
"""
# This function was copied from Lib/distutils/msvccompiler.py
prefix = "MSC v."
i = sys.version.find(prefix)
if i == -1:
return 6
i = i + len(prefix)
s, rest = sys.version[i:].split(" ", 1)
majorVersion = int(s[:-2]) - 6
minorVersion = int(s[2:3]) / 10.0
# I don't think paths are affected by minor version in version 6
if majorVersion == 6:
minorVersion = 0
if majorVersion >= 6:
return majorVersion + minorVersion
# else we don't know what version of the compiler this is
return None
def find_msvcrt():
"""Return the name of the VC runtime dll"""
version = _get_build_version()
if version is None:
# better be safe than sorry
return None
if version <= 6:
clibname = 'msvcrt'
else:
clibname = 'msvcr%d' % (version * 10)
# If python was built with in debug mode
import imp
if imp.get_suffixes()[0][0] == '_d.pyd':
clibname += 'd'
return clibname+'.dll'
def find_library(name):
if name in ('c', 'm'):
return find_msvcrt()
# See MSDN for the REAL search order.
for directory in os.environ['PATH'].split(os.pathsep):
fname = os.path.join(directory, name)
if os.path.isfile(fname):
return fname
if fname.lower().endswith(".dll"):
continue
fname = fname + ".dll"
if os.path.isfile(fname):
return fname
return None
if os.name == "ce":
# search path according to MSDN:
# - absolute path specified by filename
# - The .exe launch directory
# - the Windows directory
# - ROM dll files (where are they?)
# - OEM specified search path: HKLM\Loader\SystemPath
def find_library(name):
return name
if os.name == "posix" and sys.platform == "darwin":
from ctypes.macholib.dyld import dyld_find as _dyld_find
def find_library(name):
possible = ['lib%s.dylib' % name,
'%s.dylib' % name,
'%s.framework/%s' % (name, name)]
for name in possible:
try:
return _dyld_find(name)
except ValueError:
continue
return None
elif sys.platform == "cygwin":
def find_library(name):
for libdir in ['/usr/lib', '/usr/local/lib']:
for libext in ['lib%s.dll.a' % name, 'lib%s.a' % name]:
implib = os.path.join(libdir, libext)
if not os.path.exists(implib):
continue
cmd = "dlltool -I " + implib + " 2>/dev/null"
res = os.popen(cmd).read().replace("\n","")
if not res:
continue
return res
return None
elif os.name == "posix":
# Andreas Degert's find functions, using gcc, /sbin/ldconfig, objdump
import re, tempfile, errno
def _findLib_gcc(name):
expr = r'[^\(\)\s]*lib%s\.[^\(\)\s]*' % re.escape(name)
fdout, ccout = tempfile.mkstemp()
os.close(fdout)
cmd = 'if type gcc >/dev/null 2>&1; then CC=gcc; elif type cc >/dev/null 2>&1; then CC=cc;else exit 10; fi;' \
'LANG=C LC_ALL=C $CC -Wl,-t -o ' + ccout + ' 2>&1 -l' + name
try:
f = os.popen(cmd)
try:
trace = f.read()
finally:
rv = f.close()
finally:
try:
os.unlink(ccout)
except OSError, e:
if e.errno != errno.ENOENT:
raise
if rv == 10:
raise OSError, 'gcc or cc command not found'
res = re.search(expr, trace)
if not res:
return None
return res.group(0)
if sys.platform == "sunos5":
# use /usr/ccs/bin/dump on solaris
def _get_soname(f):
if not f:
return None
cmd = "/usr/ccs/bin/dump -Lpv 2>/dev/null " + f
f = os.popen(cmd)
try:
data = f.read()
finally:
f.close()
res = re.search(r'\[.*\]\sSONAME\s+([^\s]+)', data)
if not res:
return None
return res.group(1)
else:
def _get_soname(f):
# assuming GNU binutils / ELF
if not f:
return None
cmd = 'if ! type objdump >/dev/null 2>&1; then exit 10; fi;' \
"objdump -p -j .dynamic 2>/dev/null " + f
f = os.popen(cmd)
dump = f.read()
rv = f.close()
if rv == 10:
raise OSError, 'objdump command not found'
f = os.popen(cmd)
try:
data = f.read()
finally:
f.close()
res = re.search(r'\sSONAME\s+([^\s]+)', data)
if not res:
return None
return res.group(1)
if (sys.platform.startswith("freebsd")
or sys.platform.startswith("openbsd")
or sys.platform.startswith("dragonfly")):
def _num_version(libname):
# "libxyz.so.MAJOR.MINOR" => [ MAJOR, MINOR ]
parts = libname.split(".")
nums = []
try:
while parts:
nums.insert(0, int(parts.pop()))
except ValueError:
pass
return nums or [ sys.maxint ]
def find_library(name):
            ename = re.escape(name)
expr = r':-l%s\.\S+ => \S*/(lib%s\.\S+)' % (ename, ename)
f = os.popen('/sbin/ldconfig -r 2>/dev/null')
try:
data = f.read()
finally:
f.close()
res = re.findall(expr, data)
if not res:
return _get_soname(_findLib_gcc(name))
res.sort(key=_num_version)
return res[-1]
elif sys.platform == "sunos5":
def _findLib_crle(name, is64):
if not os.path.exists('/usr/bin/crle'):
return None
if is64:
cmd = 'env LC_ALL=C /usr/bin/crle -64 2>/dev/null'
else:
cmd = 'env LC_ALL=C /usr/bin/crle 2>/dev/null'
for line in os.popen(cmd).readlines():
line = line.strip()
if line.startswith('Default Library Path (ELF):'):
paths = line.split()[4]
if not paths:
return None
for dir in paths.split(":"):
libfile = os.path.join(dir, "lib%s.so" % name)
if os.path.exists(libfile):
return libfile
return None
def find_library(name, is64 = False):
return _get_soname(_findLib_crle(name, is64) or _findLib_gcc(name))
else:
def _findSoname_ldconfig(name):
import struct
if struct.calcsize('l') == 4:
machine = os.uname()[4] + '-32'
else:
machine = os.uname()[4] + '-64'
mach_map = {
'x86_64-64': 'libc6,x86-64',
'ppc64-64': 'libc6,64bit',
'sparc64-64': 'libc6,64bit',
's390x-64': 'libc6,64bit',
'ia64-64': 'libc6,IA-64',
}
abi_type = mach_map.get(machine, 'libc6')
# XXX assuming GLIBC's ldconfig (with option -p)
|
TomBaxter/waterbutler
|
tests/core/test_utils.py
|
Python
|
apache-2.0
| 2,451
| 0
|
import asyncio
from unittest import mock
import pytest
from waterbutler.core import utils
class TestAsyncRetry:
@pytest.mark.asyncio
async def test_returns_success(self):
mock_func = mock.Mock(return_value='Foo')
retryable = utils.async_retry(5, 0, raven=None)(mock_func)
x = await retryable()
assert x == 'Foo'
assert mock_func.call_count == 1
@pytest.mark.asyncio
async def test_retries_until(self):
mock_func = mock.Mock(side_effect=[Exception(), 'Foo'])
retryable = utils.async_retry(5, 0, raven=None)(mock_func)
x = await retryable()
assert x == 'Foo'
assert mock_func.call_count == 2
@pytest.mark.asyncio
async def test_retries_then_raises(self):
mock_func = mock.Mock(side_effect=Exception('Foo'))
retryable = utils.async_retry(5, 0, raven=None)(mock_func)
with pytest.raises(Exception) as e:
coro = await retryable()
assert e.type == Exception
        assert e.value.args == ('Foo',)
assert mock_func.call_count == 6
@pytest.mark.asyncio
async def test_retries_by_its_self(self):
mock_func = mock.Mock(side_effect=Exception())
        retryable = utils.async_retry(8, 0, raven=None)(mock_func)
retryable()
await asyncio.sleep(.1)
assert mock_func.call_count == 9
async def test_docstring_survives(self):
async def mytest():
'''This is a docstring'''
pass
retryable = utils.async_retry(8, 0, raven=None)(mytest)
assert retryable.__doc__ == '''This is a docstring'''
@pytest.mark.asyncio
async def test_kwargs_work(self):
async def mytest(mack, *args, **kwargs):
mack()
assert args == ('test', 'Foo')
assert kwargs == {'test': 'Foo', 'baz': 'bam'}
return True
retryable = utils.async_retry(8, 0, raven=None)(mytest)
merk = mock.Mock(side_effect=[Exception(''), 5])
fut = retryable(merk, 'test', 'Foo', test='Foo', baz='bam')
assert await fut
assert merk.call_count == 2
@pytest.mark.asyncio
async def test_all_retry(self):
mock_func = mock.Mock(side_effect=Exception())
retryable = utils.async_retry(8, 0, raven=None)(mock_func)
retryable()
retryable()
await asyncio.sleep(.1)
assert mock_func.call_count == 18
|
macosforge/ccs-calendarserver
|
txdav/common/datastore/podding/migration/test/test_migration.py
|
Python
|
apache-2.0
| 30,577
| 0.002943
|
##
# Copyright (c) 2015-2017 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from pycalendar.datetime import DateTime
from twext.enterprise.jobs.jobitem import JobItem
from twisted.internet import reactor
from twisted.internet.defer import inlineCallbacks, returnValue
from twisted.python.filepath import FilePath
from twistedcaldav.config import config
from twistedcaldav.ical import Component
from txdav.caldav.datastore.scheduling.ischedule.delivery import IScheduleRequest
from txdav.caldav.datastore.scheduling.ischedule.resource import IScheduleInboxResource
from txdav.caldav.datastore.scheduling.work import allScheduleWork
from txdav.caldav.datastore.test.common import CaptureProtocol
from txdav.common.datastore.podding.migration.home_sync import CrossPodHomeSync
from txdav.common.datastore.podding.migration.sync_metadata import CalendarMigrationRecord, \
AttachmentMigrationRecord, CalendarObjectMigrationRecord
from txdav.common.datastore.podding.migration.work import HomeCleanupWork, MigratedHomeCleanupWork, MigrationCleanupWork
from txdav.common.datastore.podding.test.util import MultiStoreConduitTest
from txdav.common.datastore.sql_directory import DelegateRecord,\
DelegateGroupsRecord, ExternalDelegateGroupsRecord
from txdav.common.datastore.sql_tables import _BIND_MODE_READ, \
_HOME_STATUS_DISABLED, _HOME_STATUS_NORMAL, _HOME_STATUS_EXTERNAL, \
_HOME_STATUS_MIGRATING
from txdav.common.datastore.test.util import populateCalendarsFrom
from txdav.who.delegates import Delegates
from txweb2.dav.test.util import SimpleRequest
from txweb2.http_headers import MimeType
from txweb2.stream import MemoryStream
class TestCompleteMigrationCycle(MultiStoreConduitTest):
"""
Test that a full migration cycle using L{CrossPodHomeSync} works.
"""
def __init__(self, methodName='runTest'):
super(TestCompleteMigrationCycle, self).__init__(methodName)
self.stash = {}
@inlineCallbacks
def setUp(self):
@inlineCallbacks
def _fakeSubmitRequest(iself, ssl, host, port, request):
pod = (port - 8008) / 100
inbox = IScheduleInboxResource(self.site.resource, self.theStoreUnderTest(pod), podding=True)
response = yield inbox.http_POST(SimpleRequest(
self.site,
"POST",
"http://{host}:{port}/podding".format(host=host, port=port),
request.headers,
request.stream.mem,
))
returnValue(response)
self.patch(IScheduleRequest, "_submitRequest", _fakeSubmitRequest)
self.accounts = FilePath(__file__).sibling("accounts").child("groupAccounts.xml")
self.augments = FilePath(__file__).sibling("accounts").child("augments.xml")
yield super(TestCompleteMigrationCycle, self).setUp()
yield self.populate()
# Speed up work
self.patch(MigrationCleanupWork, "notBeforeDelay", 1)
self.patch(HomeCleanupWork, "notBeforeDelay", 1)
self.patch(MigratedHomeCleanupWork, "notBeforeDelay", 1)
def configure(self):
super(TestCompleteMigrationCycle, self).configure()
config.GroupAttendees.Enabled = True
config.GroupAttendees.ReconciliationDelaySeconds = 0
config.GroupAttendees.AutoUpdateSecondsFromNow = 0
config.AccountingCategories.migration = True
config.AccountingPrincipals = ["*"]
@inlineCallbacks
def populate(self):
yield populateCalendarsFrom(self.requirements0, self.theStoreUnderTest(0))
yield populateCalendarsFrom(self.requirements1, self.theStoreUnderTest(1))
requirements0 = {
"user01": None,
"user02": None,
"user03": None,
"user04": None,
"user05": None,
"user06": None,
"user07": None,
"user08": None,
"user09": None,
"user10": None,
}
requirements1 = {
"puser01": None,
"puser02": None,
"puser03": None,
"puser04": None,
"puser05"
|
: None,
"puser06": None,
"puser07": None,
"puser08": None,
"puser09": None,
"puser10": None,
}
@inlineCallbacks
def _createShare(self, shareFrom, shareTo, accept=True):
# Invite
        txnindex = 1 if shareFrom[0] == "p" else 0
home = yield self.homeUnderTest(txn=self.theTransactionUnderTest(txnindex), name=shareFrom, create=True)
calendar = yield home.childWithName("calendar")
shareeView = yield calendar.inviteUIDToShare(shareTo, _BIND_MODE_READ, "summary")
yield self.commitTransaction(txnindex)
# Accept
if accept:
inviteUID = shareeView.shareUID()
txnindex = 1 if shareTo[0] == "p" else 0
shareeHome = yield self.homeUnderTest(txn=self.theTransactionUnderTest(txnindex), name=shareTo)
shareeView = yield shareeHome.acceptShare(inviteUID)
sharedName = shareeView.name()
yield self.commitTransaction(txnindex)
else:
sharedName = None
returnValue(sharedName)
def attachmentToString(self, attachment):
"""
Convenience to convert an L{IAttachment} to a string.
@param attachment: an L{IAttachment} provider to convert into a string.
@return: a L{Deferred} that fires with the contents of the attachment.
@rtype: L{Deferred} firing C{bytes}
"""
capture = CaptureProtocol()
attachment.retrieve(capture)
return capture.deferred
now = {
"now": DateTime.getToday().getYear(),
"now1": DateTime.getToday().getYear() + 1,
}
data01_1 = """BEGIN:VCALENDAR
VERSION:2.0
CALSCALE:GREGORIAN
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:uid_data01_1
DTSTART:{now1:04d}0102T140000Z
DURATION:PT1H
CREATED:20060102T190000Z
DTSTAMP:20051222T210507Z
RRULE:FREQ=WEEKLY
SUMMARY:data01_1
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n").format(**now)
data01_1_changed = """BEGIN:VCALENDAR
VERSION:2.0
CALSCALE:GREGORIAN
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:uid_data01_1
DTSTART:{now1:04d}0102T140000Z
DURATION:PT1H
CREATED:20060102T190000Z
DTSTAMP:20051222T210507Z
RRULE:FREQ=WEEKLY
SUMMARY:data01_1_changed
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n").format(**now)
data01_2 = """BEGIN:VCALENDAR
VERSION:2.0
CALSCALE:GREGORIAN
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:uid_data01_2
DTSTART:{now1:04d}0102T160000Z
DURATION:PT1H
CREATED:20060102T190000Z
DTSTAMP:20051222T210507Z
SUMMARY:data01_2
ORGANIZER:mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:puser02@example.com
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n").format(**now)
data01_3 = """BEGIN:VCALENDAR
VERSION:2.0
CALSCALE:GREGORIAN
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:uid_data01_3
DTSTART:{now1:04d}0102T180000Z
DURATION:PT1H
CREATED:20060102T190000Z
DTSTAMP:20051222T210507Z
SUMMARY:data01_3
ORGANIZER:mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:group02@example.com
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n").format(**now)
data02_1 = """BEGIN:VCALENDAR
VERSION:2.0
CALSCALE:GREGORIAN
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:uid_data02_1
DTSTART:{now1:04d}0103T140000Z
DURATION:PT1H
CREATED:20060102T190000Z
DTSTAMP:20051222T210507Z
RRULE:FREQ=WEEKLY
SUMMARY:data02_1
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n").format(**now)
data02_2 = """BEGIN:VCA
|
AlexSafatli/EclipseBoardGame
|
record.py
|
Python
|
gpl-2.0
| 2,974
| 0.014459
|
# record.py
# -------------------------
# Fall 2012; Alex Safatli
# -------------------------
# Software package for handling
# the recording and calculating
# of player scores for the Eclipse
# board game, along with keeping
# track of individual matches.
# Imports
import os, cPickle, datetime
# Match class. Encapsulates a match.
class match():
def __init__(self,participants,results):
self.participants = participants # All players that participated and their roles.
self.results = results # Final VP counts,
self.timestamp = datetime.datetime.now()
self.date = self.timestamp.strftime("%Y-%m-%d")
self.maxvp = max(results.values())
self.changes = {}
def __str__(self):
plyrs = ', '.join(self.participants.keys())
        return '[%s] %s' % (self.date, plyrs)
# Player class. Encapsulates a player.
class player():
def __init__(self,name,score=1200):
self.name = name
self.score = score
def __str__(self):
return self.name
# Score processing.
class scores():
def __init__(self,playerdb,matchdb):
def loadData(fn):
fh = open(fn,'r')
dt = cPickle.load(fh)
fh.close()
return dt
self.playerdb = playerdb
self.matchdb = matchdb
self.players = {}
self.matches = []
if os.path.isfile(playerdb):
self.players = loadData(playerdb)
if os.path.isfile(matchdb):
self.matches = loadData(matchdb)
def update(self):
def dumpData(fn,db):
fh = open(fn,'w')
cPickle.dump(db,fh)
fh.close()
# Update both databases.
dumpData(self.playerdb,self.players)
dumpData(self.matchdb,self.matches)
def numGames(self,player):
        # Count the number of games for player.
num = 0
if player not in self.players:
return 0
for m in self.matches:
if player in m.participants:
                num += 1
return num
def processMatch(self,match):
maxvp = match.maxvp
for player in match.participants:
            # See how much of a score increase.
vp = match.results[player]
modifier = 1.0 - 0.2*((maxvp-vp)/(maxvp/10.0))
c = self.changeScore(player,modifier)
match.changes[player] = c
self.matches.append(match)
self.update()
def changeScore(self,player,modifier):
if player not in self.players:
# Default player score.
self.players[player] = 100
numgames = self.numGames(player)
incre = int(11*(1-(numgames+1)/1000.0))
if incre < 1:
incre = 1
change = int(incre*modifier)
self.players[player] += change
self.update()
return change
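# --- Illustrative usage sketch (added for clarity; not part of the original module) ---
# A minimal session: load (or create) the databases, record one match, and let
# processMatch() persist the updated scores. File names and VP values are
# assumptions for illustration only:
#
#     db = scores('players.db', 'matches.db')
#     participants = {'Alex': 'Terrans', 'Sam': 'Planta'}
#     results = {'Alex': 32, 'Sam': 27}
#     db.processMatch(match(participants, results))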
|
vacancy/TensorArtist
|
tartist/nn/tfutils.py
|
Python
|
mit
| 2,691
| 0.00223
|
# -*- coding:utf8 -*-
# File : tfutils.py
# Author : Jiayuan Mao
# Email : maojiayuan@gmail.com
# Date : 1/31/17
#
# This file is part of TensorArtist.
import re
import tensorflow as tf
class TArtGraphKeys:
PLACEHOLDERS = 'placeholders'
TART_VARIABLES = 'tart_variables'
INFERENCE_SUMMARIES = 'inference_summaries'
SCALAR_VARIABLES = 'scalar_variables'
OPTIMIZER_VARIABLES = 'optimizer_variables'
# DEPRECATED: (2017-12-02)
TART_OPERATORS = 'tart_operators'
def clean_name(tensor, suffix=':0'):
name = tensor.name
    if name.endswith(suffix):
name = name[:-len(suffix)]
return name
def escape_name(tensor):
name = tensor.name
return re.sub(':|/', '_', name)
def clean_summary_suffix(name):
return re.sub('_\d+$', '', name)
def remove_tower_name(name):
return re.sub('^tower/\d+/', '', name)
def format_summary_name(name):
name = clean_summary_suffix(name)
name = remove_tower_name(name)
if 'train/' in name:
name = name.replace('train/', '')
name = 'train/' + name
return name
def assign_variable(var, value, session=None, use_locking=False):
from .graph.env import get_default_env
session = session or get_default_env().session
session.run(var.assign(value, use_locking=use_locking))
def fetch_variable(var, session=None):
from .graph.env import get_default_env
session = session or get_default_env().session
try:
return session.run(var)
except tf.errors.FailedPreconditionError:
session.run(var.initializer)
return session.run(var)
def fetch_variables(var_list, session=None):
from .graph.env import get_default_env
session = session or get_default_env().session
try:
return session.run(var_list)
    except tf.errors.FailedPreconditionError as e:
raise ValueError('Uninitialized variable(s) encountered in fetch_variables') from e
def assign_variables(var_list_or_dict, value_list=None, session=None, use_locking=False):
from .graph.env import get_default_env
session = session or get_default_env().session
assigns = []
if isinstance(var_list_or_dict, dict):
iterator = var_list_or_dict.items()
else:
iterator = zip(var_list_or_dict, value_list)
for var, value in iterator:
assigns.append(tf.assign(var, value, use_locking=use_locking, name='assign_{}'.format(escape_name(var))))
session.run(tf.group(*assigns))
def extend_collection_list(base, *others):
if base is None:
return others
if type(base) is str:
return (base, ) + others
assert isinstance(base, (tuple, list))
return tuple(base) + others
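# --- Illustrative usage sketch (added for clarity; not part of the original module) ---
# These helpers are meant to run against the default TensorArtist environment
# session; tf is already imported above. The variable below is an assumption
# for illustration only:
#
#     v = tf.Variable(0.0, name='counter')
#     assign_variable(v, 3.0)       # runs v.assign(3.0) in the default env session
#     print(fetch_variable(v))      # -> 3.0 (initializes the variable if needed)
#     print(clean_name(v), escape_name(v))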
|
RudolfCardinal/pythonlib
|
cardinal_pythonlib/sqlalchemy/dump.py
|
Python
|
apache-2.0
| 19,483
| 0
|
#!/usr/bin/env python
# cardinal_pythonlib/sqlalchemy/dump.py
"""
===============================================================================
Original code copyright (C) 2009-2021 Rudolf Cardinal (rudolf@pobox.com).
This file is part of cardinal_pythonlib.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
===============================================================================
**Functions to help with large-scale dumping of data from SQLAlchemy systems.**
"""
import datetime
import decimal
import sys
from typing import Any, Callable, Dict, TextIO, Type, Union
import pendulum
# noinspection PyProtectedMember
from sqlalchemy.engine import Connectable, create_engine
from sqlalchemy.engine.base import Engine
from sqlalchemy.engine.default import DefaultDialect # for type hints
from sqlalchemy.ext.declarative import declarative_base, DeclarativeMeta
from sqlalchemy.inspection import inspect
from sqlalchemy.orm.query import Query
from sqlalchemy.sql.base import Executable
from sqlalchemy.sql.elements import BindParameter
from sqlalchemy.sql.expression import select
from sqlalchemy.sql.schema import MetaData, Table
from sqlalchemy.sql.sqltypes import DateTime, NullType, String
from cardinal_pythonlib.file_io import writeline_nl, writelines_nl
from cardinal_pythonlib.logs import get_brace_style_log_with_null_handler
from cardinal_pythonlib.sql.literals import sql_comment
from cardinal_pythonlib.sqlalchemy.dialect import SqlaDialectName
from cardinal_pythonlib.sqlalchemy.orm_inspect import walk_orm_tree
from cardinal_pythonlib.sqlalchemy.schema import get_table_names
log = get_brace_style_log_with_null_handler(__name__)
SEP1 = sql_comment("=" * 76)
SEP2 = sql_comment("-" * 76)
# =============================================================================
# Dump functions: get DDL and/or data as SQL commands
# =============================================================================
def dump_connection_info(engine: Engine, fileobj: TextIO = sys.stdout) -> None:
"""
Dumps some connection info, as an SQL comment. Obscures passwords.
Args:
engine: the SQLAlchemy :class:`Engine` to dump metadata information
from
fileobj: the file-like object (default ``sys.stdout``) to write
information to
"""
meta = MetaData(bind=engine)
writeline_nl(fileobj, sql_comment(f'Database info: {meta}'))
def dump_ddl(metadata: MetaData,
dialect_name: str,
fileobj: TextIO = sys.stdout,
checkfirst: bool = True) -> None:
"""
Sends schema-creating DDL from the metadata to the dump engine.
This makes ``CREATE TABLE`` statements.
Args:
metadata: SQLAlchemy :class:`MetaData`
dialect_name: string name of SQL dialect to generate DDL in
fileobj: file-like object to send DDL to
checkfirst: if ``True``, use ``CREATE TABLE IF NOT EXISTS`` or
equivalent.
"""
# http://docs.sqlalchemy.org/en/rel_0_8/faq.html#how-can-i-get-the-create-table-drop-table-output-as-a-string # noqa
# https://stackoverflow.com/questions/870925/how-to-generate-a-file-with-ddl-in-the-engines-sql-dialect-in-sqlalchemy # noqa
# https://github.com/plq/scripts/blob/master/pg_dump.py
# noinspection PyUnusedLocal
def dump(querysql, *multiparams, **params):
compsql = querysql.compile(dialect=engine.dialect)
writeline_nl(fileobj, f"{compsql};")
writeline_nl(fileobj,
sql_comment(f"Schema (for dialect {dialect_name}):"))
engine = create_engine(f"{dialect_name}://",
strategy="mock", executor=dump)
metadata.create_all(engine, checkfirst=checkfirst)
# ... checkfirst doesn't seem to be working for the mock strategy...
# http://docs.sqlalchemy.org/en/latest/core/metadata.html
# ... does it implement a *real* check (impossible here), rather than
# issuing CREATE ... IF NOT EXISTS?
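# --- Illustrative usage sketch (added for clarity; not part of the original module) ---
# dump_ddl() emits CREATE TABLE statements for a MetaData object in the chosen
# dialect without touching a live database. The table definition below is an
# assumption for illustration only:
#
#     from sqlalchemy import Column, Integer, Table, Text
#
#     md = MetaData()
#     Table("note", md,
#           Column("id", Integer, primary_key=True),
#           Column("body", Text))
#     dump_ddl(md, dialect_name="mysql", fileobj=sys.stdout)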
def quick_mapper(table: Table) -> Type[DeclarativeMeta]:
"""
Makes a new SQLAlchemy mapper for an existing table.
See
https://www.tylerlesmann.com/2009/apr/27/copying-databases-across-platforms-sqlalchemy/
Args:
table: SQLAlchemy :class:`Table` object
Returns:
a :class:`DeclarativeMeta` class
""" # noqa
# noinspection PyPep8Naming
Base = declarative_base()
class GenericMapper(Base):
__table__ = table
# noinspection PyTypeChecker
return GenericMapper
class StringLiteral(String):
"""
Teach SQLAlchemy how to literalize various things.
See
https://stackoverflow.com/questions/5631078/sqlalchemy-print-the-actual-query
"""
def literal_processor(self,
dialect: DefaultDialect) -> Callable[[Any], str]:
super_processor = super().literal_processor(dialect)
def process(value: Any) -> str:
log.debug("process: {!r}", value)
if isinstance(value, int):
return str(value)
if not isinstance(value, str):
value = str(value)
result = super_processor(value)
if isinstance(result, bytes):
result = result.decode(dialect.encoding)
return result
return process
# noinspection PyPep8Naming
def make_literal_query_fn(dialect: DefaultDialect) -> Callable[[str], str]:
DialectClass = dialect.__class__
# noinspection PyClassHasNoInit,PyAbstractClass
class LiteralDialect(DialectClass):
# https://stackoverflow.com/questions/5631078/sqlalchemy-print-the-actual-query # noqa
colspecs = {
# prevent various encoding explosions
String: StringLiteral,
# teach SA about how to literalize a datetime
DateTime: StringLiteral,
# don't format py2 long integers to NULL
NullType: StringLiteral,
}
def literal_query(statement: str) -> str:
"""
NOTE: This is entirely insecure. DO NOT execute the resulting
strings.
"""
# https://stackoverflow.com/questions/5631078/sqlalchemy-print-the-actual-query # noqa
if isinstance(statement, Query):
statement = statement.statement
return statement.compile(
dialect=LiteralDialect(),
compile_kwargs={'literal_binds': True},
).string + ";"
return literal_query
# noinspection PyProtectedMember
def get_literal_query(statement: Union[Query, Executable],
bind: Connectable = None) -> str:
"""
Takes an SQLAlchemy statement and produces a literal SQL version, with
values filled in.
As per
https://stackoverflow.com/questions/5631078/sqlalchemy-print-the-actual-query
Notes:
- for debugging purposes *only*
- insecure; you should always separate queries from their values
- please also note that this function is quite slow
Args:
statement: the SQL statement (a SQLAlchemy object) to use
bind: if the statement is unbound, you will need to specify an object
here that supports SQL execution
Returns:
a string literal version of the query.
""" # noqa
# log.debug("statement: {!r}", statement)
# log.debug("statement.bind: {!r}", statement.bind)
if isinstance(statement, Query):
if bind is None:
bind = statement.session.get_bind(statement._mapper_zero_or_none())
statement = statement.statement
elif bind is None:
bind = statement.bind
if bind is None: # despite all that
raise ValueError("Attempt to call get
|
dNG-git/mp_core
|
src/dNG/data/upnp/search/common_mp_entry_segment.py
|
Python
|
gpl-2.0
| 16,034
| 0.005987
|
# -*- coding: utf-8 -*-
"""
MediaProvider
A device centric multimedia solution
----------------------------------------------------------------------------
(C) direct Netware Group - All rights reserved
https://www.direct-netware.de/redirect?mp;core
The following license agreement remains valid unless any additions or
changes are being made by direct Netware Group in a written form.
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2 of the License, or (at your
option) any later version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
----------------------------------------------------------------------------
https://www.direct-netware.de/redirect?licenses;gpl
----------------------------------------------------------------------------
#echo(mpCoreVersion)#
#echo(__FILEPATH__)#
"""
from dNG.data.upnp.resources.mp_entry import MpEntry
from dNG.database.condition_definition import ConditionDefinition
from dNG.database.connection import Connection
from dNG.database.sort_definition import SortDefinition
from dNG.plugins.hook import Hook
from dNG.runtime.value_exception import ValueException
from .abstract_segment import AbstractSegment
from .criteria_definition import CriteriaDefinition
class CommonMpEntrySegment(AbstractSegment):
"""
"CommonMpEntrySegment" provides UPnP searches for "MpEntry" instances.
:author: direct Netware Group et al.
:copyright: direct Netware Group - All rights reserved
:package: mp
:subpackage: core
:since: v0.2.00
:license: https://www.direct-netware.de/redirect?licenses;gpl
GNU General Public License 2
"""
def __init__(self):
"""
Constructor __init__(SearchResources)
|
:since: v0.2.00
"""
AbstractSegment.__init__(self)
self.condition_definition = None
"""
Database query condition definition
"""
self.pre_condition_failed = False
"""
True if a pre-condition fails
"""
#
def _ensure_condition_definition(self):
"""
Checks and sets the database query condition definition based on the defined
UPnP criteria definition specified.
:since: v0.2.00
"""
if ((not self.pre_condition_failed) and self.condition_definition is None):
self.condition_definition = self._rewrite_criteria_definition_walker(self.criteria_definition)
#
#
def get_count(self):
"""
Returns the total number of matches in this UPnP search segment.
:return: (int) Number of matches
:since: v0.2.00
"""
self._ensure_condition_definition()
return (0
if (self.pre_condition_failed) else
MpEntry.get_entries_count_with_condition(self.condition_definition)
)
#
def get_list(self):
"""
Returns the list of UPnP resource search segment results as defined by
"offset" and "limit".
:return: (list) List of search segment results
:since: v0.2.00
"""
_return = [ ]
self._ensure_condition_definition()
sort_definition = SortDefinition()
# @TODO: if (len(self.sort_tuples) > 0): MpEntry._db_append_didl_field_sort_definition
sort_definition.append("title", SortDefinition.ASCENDING)
if (not self.pre_condition_failed):
with Connection.get_instance():
entries = MpEntry.load_entries_list_with_condition(self.condition_definition,
self.offset,
self.limit,
sort_definition
)
for entry in entries:
if (self.client_user_agent is not None): entry.set_client_user_agent(self.client_user_agent)
_return.append(entry)
#
#
#
return _return
#
def _get_property_attribute_name(self, _property):
"""
Returns the database attribute name for the given lower-case property.
:param property: Lower-case property
:return: (str) Database attribute name
:since: v0.2.00
"""
_return = None
if (_property == "@id"): _return = "id"
elif (_property == "@refid"): _return = "resource"
elif (_property in ( "dc:date", "upnp:recordedStartDateTime" )): _return = "time_sortable"
elif (_property == "dc:description"): _return = "description"
elif (_property == "dc:title"): _return = "title"
elif (_property == "res@size"): _return = "size"
elif (_property == "upnp:class"): _return = "identity"
if (_return is None): raise ValueException("UPnP property '{0}' not defined".format(_property))
return _return
#
def _rewrite_criteria_definition_walker(self, criteria_definition):
"""
Adds the specified criteria to the given database query condition
definition.
:param criteria_definition: Criteria definition instance
:return: (object) Database condition definition instance
:since: v0.2.00
"""
condition_concatenation = (ConditionDefinition.AND
if (criteria_definition.get_concatenation() == CriteriaDefinition.AND) else
ConditionDefinition.OR
)
_return = ConditionDefinition(condition_concatenation)
for criteria in criteria_definition.get_criteria():
condition_method = None
criteria_property = criteria.get("property")
criteria_type = criteria['type']
criteria_value = None
if (criteria_property == "@id"
and "value" in criteria
and "://" in criteria['value']
): criteria_value = criteria['value'].split("://", 1)[1]
if (criteria_property == "@refid" and
criteria_type in ( CriteriaDefinition.TYPE_DEFINED_MATCH, CriteriaDefinition.TYPE_NOT_DEFINED_MATCH)
):
value_list = Hook.call("mp.upnp.MpResource.getReferenceDbIdentities")
if (type(value_list) is list
and len(value_list) > 0
):
if (criteria_type == CriteriaDefinition.TYPE_DEFINED_MATCH): _return.add_in_list_match_condition("identity", value_list)
else: _return.add_not_in_list_match_condition("identity", value_list)
elif (criteria_type == CriteriaDefinition.TYPE_DEFINED_MATCH):
self.pre_condition_failed = True
break
#
elif (criteria_type == CriteriaDefinition.TYPE_SUB_CRITERIA):
condition_definition = self._rewrite_criteria_definition_walker(criteria['criteria_definition'])
if (self.pre_condition_failed): break
else: _return.add_sub_condition(condition_definition)
elif (criteria_type == CriteriaDefinition.TYPE_CASE_INSENSITIVE_MATCH):
condition_method = _return.add_case_insensitive_match_condition
criteria_value = "*{0}*".format(criteria['value'])
elif (criteria_type == CriteriaDefinition.TYPE_CASE_INSENSITIVE_NO_MATCH):
condition_method = _return.add_case_insensitive_no_match_condition
criteria_value = "*{0}*".format(criteria['value'])
elif (criteria_type == CriteriaDefinition.TYPE_DEFINED_MATCH):
attribute = self._get_property_attribute_name(criteria['property'])
|
caphrim007/ansible
|
lib/ansible/plugins/action/__init__.py
|
Python
|
gpl-3.0
| 50,188
| 0.003447
|
# Copyright: (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
# Copyright: (c) 2018, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import base64
import json
import os
import random
import re
import stat
import tempfile
import time
from abc import ABCMeta, abstractmethod
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleActionSkip, AnsibleActionFail
from ansible.executor.module_common import modify_module
from ansible.module_utils.json_utils import _filter_non_json_lines
from ansible.module_utils.six import binary_typ
|
e, string_types, text_type, iteritems, with_metaclass
from ansible.module_utils.six.moves import shlex_quote
from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.parsing.utils.jsonify import jsonify
from ansible.release import __version__
from ansible.utils.unsafe_proxy import wrap_var
from ansible.vars.clean import remove_internal_keys
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class ActionBase(with_metaclass(ABCMeta, object)):
'''
This class is the base class for all action plugins, and defines
code common to all actions. The base class handles the connection
by putting/getting files and executing commands based on the current
action in use.
'''
# A set of valid arguments
_VALID_ARGS = frozenset([])
def __init__(self, task, connection, play_context, loader, templar, shared_loader_obj):
self._task = task
self._connection = connection
self._play_context = play_context
self._loader = loader
self._templar = templar
self._shared_loader_obj = shared_loader_obj
self._cleanup_remote_tmp = False
self._supports_check_mode = True
self._supports_async = False
# Backwards compat: self._display isn't really needed, just import the global display and use that.
self._display = display
self._used_interpreter = None
@abstractmethod
def run(self, tmp=None, task_vars=None):
""" Action Plugins should implement this method to perform their
tasks. Everything else in this base class is a helper method for the
action plugin to do that.
:kwarg tmp: Deprecated parameter. This is no longer used. An action plugin that calls
another one and wants to use the same remote tmp for both should set
self._connection._shell.tmpdir rather than this parameter.
:kwarg task_vars: The variables (host vars, group vars, config vars,
etc) associated with this task.
:returns: dictionary of results from the module
Implementors of action modules may find the following variables especially useful:
* Module parameters. These are stored in self._task.args
"""
result = {}
if tmp is not None:
result['warning'] = ['ActionModule.run() no longer honors the tmp parameter. Action'
' plugins should set self._connection._shell.tmpdir to share'
' the tmpdir']
del tmp
if self._task.async_val and not self._supports_async:
raise AnsibleActionFail('async is not supported for this task.')
elif self._play_context.check_mode and not self._supports_check_mode:
raise AnsibleActionSkip('check mode is not supported for this task.')
elif self._task.async_val and self._play_context.check_mode:
raise AnsibleActionFail('check mode and async cannot be used on same task.')
# Error if invalid argument is passed
if self._VALID_ARGS:
task_opts = frozenset(self._task.args.keys())
bad_opts = task_opts.difference(self._VALID_ARGS)
if bad_opts:
raise AnsibleActionFail('Invalid options for %s: %s' % (self._task.action, ','.join(list(bad_opts))))
if self._connection._shell.tmpdir is None and self._early_needs_tmp_path():
self._make_tmp_path()
return result
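    # A hedged sketch (not part of this file) of how concrete action plugins
    # typically build on run(): call the base implementation first, then execute
    # the target module and merge its result. Extra keyword arguments to
    # _execute_module vary by plugin and Ansible version.
    #
    #     class ActionModule(ActionBase):
    #         def run(self, tmp=None, task_vars=None):
    #             result = super(ActionModule, self).run(tmp, task_vars)
    #             del tmp  # tmp no longer has any effect
    #             result.update(self._execute_module(task_vars=task_vars))
    #             return result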
def _remote_file_exists(self, path):
cmd = self._connection._shell.exists(path)
result = self._low_level_execute_command(cmd=cmd, sudoable=True)
if result['rc'] == 0:
return True
return False
def _configure_module(self, module_name, module_args, task_vars=None):
'''
Handles the loading and templating of the module code through the
modify_module() function.
'''
if task_vars is None:
task_vars = dict()
# Search module path(s) for named module.
for mod_type in self._connection.module_implementation_preferences:
# Check to determine if PowerShell modules are supported, and apply
# some fixes (hacks) to module name + args.
if mod_type == '.ps1':
# win_stat, win_file, and win_copy are not just like their
# python counterparts but they are compatible enough for our
# internal usage
if module_name in ('stat', 'file', 'copy') and self._task.action != module_name:
module_name = 'win_%s' % module_name
# Remove extra quotes surrounding path parameters before sending to module.
if module_name in ('win_stat', 'win_file', 'win_copy', 'slurp') and module_args and hasattr(self._connection._shell, '_unquote'):
for key in ('src', 'dest', 'path'):
if key in module_args:
module_args[key] = self._connection._shell._unquote(module_args[key])
module_path = self._shared_loader_obj.module_loader.find_plugin(module_name, mod_type)
if module_path:
break
else: # This is a for-else: http://bit.ly/1ElPkyg
# Use Windows version of ping module to check module paths when
# using a connection that supports .ps1 suffixes. We check specifically
# for win_ping here, otherwise the code would look for ping.ps1
if '.ps1' in self._connection.module_implementation_preferences:
ping_module = 'win_ping'
else:
ping_module = 'ping'
module_path2 = self._shared_loader_obj.module_loader.find_plugin(ping_module, self._connection.module_implementation_preferences)
if module_path2 is not None:
raise AnsibleError("The module %s was not found in configured module paths" % (module_name))
else:
raise AnsibleError("The module %s was not found in configured module paths. "
"Additionally, core modules are missing. If this is a checkout, "
"run 'git pull --rebase' to correct this problem." % (module_name))
# insert shared code and arguments into the module
final_environment = dict()
self._compute_environment_string(final_environment)
(module_data, module_style, module_shebang) = modify_module(module_name, module_path, module_args, self._templar,
task_vars=task_vars,
module_compression=self._play_context.module_compression,
async_timeout=self._task.async_val,
become=self._play_context.become,
become_method=self._play_context.become_method,
become_user=self._play_context.become_user,
become_password=self._play_con
|
lumened/touch-flux
|
src/demo.py
|
Python
|
gpl-2.0
| 805
| 0.012422
|
import time, os
import io
import time
import picamera
os.putenv('SDL_VIDEODRIVER', 'fbcon')
os.putenv('SDL_FBDEV' , '/dev/fb1')
os.putenv('SDL_MOUSEDRV' , 'TSLIB')
os.putenv('SDL_MOUSEDEV' , '/dev/input/touchscreen')
os.putenv('SDL_AUDIODR
|
IVER' , 'alsa')
# Create an in-memory stream
#my_stream = io.BytesIO()
#with picamera.PiCamera() as camera:
path="/home/pi/videos"
files = os.listdir(path)
files = sorted(files)
last_file = files[-1]
# index = -1
i = 0
while i < len(last_file):
if last_file[i].isdigit():
break
i = i+1;
start_index = i
while i < len(last_file):
if not last_file[i].isdigit():
break
i = i+1
end_index = i
print files
print last_file
print start_index
print end_index
#end_index = end
|
_index-1
print int(last_file[start_index:end_index])
|
danforthcenter/plantcv
|
plantcv/plantcv/y_axis_pseudolandmarks.py
|
Python
|
mit
| 9,161
| 0.002729
|
# Function to scan for pseudolandmarks along the y-axis
import cv2
import os
import numpy as np
from plantcv.plantcv._debug import _debug
from plantcv.plantcv import params
from plantcv.plantcv import outputs
from plantcv.plantcv import fatal_error
def y_axis_pseudolandmarks(img, obj, mask, label="default"):
"""
Divide up object contour into 19 equidistant segments and generate landmarks for each
Inputs:
img = This is a copy of the original plant image generated using np.copy; if debug is true it will be drawn on
obj = a contour of the plant object (this should be output from the object_composition.py fxn)
mask = this is a binary image. The object should be white and the background should be black
label = optional label parameter, modifies the variable name of observations recorded
Returns:
left = List of landmarks within the left side
right = List of landmarks within the right side
center_h = List of landmarks within the center
:param img: numpy.ndarray
:param obj: list
:param mask: numpy.ndarray
:param label: str
:return left: list
:return right: list
:return center_h: list
"""
    # Let's get some landmarks scanning along the y-axis
if not np.any(obj):
return ('NA', 'NA'), ('NA', 'NA'), ('NA', 'NA')
x, y, width, height = cv2.boundingRect(obj)
extent = height
# Outputs
left = []
right = []
center_h = []
left_list = []
right_list = []
center_h_list = []
# If height is greater than 21 pixels make 20 increments (5% intervals)
if extent >= 21:
inc = int(extent / 21)
# Define variable for max points and min points
pts_max = []
pts_min = []
# Get max and min points for each of the intervals
for i in range(1, 21):
if i == 1:
pt_max = y
pt_min = y + (inc * i)
else:
pt_max = y + (inc * (i - 1))
pt_min = y + (inc * i)
# Put these in an array
pts_max.append(pt_max)
pts_min.append(pt_min)
# Combine max and min into a set of tuples
point_range = list(zip(pts_max, pts_min))
# define some list variables to fill
row_median = []
row_ave = []
max_width = []
left_points = []
right_points = []
y_vals = []
x_centroids = []
y_centroids = []
# For each of the 20 intervals
for pt in point_range:
# Get the lower and upper bounds
            # (lower and higher in terms of value; the low point is actually towards the top of the photo, the higher point towards the bottom)
low_point, high_point = pt
# Get all rows within these two points
rows = []
lps = []
rps = []
# Get a continuous list of the values between the top and the bottom of the interval save as vals
vals = list(range(low_point, high_point))
# For each row... get all coordinates from object contour that match row
for v in vals:
# Value is all entries that match the row
value = obj[v == obj[:, 0, 1]]
if len(value) > 0:
# Could potentially be more than two points in all contour in each pixel row
# Grab largest x coordinate (column)
largest = value[:, 0, 0].max()
# Grab smallest x coordinate (column)
smallest = value[:, 0, 0].min()
# Take the difference between the two (this is how far across the object is on this plane)
row_width = largest - smallest
# Append this value to a list
rows.append(row_width)
lps.append(smallest)
rps.append(largest)
if len(value) == 0:
row_width = 1
rows.append(row_width)
lps.append(1)
rps.append(1)
# For each of the points find the median and average width
row_median.append(np.median(np.array(rows)))
row_ave.append(np.mean(np.array(rows)))
max_width.append(np.max(np.array(rows)))
left_points.append(np.mean(smallest))
right_points.append(np.mean(largest))
yval = int((high_point + low_point) / 2)
y_vals.append(yval)
# Make a copy of the mask; we want to get landmark points from this
window = np.copy(mask)
window[:low_point] = 0
window[high_point:] = 0
s = cv2.moments(window)
# Centroid (center of mass x, center of mass y)
if largest - smallest > 3:
if s['m00'] > 0.001:
smx, smy = (s['m10'] / s['m00'], s['m01'] / s['m00'])
x_centroids.append(int(smx))
y_centroids.append(int(smy))
if s['m00'] < 0.001:
smx, smy = (s['m10'] / 0.001, s['m0
|
1'] / 0.001)
x_centroids.append(int(smx))
y_centroids.append(int(smy))
else:
smx = (largest + smallest) / 2
smy = yval
x_centroids.append(int(smx))
y_centroids.append(int(smy))
left = list(zip(left_points, y_vals))
left = np.array(left)
left.shape = (20, 1, 2)
right = list(
|
zip(right_points, y_vals))
right = np.array(right)
right.shape = (20, 1, 2)
center_h = list(zip(x_centroids, y_centroids))
center_h = np.array(center_h)
center_h.shape = (20, 1, 2)
img2 = np.copy(img)
for i in left:
x = i[0, 0]
y = i[0, 1]
cv2.circle(img2, (int(x), int(y)), params.line_thickness, (255, 0, 0), -1)
for i in right:
x = i[0, 0]
y = i[0, 1]
cv2.circle(img2, (int(x), int(y)), params.line_thickness, (255, 0, 255), -1)
for i in center_h:
x = i[0, 0]
y = i[0, 1]
cv2.circle(img2, (int(x), int(y)), params.line_thickness, (0, 79, 255), -1)
_debug(visual=img2,
filename=os.path.join(params.debug_outdir, (str(params.device) + '_y_axis_pseudolandmarks.png')))
elif extent < 21:
# If the length of the object is less than 20 pixels just make the object a 20 pixel rectangle
x, y, width, height = cv2.boundingRect(obj)
y_coords = list(range(y, y + 20))
l_points = [x] * 20
left = list(zip(l_points, y_coords))
left = np.array(left)
left.shape = (20, 1, 2)
r_points = [x + width] * 20
right = list(zip(r_points, y_coords))
right = np.array(right)
right.shape = (20, 1, 2)
m = cv2.moments(mask, binaryImage=True)
# Centroid (center of mass x, center of mass y)
if m['m00'] == 0:
fatal_error('Check input parameters, first moment=0')
else:
cmx, cmy = (m['m10'] / m['m00'], m['m01'] / m['m00'])
c_points = [cmx] * 20
center_h = list(zip(c_points, y_coords))
center_h = np.array(center_h)
center_h.shape = (20, 1, 2)
img2 = np.copy(img)
for i in left:
x = i[0, 0]
y = i[0, 1]
cv2.circle(img2, (int(x), int(y)), params.line_thickness, (255, 0, 0), -1)
for i in right:
x = i[0, 0]
y = i[0, 1]
cv2.circle(img2, (int(x), int(y)), params.line_thickness, (255, 0, 255), -1)
for i in center_h:
x = i[0, 0]
y = i[0, 1]
cv2.circle(img2, (int(x), int(y)), params.line_thickness, (0, 79, 255), -1)
_debug(visual=img2,
filename=os.path.join(params.debug_outdir, (str(params.device) + '_y_axis_pseudolandmarks.png')))
# Store into global measurements
for pt in left:
left_list.append(pt[0].tolist())
for pt in right:
|
OpenMined/PySyft
|
packages/syft/src/syft/core/node/common/node_service/generic_payload/syft_message.py
|
Python
|
apache-2.0
| 3,573
| 0.002239
|
# stdlib
from typing import Any
from typing import Dict
from typing import List
from typing import Optional
# third party
from nacl.signing import VerifyKey
from pydantic import BaseModel
from pydantic.error_wrappers import ValidationError as PydanticValidationError
# relative
from .....common.message import ImmediateSyftMessage
from .....common.message import SignedMessage
from .....common.uid import UID
from .....io.address import Address
from ....abstract.node_service_interface import NodeServiceInterface
from ....common.exceptions import AuthorizationError
from ....common.exceptions import BadPayloadException
from ....common.exceptions import PermissionsNotDefined
# Inner Payload message using Pydantic.
class Payload(BaseModel):
class Config:
orm_mode = True
class RequestPayload(Payload):
pass
class ReplyPayload(Payload):
pass
class NewSyftMessage(ImmediateSyftMessage):
"""A base class from which all message classes should inherit.
Note:
This will eventually replace the old `SyftMessage` class.
"""
__attr_allowlist__ = ["id", "address", "reply_to", "reply", "msg_id", "kwargs"]
signed_type = SignedMessage
request_payload_type = Requ
|
estPayload
reply_payload_type = ReplyPayload
def __init__(
self,
address: Address,
kwargs: Optional[Dict[str, Any]] = None,
msg_id: Optional[UID] = None,
reply_to: Optional[Address] = None,
reply: bool = False,
) -> None:
super().__init__(address=address, msg_id=m
|
sg_id)
self.reply_to = reply_to
self.reply = reply
self.kwargs = kwargs if kwargs else {}
@property
def payload(self) -> Payload:
kwargs_dict = {}
if hasattr(self.kwargs, "upcast"):
kwargs_dict = self.kwargs.upcast() # type: ignore
else:
kwargs_dict = self.kwargs # type: ignore
try:
# If it's not a reply message then load kwargs as a proper request payload.
if not self.reply:
return self.request_payload_type(**kwargs_dict)
# If it's a reply message, then load kwargs as a proper reply payload.
else:
return self.reply_payload_type(**kwargs_dict)
except PydanticValidationError:
raise BadPayloadException
def run(
self, node: NodeServiceInterface, verify_key: Optional[VerifyKey] = None
) -> ReplyPayload:
raise NotImplementedError
def get_permissions(self) -> List:
"""Returns the list of permission classes applicable to the given message."""
raise NotImplementedError
def check_permissions(
self, node: NodeServiceInterface, verify_key: Optional[VerifyKey] = None
) -> None:
"""Check if the user has relevant permissions to run this message.
Args:
node (NodeServiceInterface): node interface used to invoke this message.
verify_key (Optional[VerifyKey], optional): user signed verification key. Defaults to None.
Raises:
AuthorizationError: Error when one of the permission is denied.
"""
if not len(self.get_permissions()):
raise PermissionsNotDefined
for permission_class in self.get_permissions():
if not permission_class().has_permission(
msg=self, node=node, verify_key=verify_key
):
raise AuthorizationError(
f"You don't have access to perform {self} action."
)
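# A hedged, illustrative sketch (not part of this module) of a concrete message
# built on NewSyftMessage: an always-granting permission class plus a trivial
# "ping" message. All names below are invented for illustration only.
class _AllowAllPermission:
    """Illustrative permission check that always grants access (sketch only)."""
    def has_permission(self, msg: "NewSyftMessage", node: NodeServiceInterface, verify_key: Optional[VerifyKey]) -> bool:
        return True
class PingReplyPayload(ReplyPayload):
    text: str = "pong"
class PingMessage(NewSyftMessage):
    """Illustrative message: replies with a static payload."""
    reply_payload_type = PingReplyPayload
    def run(self, node: NodeServiceInterface, verify_key: Optional[VerifyKey] = None) -> ReplyPayload:
        return PingReplyPayload()
    def get_permissions(self) -> List:
        return [_AllowAllPermission]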
|
waynew/flaskpypi
|
tests/test_flaskpypi.py
|
Python
|
bsd-3-clause
| 752
| 0.00266
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_flaskpypi
----------------------------------
Tests for `flaskpypi` module.
"""
import pytest
from flaskpypi import flaskpypi
# Code from https://wiki.
|
python.org/moin/PyPISimple
from xml.etree import ElementTree
from urllib.request import urlopen
def get_distributions(simple_index='https://pypi.python.org/simple/'):
with urlopen(simple_index) as f:
tree = ElementTree.parse(f)
return [a.text for a in tree.iter('a')]
def scrape_links(dist, simple_index='https://pypi.p
|
ython.org/simple/'):
with urlopen(simple_index + dist + '/') as f:
tree = ElementTree.parse(f)
return [a.attrib['href'] for a in tree.iter('a')]
def test_this_is_a_test():
assert True
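# A hedged usage sketch (not part of the original tests): exercise the two
# helpers above against the live simple index. Network access is assumed and
# the index markup must be parseable as XML by ElementTree.
if __name__ == '__main__':
    dists = get_distributions()
    print(len(dists), 'distributions listed on the index')
    if dists:
        print(scrape_links(dists[0]))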
|
shakamunyi/neutron-vrrp
|
neutron/db/migration/alembic_migrations/versions/19180cf98af6_nsx_gw_devices.py
|
Python
|
apache-2.0
| 3,313
| 0.000906
|
# Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""nsx_gw_devices
Revision ID: 19180cf98af6
Revises: 117643811bca
Create Date: 2014-02-26 02:46:26.151741
"""
# revision identifiers, used by Alembic.
revision = '19180cf98af6'
down_revision = '117643811bca'
# Change to ['*'] if this migration applies to all plugins
migration_for_plugins = [
'neutron.plugins.nicira.NeutronPlugin.NvpPluginV2',
'neutron.plugins.nicira.NeutronServicePlugin.NvpAdvancedPlugin',
'neutron.plugins.vmware.plugin.NsxPlugin',
'neutron.plugins.vmware.plugin.NsxServicePlugin'
]
from alembic import op
import sqlalchemy as sa
from neutron.db import migration
def upgrade(active_plugins=None, options=None):
if not migration.should_run(active_plugins, migration_for_plugins):
return
op.create_table(
'networkgatewaydevicereferences',
sa.Column('id', sa.String(length=36), nullable=False),
sa.Column('network_gateway_id', sa.String(length=36), nullable=True),
sa.Column('interface_name', sa.String(length=64), nullable=True),
sa.ForeignKeyConstraint(['network_gateway_id'], ['networkgateways.id'],
ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id', 'network_gateway_id', 'interface_name'))
    # Copy data from networkgatewaydevices into networkgatewaydevicereferences
op.execute("INSERT INTO networkgatewaydevicereferences SELECT "
"id, network_gateway_id, interface_name FROM "
"networkgatewaydevices")
# drop networkgatewaydevices
op.drop_table('networkgatewaydevices')
op.create_table(
'networkgatewaydevices',
sa.Column('tenant_id', sa.String(length=255), nullable=True),
sa.Column('id', sa.String(length=36), nullable=False),
sa.Column('nsx_id', sa.String(length=36), nullable=True),
sa.Column('name', sa.String(length=255), nullable=True),
sa.Col
|
umn('connector_type', sa.String(length=10), nullable=True),
sa.Column('connector_ip', sa.String(length=64), nullable=True),
sa.Column('status', sa.String(length=16), nullable=True),
sa.PrimaryKeyConstraint('id'))
# Create a networkgatewaydevice for each existing reference.
# For exis
|
ting references nsx_id == neutron_id
    # Do not fill connector info as it would be unknown
op.execute("INSERT INTO networkgatewaydevices (id, nsx_id, tenant_id) "
"SELECT gw_dev_ref.id, gw_dev_ref.id as nsx_id, tenant_id "
"FROM networkgatewaydevicereferences AS gw_dev_ref "
"INNER JOIN networkgateways AS net_gw ON "
"gw_dev_ref.network_gateway_id=net_gw.id")
def downgrade(active_plugins=None, options=None):
pass
|
mh03r932/raspi2dht11
|
examples/google_spreadsheet_twosens.py
|
Python
|
mit
| 6,208
| 0.016591
|
#!/usr/bin/python
# Google Spreadsheet DHT Sensor Data-logging Example
# Depends on the 'gspread' and 'oauth2client' package being installed. If you
# have pip installed execute:
# sudo pip install gspread oauth2client
# Also it's _very important_ on the Raspberry Pi to install the python-openssl
# package because the version of Python is a bit old and can fail with Google's
# new OAuth2 based authentication. Run the following command to install the
# the package:
# sudo apt-get update
# sudo apt-get install python-openssl
# Copyright (c) 2014 Adafruit Industries
# Author: Tony DiCola
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import json
import sys
import time
import datetime
import Adafruit_DHT
import gspread
from oauth2client.client import SignedJwtAssertionCredentials
# Type of sensor, can be Adafruit_DHT.DHT11, Adafruit_DHT.DHT22, or Adafruit_DHT.AM2302.
DHT_TYPE = Adafruit_DHT.DHT11
# Example of sensor connected to Raspberry Pi pin 23
DHT_PIN = 5
DHT_SECOND_PIN = 6
# Example of sensor connected to Beaglebone Black pin P8_11
#DHT_PIN = 'P8_11'
# Google Docs OAuth credential JSON file. Note that the process for authenticating
# with Google docs has changed as of ~April 2015. You _must_ use OAuth2 to log
# in and authenticate with the gspread library. Unfortunately this process is much
# more complicated than the old process. You _must_ carefully follow the steps on
# this page to create a new OAuth service in your Google developer console:
# http://gspread.readthedocs.org/en/latest/oauth2.html
#
# Once you've followed the steps above you should have downloaded a .json file with
# your OAuth2 credentials. This file has a name like SpreadsheetData-<gibberish>.json.
# Place that file in the same directory as this python script.
#
# Now one last _very important_ step before updating the spreadsheet will work.
# Go to your spreadsheet in Google Spreadsheet and share it to the email address
# inside the 'client_email' setting in the SpreadsheetData-*.json file. For example
# if the client_email setting inside the .json file has an email address like:
# 149345334675-md0qff5f0kib41meu20f7d1habos3qcu@developer.gserviceaccount.com
# Then use the File -> Share... command in the spreadsheet to share it with read
# and write access to the email address above. If you don't do this step then the
# updates to the sheet will fail!
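# For reference (hedged; structure inferred from the fields read in
# login_open_sheet below), the OAuth .json file is expected to contain at least:
#     { "client_email": "...@developer.gserviceaccount.com",
#       "private_key": "-----BEGIN PRIVATE KEY-----..." }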
GDOCS_OAUTH_JSON = 'Humidity-Logger-96344a3d42df.json'
# Google Docs spreadsheet name.
GDOCS_SPREADSHEET_NAME = 'Luftfeuchtigkeitslog'
# How long to wait (in seconds) between measurements.
FREQUENCY_SECONDS = 900
def login_open_sheet(oauth_key_file, spreadsheet):
"""Connect to Google Docs spreadsheet and return the first worksheet."""
try:
json_key = json.load(open(oauth_key_file))
credentials = SignedJwtAssertionCredentials(json_key['client_email'],
json_key['private_key'],
['https://spreadsheets.google.com/feeds'])
gc = gspread.authorize(credentials)
worksheet = gc.open(spreadsheet).sheet1
return worksheet
except Exception as ex:
print 'Unable to login and get spreadsheet. Check OAuth credentials, spreadsheet name, and make sure spreadsheet is shared to the client_email address in the OAuth .json file!'
print 'Google sheet login failed with error:', ex
sys.exit(1)
def getReadingFromSensor(inputpin):
attempts = 0
while attempts < 3:
# Attempt to get sensor reading.
humidity
|
, temp = Adafruit_DHT.read(DHT_TYPE, inputpin)
        # Reading the sensor depends on timing, so insert a short sleep to make sure we are no longer busy
# Skip to the next reading if a valid measurement couldn't be taken.
# T
|
his might happen if the CPU is under a lot of load and the sensor
# can't be reliably read (timing is critical to read the sensor).
if humidity is None or temp is None:
time.sleep(2)
attempts += 1
continue
print 'Temperature{0}: {1:0.1f} C'.format(inputpin,temp)
print 'Humidity{0}: {1:0.1f} %'.format(inputpin,humidity)
return temp, humidity
print 'Logging sensor measurements to {0} every {1} seconds.'.format(GDOCS_SPREADSHEET_NAME, FREQUENCY_SECONDS)
print 'Press Ctrl-C to quit.'
worksheet = None
while True:
# Login if necessary.
if worksheet is None:
worksheet = login_open_sheet(GDOCS_OAUTH_JSON, GDOCS_SPREADSHEET_NAME)
temp, humidity = getReadingFromSensor(DHT_PIN)
time.sleep(1)
temp2, humidity2 = getReadingFromSensor(DHT_SECOND_PIN)
# Append the data to a logfile
text_file = open('TempLog.csv', 'a')
text_file.write('{},{},{},{},{}\n'.format(datetime.datetime.now(),temp,humidity,temp2,humidity2))
text_file.close()
# Append the data in the spreadsheet, including a timestamp
try:
worksheet.append_row((datetime.datetime.now(), temp, humidity, temp2, humidity2))
except:
# Error appending data, most likely because credentials are stale.
# Null out the worksheet so a login is performed at the top of the loop.
print 'Append error, logging in again'
worksheet = None
time.sleep(FREQUENCY_SECONDS)
continue
# Wait 30 seconds before continuing
print 'Wrote a row to {0}'.format(GDOCS_SPREADSHEET_NAME)
time.sleep(FREQUENCY_SECONDS)
|
krfkeith/enough
|
gui/Keymap.py
|
Python
|
gpl-3.0
| 10,827
| 0.002586
|
# Copyright (c) 2007 Enough Project.
# See LICENSE for details.
"""The things a keymap does:
1. Pass given keys to 'next' keymap (considered more 'specific') which
is stronger/overrides the keymap itself.
2. If the next keymap does not know the key, then it tries to handle
it itself according to a map it holds that maps specific (modifier,
key) to funcs, and then, also according to a map of broader groups
to funcs."""
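# A hedged illustration (not part of this module) of the delegation order
# described above, using only the API defined below:
#
#     root, child = Keymap(), Keymap()
#     root.set_next_keymap(child)   # child is more specific and overrides root
#     # lookup order for `key in root` / `root[key]`:
#     #   1. child (the "next" keymap), 2. root's exact Key bindings,
#     #   3. root's Group bindings (e.g. the `alphanumeric` group)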
import pygame
import functools
import itertools
from lib.observer import Observable
def discard_eventarg(func):
@functools.wraps(func)
def handler(event):
return func()
return handler
def filter_eventtypes(event_types):
"""Returns a decorator that takes pygame events and passes them on
to the decorated function only if they are within the given event
types"""
def decorate(func):
@functools.wraps(func)
def new_func(event):
if event.type in event_types:
return func(event)
return new_func
return decorate
def handler(include_keyup=False,
include_event=False):
def decorate(func):
if not include_event:
func = discard_eventarg(func)
if not include_keyup:
func = filter_eventtypes([pygame.KEYDOWN])(func)
return func
return decorate
keydown_noarg = handler()
def mod_name(x):
mods = []
if x & pygame.KMOD_CTRL:
mods.append('Control')
if x & pygame.KMOD_SHIFT:
mods.append('Shift')
if x & pygame.KMOD_META:
mods.append('Winkey')
if x & pygame.KMOD_ALT:
mods.append('Alt')
return ' + '.join(mods)
class Key(object):
def __init__(self, modifier, key):
self.modifier = modifier
self.key = key
def _essence(self):
return (self.modifier, self.key)
def __cmp__(self, other):
if isinstance(other, Key):
return cmp(self._essence(), other._essence())
else:
return cmp(type(self), type(other))
def __hash__(self):
return hash(self._essence())
def name(self):
key_name = pygame.key.name(self.key)
if self.modifier:
return '%s+%s' % (mod_name(self.modifier), key_name)
else:
return key_name
__repr__ = name
@classmethod
def from_pygame_event(cls, event):
mod = 0
if event.mod & pygame.KMOD_CTRL:
mod |= pygame.KMOD_CTRL
elif event.mod & pygame.KMOD_ALT:
mod |= pygame.KMOD_ALT
elif event.mod & pygame.KMOD_SHIFT:
mod |= pygame.KMOD_SHIFT
return cls(mod, event.key)
class Group(object):
def __init__(self, name, allowed_modifiers, keys):
self.allowed_modifiers = set(allowed_modifiers)
self.keys = set(keys)
self._name = name
def name(self):
return self._name
def overlaps(self, key):
if isinstance(key, Group):
return bool(self.keys & key.keys) and bool(self.allowed_modifiers &
key.allowed_modifiers)
elif isinstance(key, Key):
return key in self
else:
return NotImplemented
def __contains__(self, key):
return key.key in self.keys and key.modifier in self.allowed_modifiers
# TODO: It's bad to assume anything about K_* here...
import string
alphanumeric = Group('Alphanumeric', [pygame.KMOD_SHIFT, 0],
[ord(x) for x in string.letters+string.digits] +
[pygame.K_UNDERSCORE, pygame.K_MINUS])
all_printable = Group('Printable symbols', [pygame.KMOD_SHIFT, 0],
[ord(x) for x in string.printable])
digits = Group('Digit', [0], [ord(x) for x in string.digits])
extended_digits = Group('Extended digits', [0], [ord(x) for x in string.digits+'abcdef'])
class Keymap(object):
def __init__(self):
self.obs_activation = Observable()
self.obs_dict = Observable()
# Cache these cause they are rather expensive to generate and
# used a LOT.
self.notify_remove_item = self.obs_dict.notify.remove_item
self.notify_add_item = self.obs_dict.notify.add_item
|
self.notify_replace_item = self.obs_dict.notify.replace_item
self.next_keymap = None
self.key_registrations = {}
self.group_registrations = {}
self.disabled_group_registrations = {}
self.is_active = False
def __contains__(self, key):
if self.next_keymap is not None and key in self.next_keymap:
return True
if key in
|
self.key_registrations:
return True
if key in self.group_registrations:
return True
return False
def iterkeys(self):
for key, value in self.iteritems():
yield key
def iteritems(self):
overridden = set()
if self.next_keymap is not None:
for key, value in self.next_keymap.iteritems():
overridden.add(key)
yield key, value
for group, value in self.group_registrations.iteritems():
yield group, value
for key, value in self.key_registrations.iteritems():
if key not in overridden:
yield key, value
__iter__ = iterkeys
def __getitem__(self, key):
if self.next_keymap is not None and key in self.next_keymap:
return self.next_keymap[key]
if key in self.key_registrations:
return self.key_registrations[key]
if key in self.group_registrations:
return self.group_registrations[key]
raise KeyError("Unknown key", key)
def set_next_keymap(self, keymap):
if self.next_keymap is keymap:
return
if self.next_keymap is not None:
if self.is_active:
self.next_keymap.deactivate()
# TODO: How to export to function?
for key, value in self.next_keymap.iteritems():
if keymap is not None and isinstance(key, Key) and key in keymap:
                # The key will remain overridden
continue
self._next_keymap_remove_item(key, value)
self.next_keymap.obs_dict.remove_observer(self)
prev_keymap = self.next_keymap
self.next_keymap = keymap
if self.next_keymap is not None:
self.next_keymap.obs_dict.add_observer(self, '_next_keymap_')
for key, value in self.next_keymap.iteritems():
if prev_keymap is not None and isinstance(key, Key) and key in prev_keymap:
# The key was overridden and remains so, but with a new value
self._next_keymap_replace_item(key, prev_keymap[key], value)
else:
self._next_keymap_add_item(key, value)
if self.is_active:
self.next_keymap.activate()
def _shadow_groups(self, key):
for group in self.group_registrations.keys():
if not group.overlaps(key):
continue
assert group not in self.disabled_group_registrations
gvalue = self.group_registrations.pop(group)
self.disabled_group_registrations[group] = gvalue
self.notify_remove_item(group, gvalue)
def _unshadow_groups(self, key):
for group in self.disabled_group_registrations.keys():
if not group.overlaps(key):
continue
assert group not in self.group_registrations
gvalue = self.disabled_group_registrations.pop(group)
self.group_registrations[group] = gvalue
self.notify_add_item(group, gvalue)
def _next_keymap_add_item(self, key, func):
self._shadow_groups(key)
if key in self.key_registrations:
self.notify_replace_item(key, self.key_registrations[key], func)
else:
self.notify_add_item(key, func)
def _next_keymap_remove_item(self, key, func):
if key in self.key_registrations:
self.notify_replace_item(key, func, se
|
ComputationalSystemsBiology/GINsimScripts
|
stable_core/stable_core.py
|
Python
|
gpl-3.0
| 2,265
| 0.007947
|
import jarray
g = gs.open(gs.args[0])
istates = gs.associated(g, "initialState", True).getInitialStates()
ssrv = gs.service("stable")
def copy_path(values, coreNodes):
n = len(coreNodes)
path = jarray.zeros(n, 'b')
i = 0
for idx in coreNodes:
path[i] = values[idx]
i += 1
return path
def unfold_rec(values, jokers, stack, coreNodes):
if len(jokers) < 1:
path = copy_path(values, coreNodes)
if False:
for p in stack:
idx = 0
ident = True
for v in p:
if v != path[idx]:
|
ident = False
break
idx += 1
if ident:
return
stack.append( path )
return
idx, mx = jokers[0]
njk = jokers[1:]
for v in xrange(mx):
values[idx] = v
unfold_rec(
|
values, njk, stack, coreNodes)
values[idx] = -1
def unfold(values, maxvalues, stack, coreNodes):
n = len(values)
jokers = [ (idx, maxvalues[idx]+1) for idx in xrange(n) if values[idx] == -1 ]
unfold_rec(values, jokers, stack, coreNodes)
return stack
def find_stable_states(model, nodeOrder):
maxvalues = []
coreNodes = []
inputNodes = []
coreOrder = []
idx = 0
for n in nodeOrder:
if n.isInput():
inputNodes.append(idx)
else:
coreNodes.append(idx)
coreOrder.append(n)
maxvalues.append( n.getMaxValue() )
idx += 1
unfoldNodes = xrange(len(coreNodes))
searcher = ssrv.getStableStateSearcher(model)
searcher.call()
paths = searcher.getPaths()
values = paths.getPath()
stack = []
for l in paths:
path = copy_path(values, coreNodes)
#stack.append(l)
unfold(path, maxvalues, stack, unfoldNodes)
for path in stack:
name = istates.nameState(path, coreOrder)
if name is None:
name = ""
state = ""
for v in path:
if v < 0: state += "*"
else: state += "%d" % v
print name + "\t" + state
# Get stable states for all perturbations
model = g.getModel()
find_stable_states(model, g.getNodeOrder())
|
gkoehler/TootList
|
TootList/manage.py
|
Python
|
mit
| 257
| 0
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "TootList.settings.local")
|
from
|
django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
armstrong/armstrong.apps.events
|
armstrong/apps/events/tests/managers.py
|
Python
|
apache-2.0
| 2,622
| 0.002288
|
import random
from datet
|
ime import date, timedelta, datetime
from django.core.urlresolvers import reverse
from ._utils import generate_random_event, TestCase, hours_ago, hours_ahead
from ..models import Event
class EventManagerTestCase(TestCase):
def test_upcoming_future(self):
event_future = generate_random_event(hours_ahead(1), hours_ahead(2))
self.assertTrue(event_future in Event.objects.upc
|
oming())
def test_upcoming_in_progress(self):
event_inprogress = generate_random_event(hours_ago(1), hours_ahead(1))
self.assertTrue(event_inprogress in Event.objects.upcoming())
self.assertTrue(event_inprogress in Event.objects.upcoming(days=1))
def test_upcoming_happened_today(self):
""" don't run this at 12am! go to bed """
event_happened_today = generate_random_event(hours_ago(2),
hours_ago(1))
self.assertTrue(event_happened_today in Event.objects.upcoming())
self.assertTrue(event_happened_today in Event.objects.upcoming(days=0))
self.assertTrue(event_happened_today in Event.objects.upcoming(days=1))
def test_upcoming_happened_yesterday(self):
yesterday_event = generate_random_event(hours_ago(25),
hours_ago(24))
self.assertFalse(yesterday_event in Event.objects.upcoming())
self.assertFalse(yesterday_event in Event.objects.upcoming(days=0))
self.assertFalse(yesterday_event in Event.objects.upcoming(days=1))
def test_upcoming_tmrw(self):
event_tmrw = generate_random_event(hours_ahead(24),
hours_ahead(25))
self.assertFalse(event_tmrw in Event.objects.upcoming(days=0))
self.assertTrue(event_tmrw in Event.objects.upcoming(days=1))
def test_upcoming_3_days(self):
event_3_days = generate_random_event(hours_ahead(24 * 3),
hours_ahead(24 * 3 + 1))
self.assertTrue(event_3_days in Event.objects.upcoming(days=3))
self.assertFalse(event_3_days in Event.objects.upcoming(days=2))
def test_upcoming_asc_order(self):
events = [generate_random_event(hours_ago(i), hours_ago(i + 1))
for i in random.sample(xrange(-48, 48), 10)]
upcoming = list(Event.objects.upcoming())
self.assertTrue(upcoming == sorted(upcoming,
key=lambda e: e.start_date))
def test_upcoming_no_site(self):
event = generate_random_event(hours_ahead(1), hours_ahead(2))
self.assertTrue(event in Event.on_site.upcoming())
event.sites.clear()
self.assertFalse(event in Event.on_site.upcoming())
|
ychen820/microblog
|
y/google-cloud-sdk/platform/google_appengine/lib/django-0.96/django/utils/simplejson/scanner.py
|
Python
|
bsd-3-clause
| 2,009
| 0.002987
|
"""
Iterator based sre token scanner
"""
import sre_parse, sre_compile, sre_constants
from sre_constants import BRANCH, SUBPATTERN
from re import VERBOSE, MULTILINE, DOTALL
import re
__all__ = ['Scanner', 'pattern']
FLAGS = (VERB
|
OSE | MULTILINE | DOTALL)
class Scanner(object):
def __init__(self, lexicon, flags=FLAGS):
self.actions = [None]
# combine phrases into a compound pattern
s = s
|
re_parse.Pattern()
s.flags = flags
p = []
for idx, token in enumerate(lexicon):
phrase = token.pattern
try:
subpattern = sre_parse.SubPattern(s,
[(SUBPATTERN, (idx + 1, sre_parse.parse(phrase, flags)))])
except sre_constants.error:
raise
p.append(subpattern)
self.actions.append(token)
p = sre_parse.SubPattern(s, [(BRANCH, (None, p))])
self.scanner = sre_compile.compile(p)
def iterscan(self, string, idx=0, context=None):
"""
Yield match, end_idx for each match
"""
match = self.scanner.scanner(string, idx).match
actions = self.actions
lastend = idx
end = len(string)
while True:
m = match()
if m is None:
break
matchbegin, matchend = m.span()
if lastend == matchend:
break
action = actions[m.lastindex]
if action is not None:
rval, next_pos = action(m, context)
if next_pos is not None and next_pos != matchend:
# "fast forward" the scanner
matchend = next_pos
match = self.scanner.scanner(string, matchend).match
yield rval, matchend
lastend = matchend
def pattern(pattern, flags=FLAGS):
def decorator(fn):
fn.pattern = pattern
fn.regex = re.compile(pattern, flags)
return fn
return decorator
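# A hedged usage sketch (not part of the original module): build a tiny lexicon
# with the pattern() decorator and tokenize a string with Scanner.iterscan().
# It assumes a Python version where sre_parse.Pattern is still available (this
# module targets an old Python).
@pattern(r'\d+')
def _scan_number(match, context):
    # Return the parsed integer; None means no "fast forward" position.
    return int(match.group()), None
@pattern(r'\s+')
def _scan_space(match, context):
    # Whitespace yields no token.
    return None, None
if __name__ == '__main__':
    _scanner = Scanner([_scan_number, _scan_space])
    tokens = [tok for tok, end in _scanner.iterscan('12 34 56') if tok is not None]
    print(tokens)  # expected: [12, 34, 56]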
|
jxp360/golfapp
|
golfapp/apps/piffycup/urls.py
|
Python
|
gpl-2.0
| 191
| 0.026178
|
from django.conf.urls import patterns, url
import views as views
urlpatterns = patterns('',
#url(
|
r'^$', views.index, name='index'),
#url(r'index.html', views.index,
|
name='index')
)
|
Alexander-M-Waldman/local_currency_site
|
lib/python2.7/site-packages/allauth/socialaccount/providers/basecamp/views.py
|
Python
|
gpl-3.0
| 1,123
| 0
|
import requests
from allauth.socialaccount.providers.oauth2.views import (OAuth2Adapter,
OAuth2LoginView,
OAuth2CallbackV
|
iew)
from .provider import BasecampProvider
class BasecampOAuth2Adapter(OAuth2Adapter):
provider_id = BasecampProvider.id
access_token_url = 'https://launchpad.37signals.com/authorization/token?type=web_server' # noqa
authorize_url = 'https://launchpad.37signals.com/authorization/new'
profile_url = 'https://launchpad.37signals.com/authorization.json'
def complete_login(self, request, app, token, **kwargs):
headers = {'Authorization
|
': 'Bearer {0}'.format(token.token)}
resp = requests.get(self.profile_url, headers=headers)
extra_data = resp.json()
return self.get_provider().sociallogin_from_response(request,
extra_data)
oauth2_login = OAuth2LoginView.adapter_view(BasecampOAuth2Adapter)
oauth2_callback = OAuth2CallbackView.adapter_view(BasecampOAuth2Adapter)
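# Hedged configuration note (not part of this file): to use this adapter with
# django-allauth, 'allauth.socialaccount.providers.basecamp' is added to
# INSTALLED_APPS and a SocialApp with the Basecamp client id/secret is
# configured (via the admin or the SOCIALACCOUNT_PROVIDERS setting).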
|
idlesign/django-sitemessage
|
sitemessage/management/commands/sitemessage_send_scheduled.py
|
Python
|
bsd-3-clause
| 1,014
| 0.001972
|
from traceback import format_exc
from django.core.management.base import BaseCommand
from ...toolbox import
|
send_scheduled_messages
class Command(BaseCommand):
help = 'Sends scheduled messages (both in pending and error statuses).'
def add_arguments(self, parser):
parser.add_argument(
'--priority', action='store', dest='priority', default=None,
help='Allows to filter scheduled messages by a priority number. Defaults to None.')
def
|
handle(self, *args, **options):
priority = options.get('priority', None)
priority_str = ''
if priority is not None:
priority_str = f'with priority {priority} '
self.stdout.write(f'Sending scheduled messages {priority_str} ...\n')
try:
send_scheduled_messages(priority=priority)
except Exception as e:
self.stderr.write(self.style.ERROR(f'Error on send: {e}\n{format_exc()}'))
else:
self.stdout.write('Sending done.\n')
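# Hedged usage note (not part of this file): run like any management command,
# e.g.  python manage.py sitemessage_send_scheduled --priority 1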
|
DeveloperMal/wger
|
wger/exercises/management/commands/download-exercise-images.py
|
Python
|
agpl-3.0
| 5,449
| 0.002936
|
# -*- coding: utf-8 *-*
# This file is part of wger Workout Manager.
#
# wger Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wger Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
import requests
import os
from optparse import make_option
from django.core.exceptions import ImproperlyConfigured
from django.core.management.base import BaseCommand, CommandError
from django.core.files import File
from django.core.files.temp import NamedTemporaryFile
from django.core.validators import URLValidator
from django.core.exceptions import ValidationError
from django.conf import settings
from wger.exercises.models import Exercise, ExerciseImage
class Command(BaseCommand):
'''
    Download exercise images from wger.de and update the local database
The script assumes that the local IDs correspond to the remote ones, which
is the case if the user installed the exercises from the JSON fixtures.
Otherwise, the exercise is simply skipped
'''
option_list = BaseCommand.option_list + (
make_option('--remote-url',
action='store',
dest='remote_url',
default='https://wger.de',
help='Remote URL to fetch the exercises from (default: https://wger.de)'),
)
help = ('Download exercise images from wger.de and update the local database\n'
'\n'
'ATTENTION: The script will download the images from the server and add them\n'
' to your local exercises. The exercises are identified by\n'
' their UUID field, if you manually edited or changed it\n'
' the script will not be able to match them.')
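    # Hedged usage note (not part of the original file): invoked as a Django
    # management command, e.g.
    #     python manage.py download-exercise-images --remote-url=https://wger.de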
def handle(self, *args, **options):
if not settings.MEDIA_ROOT:
raise ImproperlyConfigured('Please set MEDIA_ROOT in your settings file')
remote_url = options['remote_url']
try:
val = URLValidator()
val(remote_url)
except ValidationError:
raise CommandError('Please enter a valid URL')
exercise_api = "{0}/api/v2/exercise/?limit=999"
image_api = "{0}/api/v2/exerciseimage/?exercise={1}"
thumbnail_api = "{0}/api/v2/exerciseimage/{1}/thumbnails/"
# Get all exercises
result = requests.get(exercise_api.format(remote_url)).json()
for exercise_json in result['results']:
exercise_name = exercise_json['name'].encode('utf-8')
exercise_uuid = exercise_json['uuid']
exercise_id = exercise_json['id']
self.stdout.write('')
self.stdout.write(u"*** Processing {0} (ID: {1}, UUID: {2})".format(exercise_name,
exercise_id,
exercise_uuid))
try:
exercise = Exercise.objects.get(uuid=exercise_uuid)
except Exercise.DoesNotExist:
self.stdout.write(' Remote exercise not found in local DB, skipping...')
continue
# Get all images
images = requests.get(image_api.format(remote_url, exercise_id)).json()
if images['count']:
for image_json in images['results']:
image_id = image_json['id']
result = requests.get(thumbnail_api.format(remote_url, image_id)).json()
image_name = os.path.basename(result['original'])
self.stdout.write(' Fetching image {0} - {1}'.format(image_id, image_name))
try:
image = ExerciseImage.objects.get(pk=image_id)
self.stdout.write(' --> Image already present locally, skipping...')
continue
except ExerciseImage.DoesNotExist:
self.stdout.write(' --> Image not found in local DB, creating now...')
image = ExerciseImage()
image.pk = image_id
# Save the downloaded image, see link for details
# http://stackoverflow.com/questions/1308386/programmatically-saving-image-to-
retrieved_image = requests.get(result['original'])
img_temp = NamedTemporaryFile(delete=True)
img_temp.write(retrieved_image.content)
img_temp.flush()
image.exercise = exercise
image.is_main = image_json['is_main']
image.status = image_json['status']
image.image.save(
|
os.path.basename(imag
|
e_name),
File(img_temp),
)
image.save()
else:
self.stdout.write(' No images for this exercise, nothing to do')
|
aldebaran/qibuild
|
python/qisys/sort.py
|
Python
|
bsd-3-clause
| 4,651
| 0.00172
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2012-2021 SoftBank Robotics. All rights reserved.
# Use of this source code is governed by a BSD-style license (see the COPYING file).
""" Topological sort """
from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import print_function
__all__ = ["DagError", "assert_dag", "topological_sort"]
class DagError(Exception):
""" Dag Exception """
def __init__(self, node, parent, result):
""" DagError Init """
Exception.__init__(self)
self.node = node
self.parent = parent
self.result = result
def __str__(self):
""" String Representation """
return "Circular dependency error: Starting from '%s', node '%s' depends on '%s', complete path %s" \
% (self.node, self.parent, self.node, self.result)
def assert_dag(data):
"""
Check if data is a dag
>>> assert_dag({
... 'a' : ( 'g', 'b', 'c', 'd' ),
... 'b' : ( 'e', 'c' ),
... 'e' : ( 'g', 'c' )})
>>> assert_dag({
... 'a' : ( 'g', 'b', 'c', 'd' ),
... 'b' : ( 'e', 'c' ),
... 'e' : ( 'e', 'c' )})
Traceback (most recent call last):
...
DagError: Circular dependency error: Starting from 'e', node 'e' depends on 'e', complete path []
"""
for node, _ in data.items():
_topological_sort(data, node, node, True)
def topological_sort(data, heads):
"""
Topological sort
data should be a dictionary like that (it's a dag):
{
'a' : ( 'b', 'c', 'd' ),
'b' : ( 'e', 'c' )
}
heads are the top of the dag, the result will include all specified heads and their deps
    This function returns a list. Head will be the last element.
    Warning: this sort always finds a solution even if data is not a dag!!
If a depend on b and b depend on a, the solution is [ a, b ].
This is ok in our case but could be a problem in other situation.
    (you know what? try to use the result and you will see if it works!).
>>> topological_sort({
... 'head' : ['telepathe', 'opennao-tools', 'naoqi'],
... 'toolchain' : [],
... 'python-pc' : ['toolchain'],
... 'telepathe' : ['naoqi'],
... 'qt-pc' : ['toolchain'],
... 'opennao-tools': ['toolchain'],
... 'naoqi' : ['qt-pc', 'python-pc', 'streamer', 'toolchain']}, 'head' )
['toolchain', 'qt-pc', 'python-pc', 'streamer', 'naoqi', 'telepathe', 'opennao-tools', 'head']
>>> topological_sort({
... 'a' : ( 'b', 'c', 'd' ),
... 'b' : ( 'e', 'c' )}, 'a')
['e', 'c', 'b', 'd', 'a']
>>> topological_sort({
... 'a' : ( 'g', 'b', 'c', 'd' ),
... 'b' : ( 'e', 'c' ),
... 'e' : ( 'e', 'c' )}, 'a')
['g', 'c', 'e', 'b', 'd', 'a']
>>> topological_sort({
... 'a' : ( 'g', 'b', 'c', 'd' ),
... 'b' : ( 'e', 'c' ),
... 'e' : ( 'g', 'c' )}, 'a')
['g', 'c', 'e', 'b', 'd', 'a']
>>> topological_sort({
... 'a' : ( 'b' ),
... 'b' : ( 'a' ),
... }, 'a')
['b', 'a']
>>> topological_sort({
... 'a' : ( 'g', 'b', 'c', 'd' ),
... 'b' : ( 'e', 'c' ),
... 'q' : ( 'u', 'i' ),
... 'i' : ( 'y', 'o' ),
... 'e' : ( 'g', 'c' )}, 'a')
['g', 'c', 'e', 'b', 'd', 'a']
>>> topological_sort({
... 'a' : ( 'g', 'b', 'c', 'd' ),
... 'b' : ( 'e', 'c' ),
... 'q' : ( 'u', 'i' ),
... 'i' : ( 'y', 'o' ),
... 'e' : ( 'g', 'c' )}, [ 'a', 'q' ])
['g', 'c', 'e', 'b', 'd', 'a', 'u', 'y', 'o', 'i', 'q']
"""
if isinstance(heads, list):
data['internalfakehead'] = heads
head = 'internalfakehead'
result = _topological_sort(data, head, head)
return [x for x in result if x != 'internalfakehead']
head = heads
return _topological_sort(data, head, head)
def _topological_sort(data, head, top_node, raise_exception=False, result=None, visited=None):
""" Internal function """
if not result:
result = []
if not visited:
|
visited = []
deps = data.get(head, list())
if head in visited:
if head == top_node and raise_exception:
raise DagError(head, head, result)
return result
visited.append(head)
for i in deps:
try:
result.index(i)
except ValueError:
# the item does not exist
result = _t
|
opological_sort(data, i, top_node, raise_exception, result, visited)
result.append(head)
return result
if __name__ == "__main__":
import doctest
doctest.testmod()
|