| repo_name (string, 5-100 chars) | path (string, 4-231 chars) | language (1 class) | license (15 classes) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (string, 0-8.16k chars) | middle (string, 3-512 chars) | suffix (string, 0-8.17k chars) |
|---|---|---|---|---|---|---|---|---|
| AlexanderVangelov/pjsip | tests/pjsua/scripts-sipp/uas-subscribe-terminated-retry.py | Python | gpl-2.0 | 374 | 0.026738 |
# $Id: uas-subscribe-terminated-retry.py 4188 2012-06-29 09:01:17Z nanang $
#
import inc_const as const
PJSUA = ["--null-audio --max-calls=1 --id sip:pjsua@localhost --add-buddy $SIPP_URI"]
PJSUA_EXPECTS = [[0, "", "s"],
[0, "Subscribe presence of:", "1"],
[0, "Presence subscription .* is TERMINATED", ""],
[0, "Resubscribing .* in 5000 ms", ""
|
]
]
| j4v/DS_Store-Scanner | dsstore_scanner.py | Python | gpl-3.0 | 6,357 | 0.001573 |
from ds_store import DSStore, DSStoreEntry
from burp import IBurpExtender
from burp import IScannerCheck
from burp import IExtensionStateListener
from burp import IScanIssue
import StringIO
from urlparse import urlparse
def traverse_ds_store_file(d):
"""
Traverse a DSStore object from the root node and yield each entry.
:param d: DSStore object
:return: None
"""
node = d._rootnode
with d._get_block(node) as block:
next_node, count = block.read(b'>II')
if next_node:
for n in range(count):
ptr = block.read(b'>I')[0]
for e in d._traverse(ptr):
yield e
e = DSStoreEntry.read(block)
yield e
for e in d._traverse(next_node):
yield e
else:
for n in range(count):
e = DSStoreEntry.read(block)
yield e
def get_ds_store_content(ds_store_file):
"""
List all entries from a .DS_Store file
:param ds_store_file: .DS_Store file path
:return: Set containing all files/directories found in the .DS_Store file
"""
with DSStore.open(ds_store_file) as d:
ds_store_content = set()
for x in traverse_ds_store_file(d):
if x.filename != '.':
ds_store_content.add(x.filename)
return ds_store_content
class BurpExtender(IBurpExtender, IScannerCheck, IExtensionStateListener):
def registerExtenderCallbacks(self, callbacks):
"""
Implement IBurpExtender
:param callbacks:
:return:
"""
# Callbacks object
self._callbacks = callbacks
# Set extension name
callbacks.setExtensionName(".DS_Store Scanner")
self._callbacks.registerScannerCheck(self)
self._callbacks.registerExtensionStateListener(self)
# Helpers object
self._helpers = callbacks.getHelpers()
return
def doPassiveScan(self, baseRequestResponse):
"""
Burp Scanner invokes this method for each base request/response that is
passively scanned
:param baseRequestResponse:
:return: A list of scan issues (if any), otherwise None
"""
self._requestResponse = baseRequestResponse
scan_issues = self.findDSStoreFiles()
if len(scan_issues) > 0:
return scan_issues
else:
return None
def doActiveScan(self):
"""
Just so the scanner doesn't return a "method not implemented error"
:return: None
"""
return None
def findDSStoreFiles(self):
self._helpers = self._callbacks.getHelpers()
self.scan_issues = []
request = self._requestResponse.getRequest()
path = request.tostring().split()[1]
folder = path.rsplit("/", 1)
# it's a folder
if path.split("?")[0][-1] == "/":
# TODO test to see if there's a .DS_Store file in that folder
pass
# it's a file
else:
filename = path.split("/")[-1].split("?")[0]
# it's a .DS_Store file
if filename == ".DS_Store":
host = self._requestResponse.getHttpService().getHost()
protocol = self._requestResponse.getHttpService().getProtocol()
response = self._requestResponse.getResponse()
responseInfo = self._helpers.analyzeResponse(response)
bodyOffset = responseInfo.getBodyOffset()
ds_store_file = StringIO.StringIO()
ds_store_file.write(response.tostring()[bodyOffset:])
ds_store_content = get_ds_store_content(ds_store_file)
issuename = "Found .DS_Store file"
issuelevel = "Low"
issuedetail = """<p>The .DS_Store file was found to contain the following entries: <br><ul><li>%s</li></ul></p>""" %\
"</li><li>".join(str(x) for x in ds_store_content)
issueremediation = """Some remediation"""
# Create a ScanIssue object and append it to our list of issues
self.scan_issues.append(ScanIssue(self._requestResponse.getHttpService(),
self._helpers.analyzeRequest(
self._requestResponse).getUrl(),
issuename,
issuelevel,
issuedetail,
issueremediation))
# TODO add entries for each file found
for content in ds_store_content:
content_url = "%s://%s%s/%s" % (protocol, host, path.rsplit("/", 1)[0], content)
print content_url
return (self.scan_issues)
def consolidateDuplicateIssues(self, existingIssue, newIssue):
if existingIssue.getUrl() == newIssue.getUrl() and \
existingIssue.getIssueDetail() == newIssue.getIssueDetail():
return -1
else:
return 0
def extensionUnloaded(self):
print(".DS_Store Scanner unloaded")
return
# Implementation of the IScanIssue interface with simple constructor and getter methods
class ScanIssue(IScanIssue):
def __init__(self, httpservice, url, name, severity, detailmsg, remediationmsg):
self._url = url
self._httpservice = httpservice
self._name = name
self._severity = severity
self._detailmsg = detailmsg
self._remediationmsg = remediationmsg
def getUrl(self):
return self._url
def getHttpMessages(self):
return None
def getHttpService(self):
return self._httpservice
def getRemediationDetail(self):
return None
def getIssueDetail(self):
return self._detailmsg
def getIssueBackground(self):
return None
def getRemediationBackground(self):
return self._remediationmsg
def getIssueType(self):
return 0
def getIssueName(self):
return self._name
def getSeverity(self):
return self._severity
def getConfidence(self):
return "Certain"
| ValorNaram/isl | inputchangers/002.py | Python | mit | 1,044 | 0.029693 |
blocklevel = ["blockquote", "div", "form", "p", "table", "video", "h1", "h2", "h3", "h4", "h5", "h6", "hr", "details", "article", "header", "main"]
def normalizeEnter(src):
#Deletes the line breaks the user added for readability, since they cause issues in the HTML output
for elem in blocklevel:
while src.find("\r<" + elem) > -1:
src = src.replace("\r<" + elem, "<" + elem)
while src.find("</" + elem + ">\r") > -1:
src = src.replace("</" + elem + ">\r", "</" + elem + ">")
while src.find(">\r") > -1:
src = src.replace(">\r", ">") #It is really needed, it created some other bugs?!
while src.find("\r</") > -1:
src = src.replace("\r</", "</") ##It is really needed, it created some other bugs?!
return src
def main(islinput, inputfile, pluginData, globalData):
currentIndex = 0
for item in islinput:
item = normalizeEnter(item) #Deletes unwanted line breaks in order to prevent the problem we have with Markdown.
islinput[currentIndex] = item
currentIndex += 1
return islinput, pluginData, globalData
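A quick illustration (editor's sketch, not from the original plugin) of what normalizeEnter strips; "\r" stands for the line breaks the user typed around a block-level tag:
>>> normalizeEnter("intro\r<p>paragraph</p>\routro")
'intro<p>paragraph</p>outro'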
| shashi792/courtlistener | alert/lib/filesize.py | Python | agpl-3.0 | 789 | 0 |
alternative = [
(1024 ** 5, 'PB'),
(1024 ** 4, 'TB'),
(1024 ** 3, 'GB'),
(1024 ** 2, 'MB'),
(1024 ** 1, 'KB'),
(1024 ** 0, (' byte', ' bytes')),
]
def size(bytes, system=alternative):
"""Human-readable file size.
"""
for factor, suffix in system:
if bytes >= factor:
break
amount = float(bytes) / factor
if isinstance(suffix, tuple):
singular, multiple = suffix
if amount == 1:
suffix = singular
else:
suffix = multiple
if suffix == 'PB':
return '%.3f%s' % (amount, suffix)
elif suffix == 'TB':
return '%.2f%s' % (amount, suffix)
elif suffix == 'GB':
return '%.1f%s' % (amount, suffix)
else:
return '%d%s' % (amount, suffix)
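A few example calls (editor's sketch) showing what size() returns with the default alternative system:
>>> size(1)
'1 byte'
>>> size(2048)
'2KB'
>>> size(int(1.5 * 1024 ** 3))
'1.5GB'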
| hubo1016/vlcp | vlcp/utils/zookeeper.py | Python | apache-2.0 | 26,134 | 0.034438 |
'''
Created on 2016/8/25
:author: hubo
'''
from namedstruct import *
from namedstruct.namedstruct import BadFormatError, BadLenError, Parser, _create_struct
def _copy(buffer):
try:
if isinstance(buffer, memoryview):
return buffer.tobytes()
else:
return buffer[:]
except Exception:
return buffer[:]
def _tobytes(s, encoding = 'utf-8'):
if s is None:
return None
elif isinstance(s, bytes):
return s
else:
return s.encode(encoding)
class UStringParser(object):
'''
Jute ustring type.
'''
def __init__(self):
pass
def parse(self, buffer, inlineparent = None):
if len(buffer) < 4:
return None
length = int32.create(buffer[:4])
if length < 0:
return (None, 4)
if len(buffer) < 4 + length:
return None
else:
return (_copy(buffer[4:4+length]), 4 + length)
def new(self, inlineparent = None):
return b''
def create(self, data, inlineparent = None):
r = self.parse(data)
if r is None:
raise BadLenError('Ustring length not match')
return r[0]
def sizeof(self, prim):
prim = _tobytes(prim)
if prim is None:
return 4
else:
return len(prim) + 4
def paddingsize(self, prim):
return self.sizeof(prim)
def tobytes(self, prim, skipprepack = False):
prim = _tobytes(prim)
if prim is None:
return int32.tobytes(-1)
else:
return int32.tobytes(len(prim)) + prim
def tostream(self, prim, stream, skipprepack = False):
prim = _tobytes(prim)
if prim is None:
return stream.write(int32.tobytes(-1))
else:
stream.write(int32.tobytes(len(prim)))
return stream.write(prim) + 4
class ustringtype(typedef):
'''
An int32 length prefix followed by variable-length bytes
'''
_parser = UStringParser()
def __init__(self, displayname = 'ustring'):
typedef.__init__(self)
self._displayname = displayname
def parser(self):
return self._parser
def __repr__(self, *args, **kwargs):
return self._displayname
ustring = ustringtype()
z_buffer = ustringtype('buffer')
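# Editor's note (not part of the original module): a sketch of the resulting wire format,
# assuming namedstruct's int32 serializes big-endian, which the ZooKeeper Jute protocol requires:
#   ustring.parser().tobytes(u'abc')  ->  b'\x00\x00\x00\x03abc'   (int32 length prefix + bytes)
#   ustring.parser().tobytes(None)    ->  b'\xff\xff\xff\xff'      (length -1 encodes a null string)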
class VectorParser(object):
'''
Jute vector type.
'''
def __init__(self, innerparser):
self._innerparser = innerparser
def parse(self, buffer, inlineparent = None):
if len(buffer) < 4:
return None
length = int32.create(buffer[:4])
if length < 0:
return (None, 4)
start = 4
result = []
for i in range(0, length):
r = self._innerparser.parse(buffer[start:], None)
if r is None:
return None
(inner, size) = r
result.append(inner)
start += size
return (result, start)
def new(self, inlineparent = None):
return []
def create(self, data, inlineparent = None):
r = self.parse(data)
if r is None:
raise BadLenError('Ustring length not match')
return r[0]
def sizeof(self, prim):
if prim is None:
return 4
else:
return sum(self._innerparser.paddingsize(r) for r in prim) + 4
def paddingsize(self, prim):
return self.sizeof(prim)
def tobytes(self, prim, skipprepack = False):
if prim is None:
return int32.tobytes(-1)
else:
return int32.tobytes(len(prim)) + b''.join(self._innerparser.tobytes(r) for r in prim)
def tostream(self, prim, stream, skipprepack = False):
if prim is None:
return stream.write(int32.tobytes(-1))
else:
stream.write(int32.tobytes(len(prim)))
totalsize = 4
for r in prim:
totalsize += self._innerparser.tostream(r, stream)
return totalsize
class vector(typedef):
'''
Jute vector
'''
def __init__(self, innertype):
typedef.__init__(self)
self._innertype = innertype
def _compile(self):
return VectorParser(self._innertype.parser())
def __repr__(self, *args, **kwargs):
return 'vector<' + repr(self._innertype) + '>'
# /* predefined xid's values recognized as special by the server */
zk_xid = enum('zk_xid', globals(), int32,
WATCHER_EVENT_XID = -1,
PING_XID = -2,
AUTH_XID = -4,
SET_WATCHES_XID = -8)
# /* zookeeper event type constants */
zk_watch_event = enum('zk_watch_event', globals(), int32,
CREATED_EVENT_DEF = 1,
DELETED_EVENT_DEF = 2,
CHANGED_EVENT_DEF = 3,
CHILD_EVENT_DEF = 4,
SESSION_EVENT_DEF = -1,
NOTWATCHING_EVENT_DEF = -2)
zk_request_type = enum('zk_request_type', globals(), int32,
ZOO_NOTIFY_OP = 0,
ZOO_CREATE_OP = 1,
ZOO_DELETE_OP = 2,
ZOO_EXISTS_OP = 3,
ZOO_GETDATA_OP = 4,
ZOO_SETDATA_OP = 5,
ZOO_GETACL_OP = 6,
ZOO_SETACL_OP = 7,
ZOO_GETCHILDREN_OP = 8,
ZOO_SYNC_OP = 9,
ZOO_PING_OP = 11,
ZOO_GETCHILDREN2_OP = 12,
ZOO_CHECK_OP = 13,
ZOO_MULTI_OP = 14,
ZOO_CREATE_SESSION_OP = -10,
ZOO_CLOSE_SESSION_OP = -11,
ZOO_SETAUTH_OP = 100,
ZOO_SETWATCHES_OP = 101,
ZOO_SASL_OP = 102, # NOT SUPPORTED
ZOO_ERROR_TYPE = -1
)
zk_client_state = enum('zk_client_state', globals(), int32,
ZOO_DISCONNECTED_STATE = 0,
ZOO_NOSYNC_CONNECTED_STATE = 1,
ZOO_SYNC_CONNECTED_STATE = 3,
ZOO_AUTH_FAILED_STATE = 4,
ZOO_CONNECTED_READONLY_STATE = 5,
ZOO_SASL_AUTHENTICATED_STATE = 6,
ZOO_EXPIRED_STATE = -112
)
zk_err = enum('zk_err', globals(), int32,
ZOO_ERR_OK = 0,
ZOO_ERR_SYSTEMERROR = -1,
ZOO_ERR_RUNTIMEINCONSISTENCY = -2,
ZOO_ERR_DATAINCONSISTENCY = -3,
ZOO_ERR_CONNECTIONLOSS = -4,
ZOO_ERR_MARSHALLINGERROR = -5,
ZOO_ERR_UNIMPLEMENTED = -6,
ZOO_ERR_OPERATIONTIMEOUT = -7,
ZOO_ERR_BADARGUMENTS = -8,
ZOO_ERR_APIERROR = -100,
ZOO_ERR_NONODE = -101,
ZOO_ERR_NOAUTH = -102,
ZOO_ERR_BADVERSION = -103,
ZOO_ERR_NOCHILDRENFOREPHEMERALS = -108,
ZOO_ERR_NODEEXISTS = -110,
ZOO_ERR_NOTEMPTY = -111,
ZOO_ERR_SESSIONEXPIRED = -112,
ZOO_ERR_INVALIDCALLBACK = -113,
ZOO_ERR_INVALIDACL = -114,
ZOO_ERR_AUTHFAILED = -115
)
zk_perm = enum('zk_perm', globals(), int32, True,
ZOO_PERM_READ = 1 << 0,
ZOO_PERM_WRITE = 1 << 1,
ZOO_PERM_CREATE = 1 << 2,
ZOO_PERM_DELETE = 1 << 3,
ZOO_PERM_ADMIN = 1 << 4,
ZOO_PERM_ALL = 0x1f
)
zk_create_flag = enum('zk_create_flag', globals(), int32, True,
ZOO_EPHEMERAL = 1 << 0,
ZOO_SEQUENCE = 1 << 1,
)
Id = nstruct(
(ustring, 'scheme'),
(ustring, 'id'),
name = 'Id',
padding = 1
)
ACL = nstruct(
(zk_perm, 'perms'),
(Id, 'id'),
name = 'ACL',
padding = 1
)
Stat = nstruct(
(int64, 'czxid'), # created zxid
(int64, 'mzxid'), # last modified zxid
(int64, 'ctime'), # created
(int64, 'mtime'), # last modified
(int32, 'version'), # version
(int32, 'cversion'), # child version
(int32, 'aversion'), # acl version
(int64, 'ephemeralOwner'), # owner id if ephemeral, 0 otw
(int32, 'dataLength'), # length of the data in the node
(int32, 'numChildren'), #number of children of this node
(int64, 'pzxid'), # last modified children
name = 'Stat',
padding = 1
)
# information explicitly stored by the server persistently
StatPersisted = nstruct(
(int64, 'czxid'), # created zxid
(int64, 'mzxid'), # last modified zxid
(int64, 'ctime'), # created
(int64, 'mtime'), # last modified
(int32, 'version'), # version
(int32, 'cversion'), # child version
(int32, 'aversion'), # acl version
(int64, 'ephemeralOwner'), # owner id if ephemeral, 0 otw
(int64, 'pzxid'), # last modified children
name
| ubuntu-core/snapcraft | tests/unit/plugins/v2/test_go.py | Python | gpl-3.0 | 3,433 | 0.000874 |
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2020 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from testtools.matchers import Equals
from testtools import TestCase
from snapcraft.plugins.v2.go import GoPlugin
class GoPluginTest(TestCase):
def test_schema(self):
schema = GoPlugin.get_schema()
self.assertThat(
schema,
Equals(
{
"$schema": "http://json-schema.org/draft-04/schema#",
"type": "object",
"additionalProperties": False,
"properties": {
"go-channel": {"type": "string", "default": "latest/stable"},
"go-buildtags": {
"type": "array",
"uniqueItems": True,
"items": {"type": "string"},
"default": [],
},
},
"required": ["source"],
}
),
)
def test_get_build_snaps(self):
class Options:
go_channel = "14/latest"
plugin = GoPlugin(part_name="my-part", options=Options())
self.assertThat(plugin.get_build_snaps(), Equals({"go/14/latest"}))
def test_get_build_packages(self):
plugin = GoPlugin(part_name="my-part", options=lambda: None)
self.assertThat(plugin.get_build_packages(), Equals({"gcc"}))
def test_get_build_environment(self):
plugin = GoPlugin(part_name="my-part", options=lambda: None)
self.assertThat(
plugin.get_build_environment(),
Equals(
{
"CGO_ENABLED": "1",
"GOBIN": "${SNAPCRAFT_PART_INSTALL}/bin",
"SNAPCRAFT_GO_LDFLAGS": "-ldflags -linkmode=external",
}
),
)
def test_get_build_commands(self):
class Options:
go_buildtags = list()
plugin = GoPlugin(part_name="my-part", options=Options())
self.assertThat(
plugin.get_build_commands(),
Equals(
[
"go mod download",
'go install -p "${SNAPCRAFT_PARALLEL_BUILD_COUNT}" ${SNAPCRAFT_GO_LDFLAGS} ./...',
]
),
)
def test_get_build_commands_with_buildtags(self):
class Options:
go_buildtags = ["dev", "debug"]
plugin = GoPlugin(part_name="my-part", options=Options())
self.assertThat(
plugin.get_build_commands(),
Equals(
[
"go mod download",
'go install -p "${SNAPCRAFT_PARALLEL_BUILD_COUNT}" -tags=dev,debug ${SNAPCRAFT_GO_LDFLAGS} ./...',
]
),
)
| UManPychron/pychron | pychron/spectrometer/tasks/spectrometer_task.py | Python | apache-2.0 | 11,184 | 0.00152 |
# ===============================================================================
# Copyright 2013 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
from __future__ import absolute_import
import time
# ============= standard library imports ========================
from threading import Thread
from pyface.tasks.action.schema import SToolBar
from pyface.tasks.task_layout import TaskLayout, PaneItem, Splitter, VSplitter
from pyface.ui.qt4.tasks.advanced_editor_area_pane import EditorWidget
from traits.api import Any, Instance, on_trait_change
# ============= local library imports ==========================
from pychron.core.ui.gui import invoke_in_main_thread
from pychron.envisage.tasks.editor_task import EditorTask
from pychron.spectrometer.tasks.editor import PeakCenterEditor, ScanEditor, CoincidenceEditor, ScannerEditor
from pychron.spectrometer.tasks.spectrometer_actions import StopScanAction
from pychron.spectrometer.tasks.spectrometer_panes import ControlsPane, \
ReadoutPane, IntensitiesPane, RecordControlsPane, DACScannerPane, MassScannerPane
class SpectrometerTask(EditorTask):
scan_manager = Any
name = 'Spectrometer'
id = 'pychron.spectrometer'
_scan_editor = Instance(ScanEditor)
tool_bars = [SToolBar(StopScanAction(), )]
def info(self, msg, *args, **kw):
super(SpectrometerTask, self).info(msg)
def spy_position_magnet(self, *args, **kw):
self.scan_manager.position_magnet(*args, **kw)
def spy_peak_center(self, name):
peak_kw = dict(confirm_save=False, warn=True,
new_thread=False,
message='spectrometer script peakcenter',
on_end=self._on_peak_center_end)
setup_kw = dict(config_name=name)
return self._peak_center(setup_kw=setup_kw, peak_kw=peak_kw)
def populate_mftable(self):
sm = self.scan_manager
cfg = sm.setup_populate_mftable()
if cfg:
def func():
refiso = cfg.isotope
ion = sm.ion_optics_manager
ion.backup_mftable()
odefl = []
dets = cfg.get_detectors()
self.debug('setting deflections')
for det, defl in dets:
odefl.append((det, sm.spectrometer.get_deflection(det)))
sm.spectrometer.set_deflection(det, defl)
for di in dets:
ion.setup_peak_center(detector=[di.name], isotope=refiso,
config_name=cfg.peak_center_config.active_item.name,
standalone_graph=False,
new=True,
show_label=True, use_configuration_dac=False)
ion.peak_center.update_others = False
name = 'Pop MFTable {}-{}'.format(di.name, refiso)
invoke_in_main_thread(self._open_editor, PeakCenterEditor(model=ion.peak_center,
name=name))
self._on_peak_center_start()
ion.do_peak_center(new_thread=False, save=True, warn=True)
self._on_peak_center_end()
if not ion.peak_center.isAlive():
break
self.debug('unset deflections')
for det, defl in odefl:
sm.spectrometer.set_deflection(det, defl)
fp = cfg.get_finish_position()
self.debug('move to end position={}'.format(fp))
if fp:
iso, det = fp
if iso and det:
ion.position(iso, det)
t = Thread(target=func)
t.start()
def stop_scan(self):
self.debug('stop scan fired')
editor = self.active_editor
self.debug('active editor {}'.format(editor))
if editor:
if isinstance(editor, (ScanEditor, PeakCenterEditor, CoincidenceEditor)):
self.debug('editor stop')
editor.stop()
def do_coincidence(self):
es = [int(e.name.split(' ')[-1])
for e in self.editor_area.editors
if isinstance(e, CoincidenceEditor)]
i = max(es) + 1 if es else 1
man = self.scan_manager.ion_optics_manager
name = 'Coincidence {:02d}'.format(i)
if man.setup_coincidence():
self._open_editor(CoincidenceEditor(model=man.coincidence, name=name))
man.do_coincidence_scan()
def do_peak_center(self):
peak_kw = dict(confirm_save=True, warn=True,
message='manual peakcenter',
on_end=self._on_peak_center_end)
self._peak_center(peak_kw=peak_kw)
def define_peak_center(self):
from pychron.spectrometer.ion_optics.define_peak_center_view import DefinePeakCenterView
man = self.scan_manager.ion_optics_manager
spec = man.spectrometer
dets = spec.detector_names
isos = spec.isotopes
dpc = DefinePeakCenterView(detectors=dets,
isotopes=isos,
detector=dets[0],
isotope=isos[0])
info = dpc.edit_traits()
if info.result:
det = dpc.detector
isotope = dpc.isotope
dac = dpc.dac
self.debug('manually setting mftable to {}:{}:{}'.format(det, isotope, dac))
message = 'manually define peak center {}:{}:{}'.format(det, isotope, dac)
man.spectrometer.magnet.update_field_table(det, isotope, dac, message)
def _on_peak_center_start(self):
self.scan_manager.log_events_enabled = False
self.scan_manager.scan_enabled = False
def _on_peak_center_end(self):
self.scan_manager.log_events_enabled = True
self.scan_manager.scan_enabled = True
def send_configuration(self):
self.scan_manager.spectrometer.send_configuration()
def prepare_destroy(self):
for e in self.editor_area.editors:
if hasattr(e, 'stop'):
e.stop()
self.scan_manager.prepare_destroy()
super(SpectrometerTask, self).prepare_destroy()
# def activated(self):
# self.scan_manager.activate()
# self._scan_factory()
# super(SpectrometerTask, self).activated()
def create_dock_panes(self):
panes = [
ControlsPane(model=self.scan_manager),
RecordControlsPane(model=self.scan_manager),
MassScannerPane(model=self.scan_manager),
DACScannerPane(model=self.scan_manager),
ReadoutPane(model=self.scan_manager),
IntensitiesPane(model=self.scan_manager)]
panes = self._add_canvas_pane(panes)
return panes
# def _active_editor_changed(self, new):
# if not new:
# try:
# self._scan_factory()
# except AttributeError:
# pass
# private
def _peak_center(self, setup_kw=None, peak_kw=None):
if setup_kw is None:
setup_kw = {}
if peak_kw is None:
peak_kw = {}
es = []
for e in self.editor_area.editors:
if isinstance(e, PeakCenterEditor):
try:
es.append(int(e.name.split(' ')[-1]))
except
| dsloop/FranERP | app/google_drive_api.py | Python | mit | 3,922 | 0.000255 |
import httplib2
import os
import mimetypes
import time
from apiclient import discovery, errors
from googleapiclient.http import MediaFileUpload
from oauth2client import client
from oauth2client import tools
from oauth2client.file import Storage
try:
import argparse
flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args()
flags.noauth_local_webserver = True
except ImportError:
flags = None
# If modifying these scopes, delete your previously saved credentials
# at ~/.credentials/drive-python-quickstart.json
SCOPES = 'https://www.googleapis.com/auth/drive.file'
CLIENT_SECRET_FILE = "/home/ubuntu/FranERP/google_drive_client_secret.json"
APPLICATION_NAME = 'FranERP'
def get_credentials():
"""Gets valid user credentials from storage.
If nothing has been stored, or if the stored credentials are invalid,
the OAuth2 flow is completed to obtain the new credentials.
Returns:
Credentials, the obtained credential.
"""
home_dir = os.path.expanduser('~')
credential_dir = os.path.join(home_dir, '.credentials')
if not os.path.exists(credential_dir):
os.makedirs(credential_dir)
credential_path = os.path.join(credential_dir,
APPLICATION_NAME + '-drive-python-quickstart.json')
store = Storage(credential_path)
credentials = store.get()
if not credentials or credentials.invalid:
flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)
flow.user_agent = APPLICATION_NAME
if flags:
credentials = tools.run_flow(flow, store, flags)
else: # Needed only for compatibility with Python 2.6
credentials = tools.run(flow, store)
print('Storing credentials to ' + credential_path)
return credentials
def list_files():
"""Shows basic usage of the Google Drive API.
Creates a Google Drive API service object and outputs the names and IDs
for up to 10 files.
"""
credentials = get_credentials()
http = credentials.authorize(httplib2.Http())
service = discovery.build('drive', 'v3', http=http)
results = service.files().list(
pageSize=10, fields="nextPageToken, files(id, name)").execute()
items = results.get('files', [])
if not items:
print 'No files found.'
else:
print 'Files:'
for item in items:
print '{0} ({1})'.format(item['name'], item['id'])
def upload_file(file_to_upload):
"""Uploads a file to Google Drive using the Google Drive API.
file contains the full path for the file to be uploaded
"""
output, e = None, None
try:
_, filename = os.path.split(file_to_upload)
_, file_extension = os.path.splitext(filename)
mimetype = mimetypes.types_map[file_extension]
credentials = get_credentials()
http = credentials.authorize(httplib2.Http())
service = discovery.build('drive', 'v3', http=http)
file_metadata = {
'name': filename,
'mimeType': mimetype
}
media = MediaFileUpload(
file_to_upload, mimetype=mimetype,
resumable=True
)
retries = 0
while output is None:
try:
uploaded_file = service.files().create(
body=file_metadata, media_body=media, fields='id'
).execute()
output = uploaded_file.get('id')
except errors.HttpError as ex:
retries += 1
if retries >= 1:
raise ex
time.sleep(2 ** retries)
except Exception as ex:
if len(ex.message) > 0:
e = ex.message
else:
e = str(ex)
return output, e
def main2(file_to_upload):
o, e = upload_file(file_to_upload)
print o, e
if __name__ == '__main__':
main2('/tmp/backup.zip')
| edx-solutions/edx-platform | openedx/tests/completion_integration/test_views.py | Python | agpl-3.0 | 9,482 | 0.001476 |
# -*- coding: utf-8 -*-
"""
Test models, managers, and validators.
"""
import ddt
from completion import waffle
from completion.test_utils import CompletionWaffleTestMixin
from django.urls import reverse
from rest_framework.test import APIClient
import six
from openedx.core.djangolib.testing.utils import skip_unless_lms
from student.tests.factories import CourseEnrollmentFactory, UserFactory
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
@ddt.ddt
@skip_unless_lms
class CompletionBatchTestCase(CompletionWaffleTestMixin, ModuleStoreTestCase):
"""
Test that BlockCompletion.objects.submit_batch_completion has the desired
semantics.
"""
ENROLLED_USERNAME = 'test_user'
UNENROLLED_USERNAME = 'unenrolled_user'
COURSE_KEY = 'course-v1:TestX+101+Test'
BLOCK_KEY = 'block-v1:TestX+101+Test+type@problem+block@Test_Problem'
# And for old mongo:
COURSE_KEY_DEPRECATED = 'TestX/201/Test'
BLOCK_KEY_DEPRECATED = 'i4x://TestX/201/problem/Test_Problem'
def setUp(self):
"""
Create the test data.
"""
super(CompletionBatchTestCase, self).setUp()
self.url = reverse('completion:v1:completion-batch')
# Enable the waffle flag for all tests
self.override_waffle_switch(True)
# Create course
self.course = CourseFactory.create(
org='TestX', number='101', display_name='Test',
default_store=ModuleStoreEnum.Type.split,
)
self.assertEqual(six.text_type(self.course.id), self.COURSE_KEY)
self.problem = ItemFactory.create(
parent=self.course, category="problem", display_name="Test Problem", publish_item=False,
)
self.assertEqual(six.text_type(self.problem.location), self.BLOCK_KEY)
# And an old mongo course:
self.course_deprecated = CourseFactory.create(
org='TestX', number='201', display_name='Test',
default_store=ModuleStoreEnum.Type.mongo,
)
self.assertEqual(six.text_type(self.course_deprecated.id), self.COURSE_KEY_DEPRECATED)
self.problem_deprecated = ItemFactory.create(
parent=self.course_deprecated, category="problem", display_name="Test Problem",
)
self.assertEqual(six.text_type(self.problem_deprecated.location), self.BLOCK_KEY_DEPRECATED)
# Create users
self.staff_user = UserFactory(is_staff=True)
self.enrolled_user = UserFactory(username=self.ENROLLED_USERNAME)
self.unenrolled_user = UserFactory(username=self.UNENROLLED_USERNAME)
# Enrol one user in the course
CourseEnrollmentFactory.create(user=self.enrolled_user, course_id=self.course.id)
CourseEnrollmentFactory.create(user=self.enrolled_user, course_id=self.course_deprecated.id)
# Log in the enrolled user for all tests
self.client = APIClient()
self.client.force_authenticate(user=self.enrolled_user)
def test_enable_completion_tracking(self):
"""
Test response when the waffle switch is disabled (default).
"""
with waffle.waffle().override(waffle.ENABLE_COMPLETION_TRACKING, False):
response = self.client.post(self.url, {'username': self.ENROLLED_USERNAME}, format='json')
self.assertEqual(response.data, {
"detail":
"BlockCompletion.objects.submit_batch_completion should not be called when the feature is disabled."
})
self.assertEqual(response.status_code, 400)
@ddt.data(
# Valid submission
(
{
'username': ENROLLED_USERNAME,
'course_key': COURSE_KEY,
'blocks': {
BLOCK_KEY: 1.0,
}
}, 200, {'detail': 'ok'}
),
# Valid submission (old mongo)
(
{
'username': ENROLLED_USERNAME,
'course_key': COURSE_KEY_DEPRECATED,
'blocks': {
BLOCK_KEY_DEPRECATED: 1.0,
}
}, 200, {'detail': 'ok'}
),
# Blocks list can be empty, though it's a no-op
(
{
'username': ENROLLED_USERNAME,
'course_key': COURSE_KEY,
'blocks': [],
}, 200, {"detail": "ok"}
),
# Course must be a valid key
(
{
'username': ENROLLED_USERNAME,
'course_key': "not:a:course:key",
'blocks': {
BLOCK_KEY: 1.0,
}
}, 400, {"detail": "Invalid learning context key: not:a:course:key"}
),
# Block must be a valid key
(
{
'username': ENROLLED_USERNAME,
'course_key': COURSE_KEY,
'blocks': {
'not:a:block:key': 1.0,
}
}, 400, {"detail": "Invalid block key: not:a:block:key"}
),
# Block not in course
(
{
'username': ENROLLED_USERNAME,
'course_key': COURSE_KEY,
'blocks': {
'block-v1:TestX+101+OtherCourse+type@problem+block@other': 1.0,
}
},
400,
{
"detail": (
u"Block with key: 'block-v1:TestX+101+OtherCourse+type@problem+block@other' "
u"is not in context {}".format(COURSE_KEY)
)
}
),
# Course key is required
(
{
'username': ENROLLED_USERNAME,
'blocks': {
BLOCK_KEY: 1.0,
}
}, 400, {"detail": "Key 'course_key' not found."}
),
# Blocks is required
(
{
'username': ENROLLED_USERNAME,
'course_key': COURSE_KEY,
}, 400, {"detail": "Key 'blocks' not found."}
),
# Ordinary users can only update their own completions
(
{
'username': UNENROLLED_USERNAME,
'course_key': COURSE_KEY,
'blocks': {
BLOCK_KEY: 1.0,
}
}, 403, {"detail": "You do not have permission to perform this action."}
),
# Username is required
(
{
'course_key': COURSE_KEY,
'blocks': {
BLOCK_KEY: 1.0,
}
}, 403, {"detail": 'You do not have permission to perform this action.'}
),
# Course does not exist
(
{
'username': ENROLLED_USERNAME,
'course_key': 'course-v1:TestX+101+Test2',
'blocks': {
BLOCK_KEY: 1.0,
}
}, 400, {"detail": "User is not enrolled in course."}
),
)
@ddt.unpack
def test_batch_submit(self, payload, expected_status, expected_data):
"""
Test the batch submission response for student users.
"""
response = self.client.post(self.url, payload, format='json')
self.assertEqual(response.data, expected_data)
self.assertEqual(response.status_code, expected_status)
@ddt.data(
# Staff can submit completion on behalf of other users
(
{
'username': ENROLLED_USERNAME,
'course_key': COURSE_KEY,
'blocks': {
BLOCK_KEY: 1.0,
}
}, 200, {'detail': 'ok'}
),
# Staff can submit completion on behalf of other users (old mongo)
(
{
'username': ENROLLED_USERNAME,
'course_key': COURSE_KEY_DEPRECATED,
'blocks': {
BLOCK_KEY_DEPRECATED: 1.0,
}
}, 200, {'detail': 'ok'}
),
# User must b
| castelao/CoTeDe | cotede/qctests/fuzzylogic.py | Python | bsd-3-clause | 2,680 | 0.002985 |
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Quality Control based on fuzzy logic.
"""
import logging
import numpy as np
from .core import QCCheckVar
from .gradient import gradient
from .spike import spike
from .woa_normbias import woa_normbias
from cotede.fuzzy import fuzzy_uncertainty
module_logger = logging.getLogger(__name__)
def fuzzylogic(features, cfg, require="all"):
"""
FIXME: Think about, should I return 0, or have an assert, and at qc.py
all qc tests are applied with a try, and in case it fails it flag
0s.
"""
require = cfg.get("require", require)
if (require == "all") and not np.all([f in features for f in cfg["features"]]):
module_logger.warning(
"Not all features (%s) required by fuzzy logic are available".format(
cfg["features"].keys()
)
)
raise KeyError
uncertainty = fuzzy_uncertainty(
data=features, features=cfg["features"], output=cfg["output"], require=require
)
return uncertainty
class FuzzyLogic(QCCheckVar):
def set_features(self):
self.features = {}
for v in [f for f in self.cfg["features"] if f not in self.features]:
if v == "woa_bias":
woa_comparison = woa_normbias(self.data, self.varname, self.attrs)
self.features[v] = woa_comparison["woa_bias"]
elif v == "woa_normbias":
woa_comparison = woa_normbias(self.data, self.varname, self.attrs)
self.features[v] = woa_comparison["woa_normbias"]
elif v == "spike":
self.features[v] = spike(self.data[self.varname])
elif v == "gradient":
self.features[v] = gradient(self.data[self.varname])
self.features["fuzzylogic"] = fuz
|
zylogic(self.features, self.cfg)
def test(self):
self.flags = {}
cfg = self.cfg
flag = np.zeros(np.shape(self.data[self.varname]), dtype="i1")
uncertainty = self.features["fuzzylogic"]
# FIXME: As it is now, it will have no zero flag value. Think about cases
# where some values in a profile would not be estimated, hence flag=0
# I needed to use np.nonzeros because now uncertainty is a masked array,
# to accept when a feature is masked.
flag[np.nonzero(uncertainty <= 0.29)] = 1
flag[np.nonzero((uncertainty > 0.29) & (uncertainty <= 0.34))] = 2
flag[np.nonzero((uncertainty > 0.34) & (uncertainty <= 0.72))] = 3
flag[np.nonzero(uncertainty > 0.72)] = 4
self.flags["fuzzylogic"] = flag
| jungla/ICOM-fluidity-toolbox | 2D/U/plot_drate_z.py | Python | gpl-2.0 | 3,339 | 0.03504 |
import os, sys
import myfun
import numpy as np
import matplotlib as mpl
mpl.use('ps')
import matplotlib.pyplot as plt
import lagrangian_stats
import fio
## READ archive (too many points... somehow)
# args: name, dayi, dayf, days
#label = 'm_25_1_particles'
#label_25 = 'm_25_1_particles'
label = 'm_25_2_512'
label_25 = 'm_25_2_512'
basename = 'mli'
#dayi = 0+481
#days = 8
#dayf = 240 + days + dayi
dayi = 0
days = 2
dayf = 60 + days
time = range(dayi,dayf,days)
print time
path = './Velocity_CG/'
try: os.stat('./plot/'+label)
except OSError: os.mkdir('./plot/'+label)
# dimensions archives
# ML exp
Ylist_25 = np.linspace(0,4000,321)
Xlist_25 = np.linspace(0,10000,801)
#Ylist_25 = np.linspace(0,2000,161)
#Xlist_25 = np.linspace(0,2000,161)
dl = [0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1]
Zlist = -1*np.cumsum(dl)
xn_25 = len(Xlist_25)
yn_25 = len(Ylist_25)
zn = len(Zlist)
dx_25 = np.gradient(Xlist_25)
dz_25 = np.gradient(Zlist)
FW_25 = np.zeros((yn_25,xn_25,zn,len(range(dayi,dayf,days))))
#mld_25 = np.zeros(len(range(dayi,dayf,days)))
nu_h = 0.05
nu_v = 0.0005
for t in range(len(time)):
print 'time:', time[t]
tlabel = str(time[t])
while len(tlabel) < 3: tlabel = '0'+tlabel
#Velocity_CG_m_50_6e_9.csv
file0_U = path+'Velocity_CG_0_'+label_25+'_'+str(time[t])+'.csv'
file0_V = path+'Velocity_CG_1_'+label_25+'_'+str(time[t])+'.csv'
file0_W = path+'Velocity_CG_2_'+label_25+'_'+str(time[t])+'.csv'
file1 = 'drate_'+label+'_'+str(time[t])
file1_25 = 'drate_'+label_25
U_25 = fio.read_Scalar(file0_U,xn_25,yn_25,zn)
V_25 = fio.read_Scalar(file0_V,xn_25,yn_25,zn)
W_25 = fio.read_Scalar(file0_W,xn_25,yn_25,zn)
for i in range(0,len(Xlist_25),50):
for j in range(0,len(Ylist_25),50):
#FW_25[j,i,:,t] = 0.5*nu_h*((np.gradient(U_25[i,j,:]-np.mean(U_25[i,j,:]))/dz_25)**2 + (np.gradient(V_25[i,j,:]-np.mean(V_25[i,j,:]))/dz_25)**2) + 0.5*nu_v*((np.gradient(W_25[i,j,:]-np.mean(W_25[i,j,:]))/dz_25)**2)
FW_25[j,i,:,t] = 0.5*nu_h*((np.gradient(U_25[i,j,:])/dz_25)**2 + (np.gradient(V_25[i,j,:])/dz_25)**2) + 0.5*nu_v*(np.gradient(W_25[i,j,:])/dz_25)**2
FW_t25 = np.mean(np.mean(FW_25,0),0)
# plt.figure(figsize=(4,8))
# p25, = plt.semilogx(7.5*0.05*FW_t25[:,t],Zlist,'k--',linewidth=2)
FW_m = -11
FW_M = -7
plt.figure(figsize=(8,4))
plt.contourf(time,Zlist,np.log10(FW_t25),np.linspace(FW_m,FW_M,30),extend='both')
plt.colorbar(ticks=np.linspace(FW_m,FW_M,7))
#plt.colorbar()
#plt.plot(time,mld_25,'k')
plt.xlabel('Time [hr]',fontsize=18)
plt.ylabel('Depth [m]',fontsize=18)
plt.xticks(np.linspace(dayi,dayf-days,13),np.linspace(48,72,13).astype(int))
#plt.xticks(np.linspace(np.min(w[w>0]),np.max(w[w>0]),7),np.round(np.linspace(np.min(w[w>0]),np.max(w[w>0]),7)*360000)/100,fontsize=16)
#plt.yticks(fontsize=16)
plt.savefig('./plot/'+label+'/'+file1_25+'.eps',bbox_inches='tight')
print './plot/'+label+'/'+file1_25+'.eps'
plt.close()
import csv
f = open('drate_'+label+'.csv','w')
writer = csv.writer(f)
row = []
row.append('time')
for k in Zlist:
row.append(k)
writer.writerow(row)
time = np.asarray(time)*1440 + 48*3600
for t in range(len(time)):
row = []
#row.append((time[t]-1)*360)
row.append((time[t]))
for k in range(len(Zlist)):
row.append(FW_t25[k,t])
writer.writerow(row)
f.close()
| JoKaWare/WTL-DUI | tools/grit/grit/tool/postprocess_interface.py | Python | bsd-3-clause | 1,029 | 0.004859 |
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
''' Base class for postprocessing of RC files.
'''
import sys
class PostProcessor(object):
''' Base class for postprocessing of the RC file data before being
output through the RC2GRD tool. You should implement this class if
you want GRIT to do specific things to the RC files after it has
converted the data into GRD format, i.e. change the content of the
RC file, and put it into a P4 changelist, etc.'''
def Process(self, rctext, rcpath, grdnode):
''' Processes the data in rctext and grdnode.
Args:
rctext: string containing the contents of the RC file being processed.
rcpath: the path used to access the file.
grdnode: the root node of the grd xml data generated by
the rc2grd tool.
Return:
The root node of the processed GRD tree.
'''
raise NotImplementedError()
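A minimal sketch of the subclassing pattern the docstring describes (editor's illustration; the class name is hypothetical and not part of GRIT):
class NoOpPostProcessor(PostProcessor):
  '''Returns the GRD tree unchanged.'''
  def Process(self, rctext, rcpath, grdnode):
    return grdnode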
| GajaZ/Locevanje_odpadkov | kamera_klik.py | Python | gpl-3.0 | 666 | 0.016517 |
from SimpleCV import Camera, Display
import time
import wx
def slika_klik():
cam = Camera(0)
display = Display()
cam.getImage().show()
i = 0
while display.isNotDone():
start_time = time.clock()
img = cam.getImage()
if display.mouseLeft:
img.save("C:\Users\Gaja\Desktop\DIR2017\Locevanje_odpadkov-master\Zajete_slike\slika_v_obdelavi" + str(i) + ".jpg")
end_time = time.clock()
time_taken = end_time - start_time
print("Zajem slike traja: " + str(time_taken))
time.sleep(2)
i=i+1
img.save(display)
slika_klik()
| edespino/gpdb | src/test/tinc/tincrepo/mpp/gpdb/tests/storage/walrepl/gpinitstandby/__init__.py | Python | apache-2.0 | 6,141 | 0.007816 |
"""
Copyright (c) 2004-Present Pivotal Software, Inc.
This program and the accompanying materials are made available under
the terms of the under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import socket
from time import sleep
import pexpect as pexpect
import tinctest
from tinctest.lib import local_path
from gppylib.commands.base import Command
from mpp.lib.config import GPDBConfig
from mpp.lib.PSQL import PSQL
from mpp.gpdb.tests.storage.walrepl.run import StandbyRunMixin
from mpp.gpdb.tests.storage.walrepl.lib.verify import StandbyVerify
from mpp.gpdb.tests.storage.walrepl.lib import WalReplException
from mpp.gpdb.tests.storage.walrepl.lib.pg_util import GpUtility
class GpinitStandby(object):
'''Class for gpinitstandby operations
Disclaimer: Some of these may repeat with the mpp/lib version'''
def __init__(self):
self.stdby = StandbyVerify()
self.runmixin = StandbyRunMixin()
self.runmixin.createdb(dbname='walrepl')
self.mdd = os.environ.get('MASTER_DATA_DIRECTORY')
self.config = GPDBConfig()
self.pgutil = GpUtility()
self.host = socket.gethostname()
def run(self, option = ''):
'''Runs gpinitstandby and returns True if successful'''
gpinitstandby_cmd = 'gpinitstandby -a %s' % option
cmd = Command(name='Running Gpinitstandby', cmdStr="%s" % gpinitstandby_cmd)
tinctest.logger.info(" %s" % cmd)
cmd.run(validateAfter=False)
result = cmd.get_results()
if result.rc != 0:
return False
return True
def verify_gpinitstandby(self, primary_pid):
'''Verify the presence of standby in recovery mode '''
if (self.stdby.check_gp_segment_config()) and (self.stdby.check_pg_stat_replication()) and (self.stdby.check_standby_processes())and self.compare_primary_pid(primary_pid) :
return True
return False
def get_masterhost(self):
std_sql = "select hostname from gp_segment_configuration where content=-1 and role='p';"
master_host = PSQL.run_sql_command(std_sql, flags = '-q -t', dbname= 'postgres')
return master_host.strip()
def get_standbyhost(self):
std_sql = "select hostname from gp_segment_configuration where content='-1' and role='m';"
standby_host = PSQL.run_sql_command(std_sql, flags = '-q -t', dbname= 'postgres')
return standby_host.strip()
def get_filespace_location(self):
fs_sql = "select fselocation from pg_filespace_entry where fselocation like '%fs_walrepl_a%' and fsedbid=1;"
filespace_loc = PSQL.run_sql_command(fs_sql, flags = '-q -t', dbname= 'postgres')
return filespace_loc.strip()
def get_standbyhostnode(self):
'''
Function used to obtain the hostname of one of the segment nodes in order to use it as the standby master node
@return : returns the hostname of the segment node which can be used as the standby master node
'''
hostlist = self.config.get_hosts()
standby = ''
for host in hostlist:
if host.strip() != self.host:
standby = host.strip()
if len(standby) > 0 :
return standby
else:
tinctest.logger.error('No segment host other than master available to have remote standby')
def get_primary_pid(self):
pid = self.pgutil.get_pid_by_keyword(pgport=os.environ.get('PGPORT'), keyword=self.mdd)
if int(pid) == -1:
raise WalReplException('Unable to get pid of primary master process')
else:
return int(pid)
def compare_primary_pid(self, initial_pid):
final_pid = self.get_primary_pid()
if initial_pid == final_pid :
return True
return False
def create_dir_on_standby(self, standby, location):
fs_cmd = "gpssh -h %s -e 'rm -rf %s; mkdir -p %s' " % (standby, location, location)
cmd = Command(name='Make directory on standby before running the command', cmdStr = fs_cmd)
tinctest.logger.info('%s' % cmd)
cmd.run(validateAfter=True)
result = cmd.get_results()
if result.rc != 0:
raise WalReplException('Unable to create directory on standby')
else:
return True
def initstand_by_with_default(self):
master_host = self.get_masterhost()
gp_cmd = "/bin/bash -c 'gpinitstandby -s %s'" % (master_host)
logfile = open(local_path('install.log'),'w')
child = pexpect.spawn(gp_cmd, timeout=400)
child.logfile = logfile
sleep(2)
check = child.expect(['.* Enter standby filespace location for filespace pg_system .*', ' '])
if check != 0:
child.close()
l_file = open(local_path('install.log'),'r')
lines = l_file.readlines()
for line in lines:
if 'default: NA' in line:
return True
return False
def init_with_prompt(self,filespace_loc):
standby = self.get_standbyhostnode()
gp_cmd = "/bin/bash -c 'gpinitstandby -s %s -a'" % (standby)
logfile = open(local_path('install2.log'),'w')
child = pexpect.spawn(gp_cmd, timeout=400)
child.logfile = logfile
sleep(5)
check = child.expect(['.* Enter standby filespace location for filespace.*', ' '])
child.sendline(filespace_loc)
sleep(10)
check = child.expect(['.*Successfully created standby master.*'])
if check != 0:
tinctest.logger.error('gpinitstandy failed')
return False
child.close()
return True
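A minimal usage sketch (editor's illustration, not part of the original test library), assuming a configured GPDB environment; the standby hostname is hypothetical:
stdby = GpinitStandby()
primary_pid = stdby.get_primary_pid()
if stdby.run(option='-s standby-host') and stdby.verify_gpinitstandby(primary_pid):
    tinctest.logger.info('standby initialized and verified')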
| adamhadani/HBasta | hbasta/__init__.py | Python | apache-2.0 | 69 | 0 |
#!/usr/bin/env python
from _api import *
from _intoptparse import *
| cnvogelg/fs-uae-gles | launcher/fs_uae_launcher/ui/LauncherFileDialog.py | Python | gpl-2.0 | 2,280 | 0.002632 |
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
import os
import fs_uae_launcher.fsui as fsui
from ..Settings import Settings
from ..I18N import _, ngettext
from .Skin import Skin
class LauncherFileDialog(fsui.FileDialog):
def __init__(self, parent, title, type, last_path="",
multiple=False, dir_mode=False):
self.dir_mode = dir_mode
self.settings_key = "last_{0}_dir".format(type)
directory = ""
if last_path and last_path not in ["internal"]:
print("last_path", repr(last_path))
#if os.path.isdir(last_path):
# last_path_dir = last_path
#else:
last_path_dir = os.path.dirname(last_path)
print("last_path_dir", last_path_dir)
if last_path_dir:
if os.path.exists(last_path_dir):
directory = last_path_dir
else:
# file was relative to default directory
directory = self.get_default_directory(type)
if not directory:
value = Settings.get(self.settings_key)
print(self.settings_key, value)
if value and os.path.exists(value):
directory = value
if not directory:
directory = self.get_default_directory(type)
fsui.FileDialog.__init__(self, parent, title, directory,
dir_mode=dir_mode, multiple=multiple)
def get_default_directory(self, type):
if type == "floppy":
return Settings.get_floppies_dir()
elif type == "cd":
return Settings.get_cdroms_dir()
elif type == "hd":
return Settings.get_hard_drives_dir()
elif type == "rom":
return Settings.get_kickstarts_dir()
raise Exception("unknown file dialog type")
def show_modal(self):
if fsui.FileDialog.show_modal(self):
path = self.get_path()
#if os.path.isdir(path):
# last_path_dir = path
#lse:
last_path_dir = os.path.dirname(path)
Settings.set(self.settings_key, last_path_dir)
return True
return False
| isotoma/KeenClient-Python | keen/persistence_strategies.py | Python | mit | 1,595 | 0 |
__author__ = 'dkador'
class BasePersistenceStrategy(object):
"""
A persistence strategy is responsible for persisting a given event
somewhere (i.e. directly to Keen, a local cache, a Redis queue, etc.)
"""
def persist(self, event):
"""Persists the given event somewhere.
:param event: the event to persist
"""
raise NotImplementedError()
class DirectPersistenceStrategy(BasePersistenceStrategy):
"""
A persistence strategy that saves directly to Keen and bypasses any local
cache.
"""
def __init__(self, api):
""" Initializer fo
|
r DirectPersistenceStrategy.
:param api: the Keen Api object used to communicate with the Keen API
"""
super(DirectPersistenceStrategy, self).__init__()
self.api = api
def persist(self, event):
""" Posts the given event directly to the Keen API.
:param event: an Event to persist
"""
self.api.post_event(event)
def batch_persist(self, events):
""" Posts the given events directly to the Keen API.
:param events: a batch of events to persist
"""
self.api.post_events(events)
class RedisPersistenceStrategy(BasePersistenceStrategy):
"""
A persistence strategy that persists events to Redis for later processing.
Not yet implemented.
"""
pass
class FilePersistenceStrategy(BasePersistenceStrategy):
"""
A persistence strategy that persists events to the local file system for
later processing.
Not yet implemented.
"""
pass
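For illustration only (editor's sketch, not part of the keen client), a minimal custom strategy that follows the same interface and collects events in memory, e.g. for tests:
class ListPersistenceStrategy(BasePersistenceStrategy):
    """Persists events to a local Python list."""
    def __init__(self):
        super(ListPersistenceStrategy, self).__init__()
        self.events = []
    def persist(self, event):
        self.events.append(event)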
| henryiii/semester | semester/gui/sandals.py | Python | mit | 15,589 | 0.007569 |
from contextlib import contextmanager
import threading
try: # python 3
import tkinter
from tkinter import messagebox
from tkinter import filedialog
from tkinter import simpledialog
from tkinter import scrolledtext
from tkinter import Scrollbar
from tkinter import N
from tkinter import NE
from tkinter import E
from tkinter import SE
from tkinter import S
from tkinter import SW
from tkinter import W
from tkinter import NW
from tkinter import CENTER
from tkinter import BOTTOM
from tkinter import LEFT
from tkinter import RIGHT
from tkinter import TOP
from tkinter import NONE
from tkinter import NORMAL
from tkinter import ACTIVE
from tkinter import DISABLED
from tkinter import FLAT
from tkinter import RAISED
from tkinter import SUNKEN
from tkinter import GROOVE
from tkinter import RIDGE
from tkinter import TRUE
from tkinter import FALSE
except ImportError: # python 2
import Tkinter as tkinter
import tkMessageBox as messagebox
import tkFileDialog as filedialog
import tkSimpleDialog as simpledialog
import ScrolledText as scrolledtext
from Tkinter import Scrollbar
from Tkinter import N
from Tkinter import NE
from Tkinter import E
from Tkinter import SE
from Tkinter import S
from Tkinter import SW
from Tkinter import W
from Tkinter import NW
from Tkinter import CENTER
from Tkinter import BOTTOM
from Tkinter import LEFT
from Tkinter import RIGHT
from Tkinter import TOP
from Tkinter import NONE
from Tkinter import NORMAL
from Tkinter import ACTIVE
from Tkinter import DISABLED
from Tkinter import FLAT
from Tkinter import RAISED
from Tkinter import SUNKEN
from Tkinter import GROOVE
from Tkinter import RIDGE
from Tkinter import TRUE
from Tkinter import FALSE
_root = None
_pack_side = None
_events = []
_radioVariable = None
class AutoScrollbar(Scrollbar):
# a scrollbar that hides itself if it's not needed. only
# works if you use the grid geometry manager.
def set(self, lo, hi):
if float(lo) <= 0.0 and float(hi) >= 1.0:
# grid_remove is currently missing from Tkinter!
self.tk.call("grid", "remove", self)
else:
self.grid()
Scrollbar.set(self, lo, hi)
def pack(self, **kw):
raise TclError("cannot use pack with this widget")
def place(self, **kw):
raise TclError("cannot use place with this widget")
class window(tkinter.Tk):
def __init__(self, title="Window", **kw):
tkinter.Tk.__init__(self)
self.title(title)
self.kw = kw
def __enter__(self):
global _root, _pack_side
# create scroll bar
self.vscrollbar = AutoScrollbar(self)
self.vscrollbar.grid(row=0, column=1, sticky=N+S)
# create canvas
self.canvas = tkinter.Canvas(self,
yscrollcommand=self.vscrollbar.set, bd=5)
self.canvas.grid(row=0, column=0, sticky=N+S+E+W)
# configure scroll bar for canvas
self.vscrollbar.config(command=self.canvas.yview)
# make the canvas expandable
self.grid_rowconfigure(0, weight=1)
self.grid_columnconfigure(0, weight=1)
# create frame in canvas
self.frame = tkinter.Frame(self.canvas)
self.frame.columnconfigure(0, weight=1)
self.frame.columnconfigure(1, weight=1)
_pack_side = TOP
_root = self.frame
return self # was _root for some reason
def __exit__(self, type, value, traceback):
global _root, _pack_side
# puts tkinter widget onto canvas
self.canvas.create_window(0, 0, anchor=NW, window=self.frame, width = int(self.canvas.config()['width'][4])-int(self.vscrollbar.config()['width'][4]))
# deal with canvas being resized
def resize_canvas(event):
self.canvas.create_window(0, 0, anchor=NW, window=self.frame, width = int(event.width)-int(self.vscrollbar.config()['width'][4]))
self.canvas.bind("<Configure>", resize_canvas)
# updates geometry management
self.frame.update_idletasks()
# set canvas scroll region to all of the canvas
self.canvas.config(scrollregion=self.canvas.bbox("all"))
# set minimum window width
self.update()
self.minsize(self.winfo_width(), 0)
self.config(**self.kw)
self.frame.update()
# start mainloop
self.mainloop()
# window closed...
_pack_side = None
# stop all ongoing _events
[event.set() for event in _events]
class slot(tkinter.Frame):
def __init__(self, **kw):
self.kw = kw
def __enter__(self):
global _root, _pack_side
self._root_old = _root
self._pack_side_old = _pack_side
tkinter.Frame.__init__(self, self._root_old, **self.kw)
self.pack( side=self._pack_side_old, fill=tkinter.X)
_root = self
def __exit__(self, type, value, traceback):
global _root, _pack_side
_root = self._root_old
_pack_side = self._pack_side_old
class stack(slot):
def __init__(self, **kw):
slot.__init__(self, **kw)
def __enter__(self):
global _pack_side
slot.__enter__(self)
_pack_side = TOP
return _root
class flow(slot):
def __init__(self, **kw):
slot.__init__(self, **kw)
def __enter__(self):
global _pack_side
slot.__enter__(self)
_pack_side = LEFT
return _root
class button(tkinter.Button, object):
def __init__(self, text="", **kw):
self.kw = kw
self.textvariable = tkinter.StringVar()
self.textvariable.set(self.kw['text'] if 'text' in self.kw else text)
if 'text' in self.kw:
del self.kw['text']
tkinter.Button.__init__(self, _root, textvariable = self.textvariable, **kw)
self.pack( side = _pack_side )
def __call__(self, func):
func.button = self
self.config(command = lambda: func())
return func
@property
def text(self):
return self.textvariable.get()
@text.setter
def text(self, text):
self.textvariable.set(text)
class label(tkinter.Label, object):
def __init__(self, text="", **kw):
self.kw = kw
self.textvariable = tkinter.StringVar()
self.textvariable.set(self.kw['text'] if 'text' in self.kw else text)
if 'text' in self.kw:
del self.kw['text']
tkinter.Label.__init__(self, _root, textvariable=self.textvariable, **kw)
self.pack( side=_pack_side )
@property
def text(self):
return self.textvariable.get()
@text.setter
def text(self, text):
self.textvariable.set(text)
class message(tkinter.Message, object):
def __init__(self, text="", **kw):
self.kw = kw
self.textvariable = tkinter.StringVar()
self.textvariable.set(self.kw['text'] if 'text' in self.kw else text)
if 'text' in self.kw:
del self.kw['text']
tkinter.Message.__init__(self, _root, textvariable=self.textvariable, anchor=NW, **kw)
self.pack( side=_pack_side )
@property
def text(self):
return self.textvariable.get()
@text.setter
def text(self, text):
self.textvariable.set(text)
class repeat(threading.Thread):
def __init__(self, interval=1):
global _events
threading.Thread.__init__(self)
self.interval = interval
self.stopped = threading.Event()
_events.append(self.stopped)
def __call__(self, func):
self.func = func
self.start()
return func
def run(self):
while not self.stopped.wait(self.interval):
self.func()
class loop(threading.Thread):
def __init__(self):
global _events
threading.Thread.__init__(self)
self.stopped = threading.Event()
_events.append(self.stopped)
def __call__(self, func):
self.func = func
self.sta
| martinrotter/textilosaurus | src/libtextosaurus/3rd-party/scintilla/gtk/DepGen.py | Python | gpl-3.0 | 752 | 0.014628 |
#!/usr/bin/env python3
# DepGen.py - produce a make dependencies file for Scintilla
# Copyright 2019 by Neil Hodgson <neilh@scintilla.org>
# The License.txt file describes the conditions under which this software may be distributed.
# Requires Python 3.6 or later
import sys
sys.path.append("..")
from scripts import Dependencies
topComment = "# Created by DepGen.py. To recreate, run DepGen.py.\n"
def Generate():
sources = ["../src/*.cxx", "../lexlib/*.cxx", "../lexers/*.cxx"]
includes = ["../include", "../src", "../lexlib"]
deps = Dependencies.FindDependencies(["../gtk/*.cxx"] + sources, ["../gtk"] + includes, ".o", "../gtk/")
Dependencies.UpdateDependencies("../gtk/deps.mak", deps, topComment)
if __name__ == "__main__":
Generate()
| sniperganso/python-manilaclient | manilaclient/tests/functional/base.py | Python | apache-2.0 | 10,433 | 0 |
# Copyright 2014 Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import traceback
from oslo_log import log
from tempest_lib.cli import base
from tempest_lib import exceptions as lib_exc
from manilaclient import config
from manilaclient.tests.functional import client
from manilaclient.tests.functional import utils
CONF = config.CONF
LOG = log.getLogger(__name__)
class handle_cleanup_exceptions(object):
"""Handle exceptions raised with cleanup operations.
Always suppress errors when lib_exc.NotFound or lib_exc.Forbidden
are raised.
Suppress all other exceptions only in case config opt
'suppress_errors_in_cleanup' is True.
"""
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, exc_traceback):
if not (isinstance(exc_value,
(lib_exc.NotFound, lib_exc.Forbidden)) or
CONF.suppress_errors_in_cleanup):
return False # Do not suppress error if any
if exc_traceback:
LOG.error("Suppressed cleanup error: "
"\n%s" % traceback.format_exc())
return True # Suppress error if any
class BaseTestCase(base.ClientTestBase):
# Will be cleaned up after test suite run
class_resources = []
# Will be cleaned up after single test run
method_resources = []
def setUp(self):
super(BaseTestCase, self).setUp()
self.addCleanup(self.clear_resources)
@classmethod
def tearDownClass(cls):
super(BaseTestCase, cls).tearDownClass()
cls.clear_resources(cls.class_resources)
@classmethod
def clear_resources(cls, resources=None):
"""Deletes resources, that were created in test suites.
This method tries to remove resources from resource list,
if it is not found, assume it was deleted in test itself.
It is expected, that all resources were added as LIFO
due to restriction of deletion resources, that are in the chain.
:param resources: dict with keys 'type','id','client' and 'deleted'
"""
if resources is None:
resources = cls.method_resources
for res in resources:
if "deleted" not in res:
res["deleted"] = False
if "client" not in res:
res["client"] = cls.get_cleanup_client()
if not(res["deleted"]):
res_id = res["id"]
client = res["client"]
with handle_cleanup_exceptions():
# TODO(vponomaryov): add support for other resources
if res["type"] is "share_type":
client.delete_share_type(
res_id, microversion=res["microversion"])
client.wait_for_share_type_deletion(
res_id, microversion=res["microversion"])
elif res["type"] is "share_network":
client.delete_share_network(
res_id, microversion=res["microversion"])
client.wait_for_share_network_deletion(
res_id, microversion=res["microversion"])
elif res["type"] is "share":
client.delete_share(
res_id, microversion=res["microversion"])
client.wait_for_share_deletion(
res_id, microversion=res["microversion"])
else:
LOG.warning("Provided unsupported resource type for "
"cleanup '%s'. Skipping." % res["type"])
res["deleted"] = True
@classmethod
def get_admin_client(cls):
manilaclient = client.ManilaCLIClient(
username=CONF.admin_username,
password=CONF.admin_password,
tenant_name=CONF.admin_tenant_name,
uri=CONF.admin_auth_url or CONF.auth_url,
insecure=CONF.insecure,
cli_dir=CONF.manila_exec_dir)
# Set specific for admin project share network
manilaclient.share_network = CONF.admin_share_network
return manilaclient
@classmethod
def get_user_client(cls):
manilaclient = client.ManilaCLIClient(
username=CONF.username,
password=CONF.password,
tenant_name=CONF.tenant_name,
uri=CONF.auth_url,
insecure=CONF.insecure,
cli_dir=CONF.manila_exec_dir)
# Set specific for user project share network
manilaclient.share_network = CONF.share_network
return manilaclient
@property
def admin_client(self):
if not hasattr(self, '_admin_client'):
self._admin_client = self.get_admin_client()
return self._admin_client
@property
def user_client(self):
if not hasattr(self, '_user_client'):
self._user_client = self.get_user_client()
return self._user_client
def _get_clients(self):
return {'admin': self.admin_client, 'user': self.user_client}
def skip_if_microversion_not_supported(self, microversion):
if not utils.is_microversion_supported(microversion):
raise self.skipException(
"Microversion '%s' is not supported." % microversion)
@classmethod
def create_share_type(cls, name=None, driver_handles_share_servers=True,
snapshot_support=True, is_public=True, client=None,
|
cleanup_in_class=True, microversion=None):
if client is None:
client = cls.get_admin_client()
share_type = client.create_share_type(
name=name,
|
driver_handles_share_servers=driver_handles_share_servers,
snapshot_support=snapshot_support,
is_public=is_public,
microversion=microversion,
)
resource = {
"type": "share_type",
"id": share_type["ID"],
"client": client,
"microversion": microversion,
}
if cleanup_in_class:
cls.class_resources.insert(0, resource)
else:
cls.method_resources.insert(0, resource)
return share_type
@classmethod
def create_share_network(cls, name=None, description=None,
nova_net_id=None, neutron_net_id=None,
neutron_subnet_id=None, client=None,
cleanup_in_class=True, microversion=None):
if client is None:
client = cls.get_admin_client()
share_network = client.create_share_network(
name=name,
description=description,
nova_net_id=nova_net_id,
neutron_net_id=neutron_net_id,
neutron_subnet_id=neutron_subnet_id,
microversion=microversion,
)
resource = {
"type": "share_network",
"id": share_network["id"],
"client": client,
"microversion": microversion,
}
if cleanup_in_class:
cls.class_resources.insert(0, resource)
else:
cls.method_resources.insert(0, resource)
return share_network
@classmethod
def create_share(cls, share_protocol=None, size=None, share_network=None,
share_type=None, name=None, description=None,
public=False, snapshot=None, metadata=None,
client=None, cleanup_in_class=False,
w
|
FishPi/FishPi-POCV---Command---Control
|
fishpi/ui/main_view_tk.py
|
Python
|
bsd-2-clause
| 13,484
| 0.010902
|
#
# FishPi - An autonomous drop in the ocean
#
# Main View classes for POCV UI.
#
import tkFont
from Tkinter import *
from PIL import Image, ImageTk
class MainView(Frame, object):
""" MainView class for POCV UI. """
def __init__(self, master, view_controller):
super(MainView, self).__init__(master, bd=1, relief=GROOVE)
self.pack()
self.create_widgets(master, view_controller)
def create_widgets(self, master, view_controller):
""" Create widgets for view. """
# top frame
self.top_frame = Frame(master, bd=1, relief=GROOVE)
self.top_frame.pack(fill=X)
# map frame (in top sub-frame)
self.map_frame = MapFrame(self.top_frame, view_controller)
self.map_frame.pack(side=LEFT, fill=X)
# camera frame (in top sub-frame)
self.camera_frame = CameraFrame(self.top_frame, view_controller)
self.camera_frame.pack(side=LEFT, fill=X, expand=True)
# bottom sub-frame (in main frame)
self.bottom_frame = Frame(master, bd=1, relief=GROOVE)
self.bottom_frame.pack(fill=BOTH, expand=True)
# route frame (in bottom sub-frame)
self.route_frame = RouteFrame(self.bottom_frame, view_controller)
self.route_frame.pack(side=LEFT, fill=BOTH, padx=5, pady=5, expand=True)
# info frame (in bottom sub-frame)
self.info_frame = InfoFrame(self.bottom_frame, view_controller)
self.info_frame.pack(side=LEFT, fill=BOTH, pady=5, expand=True)
# controls frame (in bottom sub-frame)
self.controls_frame = ControlsFrame(self.bottom_frame, view_controller)
self.controls_frame.pack(side=LEFT, fill=BOTH, padx=5, pady=5, expand=True)
def update_callback(self):
""" Callback for any view objects that need to requery (rather than observe a model. """
self.camera_frame.update_callback()
class MapFrame(Frame, object):
""" UI Frame displaying map. """
def __init__(self, master, view_controller):
super(MapFrame, self).__init__(master, bd=1, relief=GROOVE)
self._view_controller = view_controller
# get map image
image = view_controller.get_current_map()
# scale and display image
width, height = image.size
scale = .12
image_resized = image.resize((int(width*scale), int(height*scale)), Image.ANTIALIAS)
photo = ImageTk.PhotoImage(image_resized)
# add overlay
self.top = Canvas(self, width=480, height=240)
self.top.create_image((25,0), image=photo, anchor=NW)
self.top.create_oval((35
|
,190,75,230), width=2, fill="white")
self.top.create_text((55,210), text="H", font=14)
self.image=photo
self.top.bind("<Button-1>", self.click_callback)
self.top.bin
|
d("<B1-Motion>", self.move_callback)
self.top.pack(fill=X)
def click_callback(self, event):
print "clicked at", event.x, event.y
def move_callback(self, event):
print event.x, event.y
class CameraFrame(Frame, object):
""" UI Frame displaying camera image. """
def __init__(self, master, view_controller):
super(CameraFrame, self).__init__(master, bd=1, relief=SUNKEN)
self._view_controller = view_controller
# display image
self.cnvs_camera = Canvas(self, width=320, height=240)
self.update_image()
self.cnvs_camera.pack(fill=BOTH)
def update_image(self):
# get latest image
image = self._view_controller.last_img
photo = ImageTk.PhotoImage(image)
# display it
self.cnvs_camera.create_image((0,0), image=photo, anchor=NW)
#self.cnvs_camera.configure(image = photo)
self.image = photo
def update_callback(self):
self.update_image()
class InfoFrame(Frame, object):
""" UI Frame displaying information and status. """
def __init__(self, master, view_controller):
super(InfoFrame, self).__init__(master, bd=1, relief=SUNKEN)
self._view_controller = view_controller
Label(self, text = "Location Info:", pady=6, anchor=W, justify=LEFT).grid(row=0, columnspan=2, sticky=W)
# latitude
Label(self, text = "Latitude:", padx=3, anchor=W, justify=LEFT).grid(row=1, sticky=W)
Label(self, textvariable=view_controller.model.GPS_latitude).grid(row=1, column=1)
# longitude
Label(self, text = "Longitude:", padx=3, anchor=W, justify=LEFT).grid(row=2, sticky=W)
Label(self, textvariable=view_controller.model.GPS_longitude).grid(row=2, column=1)
# compass heading info
Label(self, text = "Compass Heading:", padx=3, anchor=W, justify=LEFT).grid(row=3, sticky=W)
Label(self, textvariable=view_controller.model.compass_heading).grid(row=3, column=1)
# GPS heading info
Label(self, text = "GPS Heading:", padx=3, anchor=W, justify=LEFT).grid(row=4, sticky=W)
Label(self, textvariable=view_controller.model.GPS_heading).grid(row=4, column=1)
Label(self, text = "GPS Speed (knots):", padx=3, anchor=W, justify=LEFT).grid(row=5, sticky=W)
Label(self, textvariable=view_controller.model.GPS_speed).grid(row=5, column=1)
Label(self, text = "GPS Altitude:", padx=3, anchor=W, justify=LEFT).grid(row=6, sticky=W)
Label(self, textvariable=view_controller.model.GPS_altitude).grid(row=6, column=1)
# GPS status
Checkbutton(self, text="GPX fix?", font=tkFont.Font(weight="bold"), state=DISABLED, variable=view_controller.model.GPS_fix).grid(row=7, column=0, columnspan=2, sticky=E)
Label(self, text = "# satellites:", padx=3, anchor=W, justify=LEFT).grid(row=8, sticky=W)
Label(self, textvariable=view_controller.model.GPS_satellite_count).grid(row=8, column=1)
Label(self, text = "Other Info:", pady=6, anchor=W, justify=LEFT).grid(row=9, columnspan=2, sticky=W)
# date and time
Label(self, text = "Time:", padx=3, anchor=W, justify=LEFT).grid(row=10, sticky=W)
Label(self, textvariable=view_controller.model.time).grid(row=10, column=1)
Label(self, text = "Date:", padx=3, anchor=W, justify=LEFT).grid(row=11, sticky=W)
Label(self, textvariable=view_controller.model.date).grid(row=11, column=1)
Label(self, text = "Temperature:", padx=3, anchor=W, justify=LEFT).grid(row=12, sticky=W)
Label(self, textvariable=view_controller.model.temperature).grid(row=12, column=1)
class ControlsFrame(Frame, object):
""" UI Frame displaying controls for heading and throttle. """
def __init__(self, master, view_controller):
super(ControlsFrame, self).__init__(master, bd=1, relief=SUNKEN)
self._view_controller = view_controller
Label(self, text = "Control Mode:", pady=6, bd=1, anchor=W, justify=LEFT).pack(fill=X, padx=2, expand=True)
# top frame
self.top_frame = Frame(self)
self.top_frame.pack(fill=X)
# mode buttons
self.btn_manual = Button(self.top_frame, text="Manual", command=self.on_set_manual_mode)
self.btn_manual.config(relief=SUNKEN)
self.btn_manual.pack(side=LEFT, padx=3)
self.btn_pause = Button(self.top_frame, text="Pause", command=self.on_pause)
self.btn_pause.pack(side=LEFT)
self.btn_auto = Button(self.top_frame, text="AutoPilot", command=self.on_set_auto_pilot_mode)
self.btn_auto.pack(side=LEFT, padx=3)
# centre frame
self.lbl_heading = Label(self, text = "Steering (Manual)", pady=6, bd=1, anchor=W, justify=LEFT)
self.lbl_heading.pack(fill=X, padx=2, expand=True)
# rudder heading
self.scl_rudder = Scale(self, orient=HORIZONTAL, from_=-45, to=45, command=self.on_rudder)
self.scl_rudder.set(0)
self.scl_rudder.pack(fill=X, expand=True, padx=5)
self.btn_zero_heading = Button(self, text="Centre Rudder", command=self.on_zero_heading)
|
MostlyOpen/odoo_addons_jcafb
|
myo_professional_cst/__openerp__.py
|
Python
|
agpl-3.0
| 1,550
| 0.000645
|
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This p
|
rogram is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
############################
|
###################################################
{
'name': 'Professional (customizations for CLVhealth-JCAFB Solution)',
'summary': 'Professional Module customizations for CLVhealth-JCAFB Solution.',
'version': '2.0.0',
'author': 'Carlos Eduardo Vercelino - CLVsol',
'category': 'Generic Modules/Others',
'license': 'AGPL-3',
'website': 'http://clvsol.com',
'depends': [
'myo_professional',
'hr',
],
'data': [
'views/professional_view.xml',
],
'demo': [],
'test': [],
'init_xml': [],
'test': [],
'update_xml': [],
'installable': True,
'application': False,
'active': False,
'css': [],
}
|
pmagwene/unscanny
|
scanit.py
|
Python
|
gpl-3.0
| 1,722
| 0.013937
|
#!/usr/bin/env python
import sys
import argparse
import collections
import numpy as np
import tifffile as TIFF
import sane
import click
import toml
def quick_scan(settings = {}, test = False):
"""Make scan using first scanning device found by SANE driver.
"""
# init and find devices
sane.init()
devices = sane.get_devices(localOnly
|
= True)
if test:
devices = [("test", "SANE", "SA
|
NE", "SANE")]
settings["source"]= "Flatbed"
settings["test_picture"] = "Color pattern"
settings["mode"] = "Color"
settings["resolution"] = 75
settings["depth"] = 8
if not len(devices):
return None
dev_name = devices[0][0]
# open scanner
scanner = sane.open(dev_name)
# set options
if "mode" in settings:
scanner.mode = settings["mode"]
for (key, value) in settings.items():
setattr(scanner, key, value)
img = scanner.arr_scan()
scanner.close()
sane.exit()
return img
@click.command()
@click.option("--test/--no-test", default=False, show_default = True,
help = "Use 'test' scanner backend.")
@click.argument("scanner_file",
type = click.Path(exists=True, dir_okay=False))
@click.argument("out_file",
type = click.Path(exists=False, dir_okay=False))
def main(scanner_file, out_file, test):
scanner_settings = toml.load(scanner_file)
if "scanner" not in scanner_settings:
print("Scanner info missing from settings file")
sys.exit(1)
img = quick_scan(scanner_settings["scanner"], test = test)
if img is None:
sys.exit(1)
TIFF.imsave(out_file, img)
if __name__ == "__main__":
main()
|
jedie/django-secure-js-login
|
tests/test_utils/selenium_test_cases.py
|
Python
|
gpl-3.0
| 5,276
| 0.003033
|
# coding: utf-8
"""
Secure JavaScript Login
~~~~~~~~~~~~~~~~~~~~~~~
:copyleft: 2012-2015 by the secure-js-login team, see AUTHORS for more details.
:created: by JensDiemer.de
:license: GNU GPL v3 or above, see LICENSE for more details
"""
from __future__ import unicode_literals, print_function
import sys
import traceback
from django.conf import settings
from django.utils.importlib import import_module
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.http import HttpResponse, SimpleCookie
from django_tools.unittest_utils.selenium_utils import selenium2fakes_response
try:
import selenium
from selenium import webdriver
from selenium.common.exceptions import WebDriverException, UnexpectedAlertPresentException, \
StaleElementReferenceException, TimeoutException
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.common.alert import Alert
except ImportError as err:
selenium_import_error = err
else:
selenium_import_error = None
from tests.test_utils.base_test_cases import SecureLoginBaseTestCase
class SeleniumTestCase(StaticLiveServerTestCase, SecureLoginBaseTestCase):
"""
http://selenium-python.readthedocs.org/
"""
@classmethod
def setUpClass(cls):
super(SeleniumTestCase, cls).setUpClass()
cls.driver = webdriver.Firefox()
cls.driver.set_window_size(800,600)
cls.driver.set_window_position(0,0)
binary_path=cls.driver.binary._start_cmd
user_agent = cls.driver.execute_script("return navigator.userAgent;")
print(
(
"\nUsed browser binary: %s\n"
"user agent: %s\n"
) % (binary_path, user_agent)
)
@classmethod
def tearDownClass(cls):
try:
cls.driver.quit()
except:
pass
super(SeleniumTestCase, cls).tearDownClass()
def setUp(self):
super(SeleniumTestCase, self).setUp()
self.driver.delete_all_cookies()
def _wait(self, conditions, timeout=5, msg="wait timeout"):
"""
Wait for the given condition.
Display page_source on error.
"""
try:
check = WebDriverWait(self.driver, timeout).until(
conditions
)
except TimeoutException as err:
print("\nError: %s\n%s\npage source:\n%s\n" % (msg, err, self.driver.page_source))
raise
else:
self.assertTrue(check)
def get_faked_response(self):
"""
Create a similar 'testing-response' [1] here.
So that some of the django testing assertions [2] can be used
with selenium tests, too ;)
Currently not available:
* response.status_code
* response.redirect_chain
* response.templates
* response.context
Available:
* response.content
* response.cookies
* response.client.cookies
* response.session
[1] https://docs.djangoproject.com/en/1.7/topics/testing/tools/#testing-responses
[2] https://docs.djangoproject.com/en/1.7/topics/testing/tools/#assertions
"""
return selenium2fakes_response(self.driver, self.client, self.client_class)
def _verbose_assertion_error(self, err):
sys.stderr.write("\n\n")
sys.stderr.flush()
sys.stderr.write("*" * 79)
sys.stderr.write("\n")
traceback.print_exc()
sys.stderr.write(" -" * 40)
sys.stderr.write("\n")
try:
page_source = self.driver.page_source
except Exception as e:
print("Can't get 'driver.page_
|
source': %s" % e)
else:
page_source = "\n".join([line for line in page_source.splitlines() if line.rstrip()])
print(page_source, file=sys.stderr)
sys.stderr.write("*" * 79)
sys.stderr.write("\n")
sys.stderr.write("\n\n")
sys.stderr.flush()
raise # raise the origin error
def assertNoJavaScriptAltert(self):
alert = expected_conditions.alert_is_present()(self.driver)
if alert != False:
alert_t
|
ext = alert.text
            alert.accept() # Confirm the alert dialog, otherwise access to driver.page_source will fail!
try:
raise self.failureException("Alert is preset: %s" % alert_text)
except AssertionError as err:
self._verbose_assertion_error(err)
def assertEqualTitle(self, should):
try:
self.assertEqual(self.driver.title, should)
except AssertionError as err:
self._verbose_assertion_error(err)
def assertInPageSource(self, member):
try:
self.assertIn(member, self.driver.page_source)
except AssertionError as err:
self._verbose_assertion_error(err)
def assertNotInPageSource(self, member):
try:
self.assertNotIn(member, self.driver.page_source)
except AssertionError as err:
self._verbose_assertion_error(err)
|
dmS0Zq/ganeti_webmgr
|
ganeti_webmgr/ganeti_web/views/importing.py
|
Python
|
gpl-2.0
| 7,249
| 0
|
# Copyright (C) 2010 Oregon State University et al.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
# USA.
from collections import defaultdict
from django.contrib.auth.decorators import login_required
from django.core.exceptions import PermissionDenied
from django.shortcuts import render_to_response
from django.template import RequestContext
from ..forms.importing import ImportForm, OrphanForm, VirtualMachineForm
from .generic import NO_PRIVS
from ganeti_webmgr.clusters.models import Cluster
from ganeti_webmgr.virtualmachines.models import VirtualMachine
@login_required
def orphans(request):
"""
displays list of orphaned VirtualMachines, i.e. VirtualMachines without
an owner.
"""
user = request.user
if user.is_superuser:
clusters = Cluster.objects.all()
else:
clusters = user.get_objects_any_perms(Cluster, ['admin'])
if not clusters:
raise PermissionDenied(NO_PRIVS)
vms_with_cluster = VirtualMachine.objects.filter(owner=None,
cluster__in=clusters) \
.order_by('hostname').values_list('id', 'hostname', 'cluster')
if request.method == 'POST':
# strip cluster from vms
vms = [(i[0], i[1]) for i in vms_with_cluster]
# process updates if this was a form submission
form = OrphanForm(vms, request.POST)
if form.is_valid():
# update all selected VirtualMachines
data = form.cleaned_data
owner = data['owner']
vm_ids = data['virtual_machines']
# update the owner and save the vm. This isn't the most efficient
# way of updating the VMs but we would otherwise need to group them
# by cluster
orphaned = defaultdict(lambda: 0)
for id in vm_ids:
|
vm = VirtualMachine.objects.get(id=id)
vm.owner = owner
vm.save()
orphaned[vm.cluster_id] -= 1
# remove updated vms from the list
vms_wi
|
th_cluster = [i for i in vms_with_cluster
if unicode(i[0]) not in vm_ids]
else:
# strip cluster from vms
form = ImportForm([(i[0], i[1]) for i in vms_with_cluster])
clusterdict = {}
for i in clusters:
clusterdict[i.id] = i.hostname
vms = [(i[0], clusterdict[i[2]],
i[1]) for i in vms_with_cluster]
return render_to_response("ganeti/importing/orphans.html",
{'vms': vms,
'form': form, },
context_instance=RequestContext(request), )
@login_required
def missing_ganeti(request):
"""
View for displaying VirtualMachines missing from the ganeti cluster
"""
user = request.user
if user.is_superuser:
clusters = Cluster.objects.all()
else:
clusters = user.get_objects_any_perms(Cluster, ['admin'])
if not clusters:
raise PermissionDenied(NO_PRIVS)
vms = []
for cluster in clusters:
for vm in cluster.missing_in_ganeti:
vms.append((vm, vm))
if request.method == 'POST':
# process updates if this was a form submission
form = VirtualMachineForm(vms, request.POST)
if form.is_valid():
# update all selected VirtualMachines
data = form.cleaned_data
vm_ids = data['virtual_machines']
q = VirtualMachine.objects.filter(hostname__in=vm_ids)
missing = defaultdict(lambda: 0)
for i in q:
missing[i.cluster_id] -= 1
q.delete()
# remove updated vms from the list
vms = filter(lambda x: unicode(x[0]) not in vm_ids, vms)
else:
form = VirtualMachineForm(vms)
vms = {}
for cluster in clusters:
for vm in cluster.missing_in_ganeti:
vms[vm] = (cluster.hostname, vm)
vmhostnames = vms.keys()
vmhostnames.sort()
vms_tuplelist = []
for i in vmhostnames:
vms_tuplelist.append((i, vms[i][0], vms[i][1]))
vms = vms_tuplelist
return render_to_response("ganeti/importing/missing.html",
{'vms': vms,
'form': form, },
context_instance=RequestContext(request), )
@login_required
def missing_db(request):
"""
View for displaying VirtualMachines missing from the database
"""
user = request.user
if user.is_superuser:
clusters = Cluster.objects.all()
else:
clusters = user.get_objects_any_perms(Cluster, ['admin'])
if not clusters:
raise PermissionDenied(NO_PRIVS)
vms = []
for cluster in clusters:
for hostname in cluster.missing_in_db:
vms.append(('%s:%s' % (cluster.id, hostname), hostname))
if request.method == 'POST':
# process updates if this was a form submission
form = ImportForm(vms, request.POST)
if form.is_valid():
# update all selected VirtualMachines
data = form.cleaned_data
owner = data['owner']
vm_ids = data['virtual_machines']
import_ready = defaultdict(lambda: 0)
orphaned = defaultdict(lambda: 0)
# create missing VMs
for vm in vm_ids:
cluster_id, host = vm.split(':')
cluster = Cluster.objects.get(id=cluster_id)
VirtualMachine(hostname=host, cluster=cluster,
owner=owner).save()
import_ready[cluster.pk] -= 1
if owner is None:
orphaned[cluster.pk] += 1
# remove created vms from the list
vms = filter(lambda x: unicode(x[0])
not in vm_ids, vms)
else:
form = ImportForm(vms)
vms = {}
for cluster in clusters:
for hostname in cluster.missing_in_db:
vms[hostname] = (u'%s:%s' % (cluster.id, hostname),
unicode(cluster.hostname), unicode(hostname))
vmhostnames = vms.keys()
vmhostnames.sort()
vms_tuplelist = []
for i in vmhostnames:
vms_tuplelist.append(vms[i])
vms = vms_tuplelist
return render_to_response("ganeti/importing/missing_db.html",
{'vms': vms,
'form': form,
},
context_instance=RequestContext(request), )
|
calve/cerberus
|
cerberus/cerberus.py
|
Python
|
isc
| 45,426
| 0.000154
|
"""
Extensible validation for Python dictionaries.
This module implements Cerberus Validator class
:copyright: 2012-2015 by Nicola Iarocci.
:license: ISC, see LICENSE for more details.
Full documentation is available at http://python-cerberus.org
"""
from collections import Callable, Hashable, Iterable, Mapping, MutableMapping, \
Sequence
import copy
from datetime import datetime
from . import errors
import json
import re
import sys
if sys.version_info[0] == 3:
_str_type = str
_int_types = (int,)
else:
_str_type = basestring # noqa
_int_types = (int, long) # noqa
class DocumentError(Exception):
""" Raised when the target document is missing or has the wrong format """
pass
class SchemaError(Exception):
""" Raised when the validation schema is missing, has the wrong format or
contains errors.
"""
pass
class Validator(object):
""" Validator class. Normalizes and validates any mapping against a
validation-schema which is provided as an argument at class instantiation
or upon calling the :func:`validate`, :func:`validated` or
:func:`normalized` method.
:param schema: Optional validation schema, can also be provided upon
processing.
:param transparent_schema_rules: If ``True`` unknown schema rules will be
ignored; no SchemaError will be raised.
                                     Defaults to ``False``. Useful if you need to
extend the schema grammar beyond Cerberus'
domain.
    :param ignore_none_values: If ``True`` it will ignore fields with ``None``-
values when validating. Defaults to ``False``.
Useful if your document is composed from
function-kwargs with ``None``-defaults.
:param allow_unknown: If ``True`` unknown fields that are not defined in
the schema will be ignored.
If a ``dict`` with a definition-schema is given, any
undefined field will be validated against its rules.
Defaults to ``False``.
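    A minimal usage sketch (the schema and documents below are purely illustrative)::
        v = Validator({'name': {'type': 'string', 'required': True}})
        v.validate({'name': 'john'})   # True
        v.validate({'name': 42})       # False; details are in ``v.errors``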
.. versionadded:: 0.10
'normalized'-method
'*of'-rules can be extended by another rule
'validation_rules'-property
'rename'-rule renames a field to a given string
'rename_handler'-rule for unknown fields
'purge_unknown'-property and conditional purging of unknown fields added
'trail'-property of Validator that relates the 'document' to
'root_document'
.. versionchanged:: 0.10
refactoring
.. versionchanged:: 0.9.1
|
'required' will always be validated, regardless of any dependencies.
.. versionadded:: 0.9
        'anyof', 'noneof', 'allof', 'oneof' validation rules.
PyPy support.
'coerce' rule.
'propertyschema' validation rule.
'validator.validated' takes a doc
|
ument as argument and returns a
validated document or 'None' if validation failed.
.. versionchanged:: 0.9
        Use 'str.format' in error messages so that someone who wants to
        override them does not get an exception if arguments are not passed.
'keyschema' is renamed to 'valueschema'. Closes #92.
'type' can be a list of valid types.
Usages of 'document' to 'self.document' in '_validate'.
When 'items' is applied to a list, field name is used as key for
'validator.errors', and offending field indexes are used as keys for
Field errors ({'a_list_of_strings': {1: 'not a string'}})
Additional kwargs that are passed to the __init__-method of an
instance of Validator-(sub-)class are passed to child-validators.
Ensure that additional **kwargs of a subclass persist through
validation.
Improve failure message when testing against multiple types.
Ignore 'keyschema' when not a mapping.
Ignore 'schema' when not a sequence.
'allow_unknown' can also be set for nested dicts. Closes #75.
Raise SchemaError when an unallowed 'type' is used in conjunction with
'schema' rule.
.. versionchanged:: 0.8.1
'dependencies' for sub-document fields. Closes #64.
'readonly' should be validated before any other validation. Closes #63.
'allow_unknown' does not apply to sub-dictionaries in a list.
Closes #67.
update mode does not ignore required fields in subdocuments. Closes #72.
'allow_unknown' does not respect custom rules. Closes #66.
.. versionadded:: 0.8
'dependencies' also support a dict of dependencies.
'allow_unknown' can be a schema used to validate unknown fields.
Support for function-based validation mode.
.. versionchanged:: 0.7.2
Successfully validate int as a float type.
.. versionchanged:: 0.7.1
Validator options like 'allow_unknown' and 'ignore_none_values' are now
taken into consideration when validating sub-dictionaries.
Make self.document always the root level document.
Up-front validation for schemas.
.. versionadded:: 0.7
'keyschema' validation rule.
'regex' validation rule.
'dependencies' validation rule.
        'min', 'max' now apply on floats and numbers too. Closes #30.
'set' data type.
.. versionadded:: 0.6
'number' (integer or float) validator.
.. versionchanged:: 0.5.0
``validator.errors`` returns a dict where keys are document fields and
values are validation errors.
.. versionchanged:: 0.4.0
:func:`validate_update` is deprecated. Use :func:`validate` with
``update=True`` instead.
Type validation is always performed first (only exception being
``nullable``). On failure, it blocks other rules on the same field.
Closes #18.
.. versionadded:: 0.2.0
`self.errors` returns an empty list when validate() has not been called.
        Option to allow nullable field values.
Option to allow unknown key/value pairs.
.. versionadded:: 0.1.0
Option to ignore None values for type checking.
.. versionadded:: 0.0.3
Support for transparent schema rules.
Added new 'empty' rule for string fields.
.. versionadded:: 0.0.2
Support for addition and validation of custom data types.
"""
def __init__(self, *args, **kwargs):
""" The arguments will be treated as with this signature:
__init__(self, schema=None, transparent_schema_rules=False,
ignore_none_values=False, allow_unknown=False,
purge_unknown=False)
"""
self.document = None
self._errors = {}
self.root_document = None
self.trail = ()
self.update = False
""" Assign args to kwargs and store configuration. """
signature = ('schema', 'transparent_schema_rules',
'ignore_none_values', 'allow_unknown', 'purge_unknown')
for i, p in enumerate(signature[:len(args)]):
if p in kwargs:
raise TypeError("__init__ got multiple values for argument "
"'%s'" % p)
else:
kwargs[p] = args[i]
self.__config = kwargs
self.validation_rules = self.__introspect_validation_rules()
self._schema = DefinitionSchema(self, kwargs.get('schema', ()))
def __introspect_validation_rules(self):
rules = ['_'.join(x.split('_')[2:]) for x in dir(self)
if x.startswith('_validate')]
return tuple(rules)
def __call__(self, *args, **kwargs):
return self.validate(*args, **kwargs)
def _error(self, field, _error):
field_errors = self._errors.get(field, [])
if not isinstance(field_errors, list):
field_errors = [field_errors]
if isinstance(_error, (_str_type, dict)):
field_errors.append(_error)
else:
field_errors.extend(_error)
|
nicolaiarocci/eve-oauth2
|
run.py
|
Python
|
bsd-3-clause
| 917
| 0.001091
|
# -*- coding: utf-8 -*-
"""
Eve Demo (Secured)
~~~~~~~~~~~~~~~~~~
This is a fork of Eve Demo (https://github.com/pyeve/eve-demo)
intended to demonstrate how a Eve API can be secured by means of
Flask-Sentinel.
For demonstration purposes, besides protecting a couple API endpoints
with a BearerToken class instance, we are also adding a
|
static html
endpoint and protecting it via a decorator.
:copyright: (c) 2015 by Nicola Iarocci.
:license: BSD, see LICENSE for more details.
"""
from eve import Eve
from oauth2 import BearerAuth
from flask.ext.sentinel import ResourceOwnerPasswordCredentials, oauth
app = Eve(auth=BearerAuth)
ResourceOwnerPasswordCredentials(app)
@app.route('/endpoint')
@oauth.require_oauth()
def restricted_access():
return "You made it
|
through and accessed the protected resource!"
if __name__ == '__main__':
app.run(ssl_context='adhoc')
|
s-leger/archipack
|
pygeos/op_valid.py
|
Python
|
gpl-3.0
| 31,278
| 0.000959
|
# -*- coding:utf-8 -*-
# ##### BEGIN LGPL LICENSE BLOCK #####
# GEOS - Geometry Engine Open Source
# http://geos.osgeo.org
#
# Copyright (C) 2011 Sandro Santilli <strk@kbt.io>
# Copyright (C) 2005 2006 Refractions Research Inc.
# Copyright (C) 2001-2002 Vivid Solutions Inc.
# Copyright (C) 1995 Olivier Devillers <Olivier.Devillers@sophia.inria.fr>
#
# This is free software you can redistribute and/or modify it under
# the terms of the GNU Lesser General Public Licence as published
# by the Free Software Foundation.
# See the COPYING file for more information.
#
# ##### END LGPL LICENSE BLOCK #####
# <pep8 compliant>
# ----------------------------------------------------------
# Partial port (version 3.7.0) by: Stephen Leger (s-leger)
#
# ----------------------------------------------------------
from .geomgraph import (
PlanarGraph,
GeometryGraph,
EdgeRing,
Node,
DirectedEdgeStar
)
from .algorithms import (
CGAlgorithms,
LineIntersector,
MCPointInRing
)
from .index_strtree import STRtree
from .shared import (
logger,
GeomTypeId,
Position,
Location
)
from .op_relate import (
RelateNodeGraph
|
)
from .op_overlay import (
MinimalEdgeRing,
MaximalEdgeRing,
OverlayNodeFactory
)
class TopologyErrors():
eError = 0
eRepeatedPoint = 1
eHoleOutsideShell = 2
eNestedHoles = 3
eDisconnectedInterior = 4
eSelfIntersection = 5
eRingSelfIntersection = 6
eNestedShells = 7
eDuplicatedRings = 8
eTooFewPoints = 9
eInvalidCoor
|
dinate = 10
eRingNotClosed = 11
msg = (
"Topology Validation Error",
"Repeated Point",
"Hole lies outside exterior",
"Holes are nested",
"Interior is disconnected",
"Self-intersection",
"Ring Self-intersection",
"Nested exteriors",
"Duplicate Rings",
"Too few points in geometry component",
"Invalid Coordinate",
"Ring is not closed")
class TopologyValidationError():
def __init__(self, errorType: int, coord=None):
self.errorType = errorType
self.coord = coord
@property
def message(self):
return TopologyErrors.msg[self.errorType]
def __str__(self):
return "{} at or near point:{}".format(self.message, self.coord)
class ConsistentAreaTester():
"""
* Checks that a geomgraph.GeometryGraph representing an area
* (a geom.Polygon or geom.MultiPolygon)
* has consistent semantics for area geometries.
* This check is required for any reasonable polygonal model
* (including the OGC-SFS model, as well as models which allow ring
* self-intersection at single points)
*
* Checks include:
*
* - test for rings which properly intersect
* (but not for ring self-intersection, or intersections at vertices)
* - test for consistent labelling at all node points
* (this detects vertex intersections with invalid topology,
* i.e. where the exterior side of an edge lies in the interiors of the area)
* - test for duplicate rings
*
* If an inconsistency is found the location of the problem
* is recorded and is available to the caller.
"""
def __init__(self, graph):
"""
* Creates a new tester for consistent areas.
*
* @param geomGraph the topology graph of the area geometry.
* Caller keeps responsibility for its deletion
"""
self._li = LineIntersector()
# GeometryGraph
self._graph = graph
self._nodeGraph = RelateNodeGraph()
# the intersection point found (if any)
# Coordinate
self.invalidPoint = None
@property
def isNodeConsistentArea(self):
"""
* Check all nodes to see if their labels are consistent with
* area topology.
*
* @return true if this area has a consistent node
* labelling
* To fully check validity, it is necessary to
* compute ALL intersections, including self-intersections within a single edge.
"""
# SegmentIntersector
intersector = self._graph.computeSelfNodes(self._li, True, True)
if intersector.hasProper:
logger.debug("ConsistentAreaTester SegmentIntersector.hasProper")
self.invalidPoint = intersector.properIntersectionPoint
return False
self._nodeGraph.build(self._graph)
return self.isNodeEdgeAreaLabelsConsistent
@property
def isNodeEdgeAreaLabelsConsistent(self):
"""
* Check all nodes to see if their labels are consistent.
* If any are not, return false
"""
map = self._nodeGraph.nodes
for node in map:
if not node.star.isAreaLabelsConsistent(self._graph):
logger.debug("ConsistentAreaTester !star.isAreaLabelsConsistent")
self.invalidPoint = node.coord
return False
return True
@property
def hasDuplicateRings(self):
"""
* Checks for two duplicate rings in an area.
* Duplicate rings are rings that are topologically equal
* (that is, which have the same sequence of points up to point order).
* If the area is topologically consistent (determined by calling the
        * isNodeConsistentArea),
* duplicate rings can be found by checking for EdgeBundles which contain
* more than one geomgraph.EdgeEnd.
* (This is because topologically consistent areas cannot have two rings sharing
* the same line segment, unless the rings are equal).
* The start point of one of the equal rings will be placed in
* invalidPoint.
*
* @return true if this area Geometry is topologically consistent but has two duplicate rings
"""
map = self._nodeGraph.nodes
for node in map:
# EdgeEndStar
star = node.star
# EdgeEndBundle
for eeb in star:
if len(eeb._edgeEnds) > 1:
logger.debug("ConsistentAreaTester.hasDuplicateRings")
self.invalidPoint = eeb.edge.coords[0]
return True
return False
class ConnectedInteriorTester():
"""
* This class tests that the interiors of an area Geometry
* (Polygon or MultiPolygon)
* is connected.
*
* An area Geometry is invalid if the interiors is disconnected.
* This can happen if:
*
* - one or more interiors either form a chain touching the exterior at two places
* - one or more interiors form a ring around a portion of the interiors
*
    * If an inconsistency is found the location of the problem
* is recorded.
"""
def __init__(self, newGeomgraph):
# GeometryFactory
self._factory = None
# GeometryGraph
self._graph = newGeomgraph
# Coordinate
self.invalidPoint = None
@property
def isInteriorsConnected(self):
# Edge
splitEdges = []
self._graph.computeSplitEdges(splitEdges)
# PlanarGraph
graph = PlanarGraph(OverlayNodeFactory())
graph.addEdges(splitEdges)
self._setInteriorEdgesInResult(graph)
graph.linkResultDirectedEdges()
# EdgeRing
edgeRings = []
self._buildEdgeRings(graph._edgeEnds, edgeRings)
"""
* Mark all the edges for the edgeRings corresponding to the exteriors
* of the input polygons.
*
* Only ONE ring gets marked for each exterior - if there are others
* which remain unmarked this indicates a disconnected interiors.
"""
self._visitShellInteriors(self._graph.geom, graph)
"""
* If there are
|
amerlyq/airy
|
ranger/plugins/macro_date.py
|
Python
|
mit
| 866
| 0
|
# Compatible with ranger 1.6.0 through 1.7.*
#
# This plugin adds the new macro %date which is substituted with the current
# date in commands
|
that allow macros. You can test it with the command
# ":shell echo %date; read"
# from __future__ import (absolute_import, division, print_function)
import time
import ranger.core.actions
# Save the original macro function
GET_MAC
|
ROS_OLD = ranger.core.actions.Actions.get_macros
# Define a new macro function
def get_macros_with_date(self):
macros = GET_MACROS_OLD(self)
macros['dt'] = time.strftime('%Y%m%d')
macros['dT'] = time.strftime('%Y-%m-%d')
macros['dw'] = time.strftime('%Y-%m-%d-%a')
macros['dW'] = time.strftime('%Y-%m-%d-%a-W%W')
macros['date'] = time.strftime('%Y/%m/%d')
return macros
# Overwrite the old one
ranger.core.actions.Actions.get_macros = get_macros_with_date
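# The shorter forms added above work the same way; e.g. (output is illustrative):
#   :shell echo %dT   ->  2024-01-31
#   :shell echo %dw   ->  2024-01-31-Wed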
|
algorythmic/bash-completion
|
test/t/test_ncftp.py
|
Python
|
gpl-2.0
| 245
| 0
|
import pytest
class TestNcftp:
@pytest.mark.complete("ncftp ")
def test_1(
|
self, completion):
assert completion
|
@pytest.mark.complete("ncftp -", require_cmd=True)
def test_2(self, completion):
assert completion
|
apruden/opal
|
opal-python-client/src/main/python/opal/perm_project.py
|
Python
|
gpl-3.0
| 1,309
| 0.00382
|
"""
Apply permissions on a project.
"""
import sys
import pycurl
import opal.core
import opal.perm
PERMISSIONS = {
'administrate': 'PROJECT_ALL'
}
def add_arguments(parser):
"""
Add command specific options
"""
opal.perm.add_permission_arguments(parser, PERMISSIONS.keys())
parser.add_argument('--project', '-pr', required=True, help='Project name')
def do_command(args):
"""
Execute permi
|
ssion command
"""
# Build and send requests
try:
opal.perm.validate_args(args, PERMISSIONS)
request = opal.core.OpalClient.build(opal.core.OpalClient.LoginInfo.parse(args)).new_request()
if args.verbose:
request.verbose()
# send request
if args.delete:
request.delete()
else:
request.post()
try:
response = request.resource(opal.perm.do_ws
|
(args, ['project', args.project, 'permissions', 'project'], PERMISSIONS)).send()
except Exception, e:
print Exception, e
# format response
if response.code != 200:
print response.content
except Exception, e:
print e
sys.exit(2)
except pycurl.error, error:
errno, errstr = error
print >> sys.stderr, 'An error occurred: ', errstr
sys.exit(2)
|
VirgiliaBeatrice/VocabularyGenerator
|
src/sqlbase.py
|
Python
|
mit
| 4,534
| 0.001764
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Created by iFantastic on 15/09/05
import sqlite3
#TODO: Add more class method for Database Class.
class Database():
def __init__(self, db_name):
self.db = sqlite3.connect(db_name.decode('utf-8'))
# self.db = sqlite3.connect(':memory:')
self.cr = self.db.cursor()
def create_table(self, table_name, columns_detail):
command = [
('', False),
'CREATE TABLE',
table_name,
columns_detail
]
# print create_str(command)
try:
self.cr.execute(create_str(command))
except sqlite3.OperationalError, e:
print 'Exception: ' + e.message
def delete_table(self, table_name):
command = [
('', False),
'DROP TABLE',
table_name
]
try:
self.cr.execute(create_str(command))
except sqlite3.OperationalError, e:
print 'Exception: ' + e.message
def query(self, table_name, columns_name, conditions):
command = [
('', False),
'SELECT',
columns_name,
'FROM',
table_name,
'WHERE',
conditions
]
print create_str(command)
self.cr.execute(create_str(comma
|
nd))
return self.cr.fetchall()
def update(self, table_name, values, conditions):
command = [
('', False),
'UPDATE',
table_name,
'SET',
values,
'WHERE',
conditions
]
print create_str(command)
self.cr.execute(create_str(command))
self
|
.db.commit()
def insert(self, table_name, values):
query_qmark = ['?' for dummy_idx in range(len(values))]
query_qmark.insert(0, (',', True))
command = [
('', False),
'INSERT INTO',
table_name,
'VALUES',
query_qmark
]
print create_str(command)
self.cr.execute(create_str(command), values)
self.db.commit()
def disconnect(self):
self.db.close()
def create_str(target):
return_str = u''
if isinstance(target, dict):
iter_target = target.copy()
sep = iter_target.pop('sep')
hasBracket = iter_target.pop('hasBracket')
for element_key, element_value in iter_target.iteritems():
return_str += (element_key.decode('utf-8') + u'=' + element_value.decode('utf-8') +
sep.decode('utf-8') + u' ')
else:
sep = target[0][0]
hasBracket = target[0][1]
for element in target[1:]:
if isinstance(element, tuple) or isinstance(element, list) or isinstance(element, dict):
return_str += create_str(element) + sep.decode('utf-8') + u' '
else:
return_str += element.decode('utf-8') + sep.decode('utf-8') + u' '
if hasBracket:
return u'(' + return_str[:-len(sep.decode('utf-8') + u' ')] + u')'
else:
return return_str[:-len(sep.decode('utf-8') + u' ')]
# columns = [
# (',', True),
# (('', False), 'Word', 'TEXT'),
# (('', False), 'Pronunciation', 'TEXT'),
# (('', False), 'Grammar', 'TEXT'),
# (('', False), 'Definition', 'TEXT'),
# (('', False), 'Example', 'TEXT')
# ]
# command_1 = [
# ('', False),
# 'CREATE',
# 'TABLE',
# 'table_name',
# columns
# ]
# print create_str(command_1)
# cur.execute("insert into people values (?, ?)", (who, age))
# UPDATE Customers
# SET ContactName='Alfred Schmidt', City='Hamburg'
# WHERE CustomerName='Alfreds Futterkiste';
if __name__ == '__main__':
db = Database('Vocabulary.db')
columns = [
(',', True),
(('', False), 'Word', 'TEXT'),
(('', False), 'Pronunciation', 'TEXT'),
(('', False), 'Grammar', 'TEXT'),
(('', False), 'Definition', 'TEXT'),
(('', False), 'Example', 'TEXT')
]
# print create_tuple_str(columns[1], ',')
# db.delete_table('Vocabulary')
# db.create_table('Vocabulary', columns)
# db.insert('Vocabulary', ('A', 'A', 'A', 'A', 'A'))
# db.update('Vocabulary', {'sep': ',', 'hasBracket': False, 'Word': '\'abandon\'', 'Pronunciation': '\'aa\''},
# {'sep': '', 'hasBracket': False, 'Word': '\'A\''})
print db.query('Vocabulary', ((',', False), 'Word'), {'sep': '', 'hasBracket': False, 'Word': '\'abandon\''})
pass
|
markYoungH/chromium.src
|
tools/telemetry/telemetry/core/backends/chrome_inspector/inspector_backend.py
|
Python
|
bsd-3-clause
| 9,745
| 0.008722
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import os
import sys
from telemetry import decorators
from telemetry.core import exceptions
from telemetry.core import util
from telemetry.core.backends.chrome_inspector import inspector_console
from telemetry.core.backends.chrome_inspector import inspector_memory
from telemetry.core.backends.chrome_inspector import inspector_network
from telemetry.core.backends.chrome_inspector import inspector_page
from telemetry.core.backends.chrome_inspector import inspector_runtime
from telemetry.core.backends.chrome_inspector import inspector_timeline
from telemetry.core.backends.chrome_inspector import inspector_websocket
from telemetry.core.backends.chrome_inspector import websocket
from telemetry.core.heap import model as heap_model_module
from telemetry.image_processing import image_util
from telemetry.timeline import model as timeline_model_module
from telemetry.timeline import recording_options
from telemetry.timeline import trace_data as trace_data_module
class InspectorException(Exception):
pass
class InspectorBackend(object):
def __init__(self, app, devtools_client, context, timeout=60):
self._websocket = inspector_websocket.InspectorWebsocket(self._HandleError)
self._websocket.RegisterDomain(
'Inspector', self._HandleInspectorDomainNotification)
self._app = app
self._devtools_client = devtools_client
# Be careful when using the context object, since the data may be
# outdated since this is never updated once InspectorBackend is
# created. Consider an updating strategy for this. (For an example
# of the subtlety, see the logic for self.url property.)
self._context = context
logging.debug('InspectorBackend._Connect() to %s', self.debugger_url)
try:
self._websocket.Connect(self.debugger_url)
except (websocket.WebSocketException, util.TimeoutException) as e:
raise InspectorException(e.msg)
self._console = inspector_console.InspectorConsole(self._websocket)
self._memory = inspector_memory.InspectorMemory(self._websocket)
self._page = inspector_page.InspectorPage(
self._websocket, timeout=timeout)
self._runtime = inspector_runtime.InspectorRuntime(self._websocket)
self._timeline = inspector_timeline.InspectorTimeline(self._websocket)
self._network = ins
|
pector_network.InspectorNetwork(self._websocket)
self._timeline_model = None
def __del__(self):
self._websocket.Disconnect()
@property
def app(self):
return self._app
@property
def url(self):
for c in self._devtools_client.ListInspectableContexts():
if c['id'] == self._context['id']:
return c['url']
return None
# TODO(chrishenry): Is this intentional? Shouldn't this return
# self._context['id'] instead?
@
|
property
def id(self):
return self.debugger_url
@property
def debugger_url(self):
return self._context['webSocketDebuggerUrl']
# Public methods implemented in JavaScript.
@property
@decorators.Cache
def screenshot_supported(self):
if (self.app.platform.GetOSName() == 'linux' and (
os.getenv('DISPLAY') not in [':0', ':0.0'])):
# Displays other than 0 mean we are likely running in something like
# xvfb where screenshotting doesn't work.
return False
return not self.EvaluateJavaScript("""
window.chrome.gpuBenchmarking === undefined ||
window.chrome.gpuBenchmarking.beginWindowSnapshotPNG === undefined
""")
def Screenshot(self, timeout):
assert self.screenshot_supported, 'Browser does not support screenshotting'
self.EvaluateJavaScript("""
if(!window.__telemetry) {
window.__telemetry = {}
}
window.__telemetry.snapshotComplete = false;
window.__telemetry.snapshotData = null;
window.chrome.gpuBenchmarking.beginWindowSnapshotPNG(
function(snapshot) {
window.__telemetry.snapshotData = snapshot;
window.__telemetry.snapshotComplete = true;
}
);
""")
def IsSnapshotComplete():
return self.EvaluateJavaScript(
'window.__telemetry.snapshotComplete')
util.WaitFor(IsSnapshotComplete, timeout)
snap = self.EvaluateJavaScript("""
(function() {
var data = window.__telemetry.snapshotData;
delete window.__telemetry.snapshotComplete;
delete window.__telemetry.snapshotData;
return data;
})()
""")
if snap:
return image_util.FromBase64Png(snap['data'])
return None
# Console public methods.
@property
def message_output_stream(self): # pylint: disable=E0202
return self._console.message_output_stream
@message_output_stream.setter
def message_output_stream(self, stream): # pylint: disable=E0202
self._console.message_output_stream = stream
# Memory public methods.
def GetDOMStats(self, timeout):
dom_counters = self._memory.GetDOMCounters(timeout)
return {
'document_count': dom_counters['documents'],
'node_count': dom_counters['nodes'],
'event_listener_count': dom_counters['jsEventListeners']
}
# Page public methods.
def WaitForNavigate(self, timeout):
self._page.WaitForNavigate(timeout)
def Navigate(self, url, script_to_evaluate_on_commit, timeout):
self._page.Navigate(url, script_to_evaluate_on_commit, timeout)
def GetCookieByName(self, name, timeout):
return self._page.GetCookieByName(name, timeout)
# Runtime public methods.
def ExecuteJavaScript(self, expr, context_id=None, timeout=60):
self._runtime.Execute(expr, context_id, timeout)
def EvaluateJavaScript(self, expr, context_id=None, timeout=60):
return self._runtime.Evaluate(expr, context_id, timeout)
def EnableAllContexts(self):
return self._runtime.EnableAllContexts()
# Timeline public methods.
@property
def timeline_model(self):
return self._timeline_model
def StartTimelineRecording(self, options=None):
if not options:
options = recording_options.TimelineRecordingOptions()
if options.record_timeline:
self._timeline.Start()
if options.record_network:
self._network.timeline_recorder.Start()
def StopTimelineRecording(self):
builder = trace_data_module.TraceDataBuilder()
data = self._timeline.Stop()
if data:
builder.AddEventsTo(trace_data_module.INSPECTOR_TRACE_PART, data)
data = self._network.timeline_recorder.Stop()
if data:
builder.AddEventsTo(trace_data_module.INSPECTOR_TRACE_PART, data)
if builder.HasEventsFor(trace_data_module.INSPECTOR_TRACE_PART):
self._timeline_model = timeline_model_module.TimelineModel(
builder.AsData(), shift_world_to_zero=False)
else:
self._timeline_model = None
@property
def is_timeline_recording_running(self):
return self._timeline.is_timeline_recording_running
# Network public methods.
def ClearCache(self):
self._network.ClearCache()
# Methods used internally by other backends.
def _IsInspectable(self):
contexts = self._devtools_client.ListInspectableContexts()
return self._context['id'] in [c['id'] for c in contexts]
def _HandleInspectorDomainNotification(self, res):
if (res['method'] == 'Inspector.detached' and
res.get('params', {}).get('reason', '') == 'replaced_with_devtools'):
self._WaitForInspectorToGoAwayAndReconnect()
return
if res['method'] == 'Inspector.targetCrashed':
raise exceptions.DevtoolsTargetCrashException(self.app)
def _HandleError(self, elapsed_time):
if self._IsInspectable():
raise exceptions.DevtoolsTargetCrashException(self.app,
'Received a socket error in the browser connection and the tab '
'still exists, assuming it timed out. '
'Elapsed=%ds Error=%s' % (elapsed_time, sys.exc_info()[1]))
raise exceptions.DevtoolsTargetCrashException(self.app,
'Received a socket error in the browser connection and the tab no '
'longer exists, assuming
|
WheatonCS/Lexos
|
lexos/receivers/base_receiver.py
|
Python
|
mit
| 1,956
| 0
|
"""This is the base receiver for the base model."""
from flask import request
from typing import Optional, Union, Dict, List
RequestData = Dict[str, Union[str, dict, List[dict]]]
class BaseReceiver:
"""This is the base receiver class for
|
the base model."""
def __init__(self):
"""Use base model for all the models.
used to handle requests and other common stuff
"""
pass
@property
def _front_end_data_nullable(self) -> Optional[RequestData]:
"""Get nullable front-end data.
the front end data, possibly None:
- if not
|
 in a request context, you will get None
- if no request data is sent in current request, you will get None
:return: the front end data, possibly None.
"""
try:
return self._get_all_options_from_front_end()
except RuntimeError: # working out of request context
return None
@property
def _front_end_data(self) -> RequestData:
"""Get null-safe version of front end data.
        :return: all the front end data packed in a dict
"""
assert self._front_end_data_nullable is not None
return self._front_end_data_nullable
@staticmethod
def _get_all_options_from_front_end() -> Optional[RequestData]:
"""Get all the options from front end.
This function is not null-safe:
        (which means it is possible for this function to return None).
        :return: an optional dict with data from front end
"""
if request.json:
return dict(request.json)
elif request.form:
return request.form
else:
return None
def options_from_front_end(self):
"""Pack specific option needed using the virtual method.
find all the option needed and pack them into a struct.
Needs to be implemented in other receivers
"""
raise NotImplementedError
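    # Illustrative subclass (the option name "exampleOption" is hypothetical),
    # showing the intended pattern for a concrete receiver:
    #
    #     class ExampleReceiver(BaseReceiver):
    #         def options_from_front_end(self):
    #             return {"example": self._front_end_data["exampleOption"]}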
|
estuans/django-oscar-gmerchant
|
gmerchant/migrations/0007_auto__add_field_googleproduct_google_shopping_updated.py
|
Python
|
bsd-3-clause
| 13,944
| 0.00753
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'GoogleProduct.google_shopping_updated'
db.add_column(u'gmerchant_googleproduct', 'google_shopping_updated',
self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'GoogleProduct.google_shopping_updated'
db.delete_column(u'gmerchant_googleproduct', 'google_shopping_updated')
models = {
'catalogue.attributeoption': {
'Meta': {'object_name': 'AttributeOption'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'options'", 'to': "orm['catalogue.AttributeOptionGroup']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'option': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'catalogue.attributeoptiongroup': {
'Meta': {'object_name': 'AttributeOptionGroup'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'catalogue.category': {
'Meta': {'ordering': "['full_name']", 'object_name': 'Category'},
'depth': ('django.db.models.fields.PositiveIntegerField', [], {}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'full_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'numchild': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'path': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'})
},
'catalogue.option': {
'Meta': {'object_name': 'Option'},
'code': ('oscar.models.fields.autoslugfield.AutoSlugField', [], {'allow_duplicates': 'False', 'max_length': '128', 'separator': "u'-'", 'blank': 'True', 'unique': 'True', 'populate_from': "'name'", 'overwrite': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'Required'", 'max_length': '128'})
},
'catalogue.product': {
'Meta': {'ordering': "['-date_created']", 'object_name': 'Product'},
'attributes': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.ProductAttribute']", 'through': "orm['catalogue.ProductAttributeValue']", 'symmetrical': 'False'}),
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Category']", 'through': "orm['catalogue.ProductCategory']", 'symmetrical': 'False'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_discountable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['catalogue.Product']"}),
'product_class': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'products'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': "orm['catalogue.ProductClass']"}),
'product_options': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Option']", 'symmetrical': 'False', 'blank': 'True'}),
            'rating': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'recommended_products': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Product']", 'symmetrical': 'False', 'through': "orm['catalogue.ProductRecommendation']", 'blank': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'}),
'structure': ('django.db.models.fields.CharField', [], {'default': "'standalone'", 'max_length': '10'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'upc': ('oscar.models.fields.NullCharField', [], {'max_length': '64', 'unique': 'True', 'null': 'True', 'blank': 'True'})
},
'catalogue.productattribute': {
'Meta': {'ordering': "['code']", 'object_name': 'ProductAttribute'},
'code': ('django.db.models.fields.SlugField', [], {'max_length': '128'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'option_group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeOptionGroup']", 'null': 'True', 'blank': 'True'}),
'product_class': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'attributes'", 'null': 'True', 'to': "orm['catalogue.ProductClass']"}),
'required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'text'", 'max_length': '20'})
},
'catalogue.productattributevalue': {
'Meta': {'unique_together': "(('attribute', 'product'),)", 'object_name': 'ProductAttributeValue'},
'attribute': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.ProductAttribute']"}),
'entity_content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']", 'null': 'True', 'blank': 'True'}),
'entity_object_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attribute_values'", 'to': "orm['catalogue.Product']"}),
'value_boolean': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'value_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'value_file': ('django.db.models.fields.files.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'value_float': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'value_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'value_integer': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'value_option': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeOption']", 'null': 'True', 'blank': 'True'}),
'value_richtext': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'value_text': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
|
IEMLdev/propositions-restful-server
|
ieml/dictionary/table/table_structure.py
|
Python
|
gpl-3.0
| 3,891
| 0.003341
|
import sys
from collections import defaultdict
from itertools import chain
from ieml import error
from ieml.commons import logger
from ieml.dictionary.table.table import *
class TableStructure:
    # Defines a forest of root paradigms.
    # This class defines, for each paradigm:
    #   o the parent table
    #   o the coordinates of each cell
    # The table structure defines the rank of the paradigms.
def __init__(self, scripts, roots):
tables, root_paradigms = self._build_tables(roots, scripts)
self.tables = tables
self.roots = root_paradigms
self.table_to_root = {t: r for r, t_s in self.roots.items() for t in t_s}
# self.table_to_root = {t: r for r, t_s in self.roots.items() for t in t_s}
def root(self, s):
try:
return self.table_to_root[self.tables[s]]
except KeyError as e:
error("No root defined for the script {}".format(str(s)))
return None
def __iter__(self):
yield from self.tables.values()
def __getitem__(self, item):
return self.tables[item]
def __contains__(self, item):
return item in self.tables
def children(self, table: Table):
"""
:param table:
:return:
"""
return {t for t in self.tables.values() if t.parent == table}
@staticmethod
def _define_root(root, paradigms):
# add a new root paradigm to the tree structure
root_table = table_class(root)(root, parent=None)
tables = {root_table}
cells = set()
for ss in root.singular_sequences:
cell = Cell(script=ss, parent=root_table)
cells.add(cell)
defined = {root_table}
for s in sorted(set(paradigms) - {root}, key=len, reverse=True):
if s in tables:
raise ValueError("Already defined")
# continue
candidates = set()
for t in defined:
accept, regular = t.accept_script(s)
if accept:
candidates |= {(t, regular)}
if len(candidates) == 0:
logger.info("TableStructure._define_root: No parent candidate for the table produced by script %s "
"ignoring this script." % (str(s)))
continue
if len(candidates) > 1:
logger.info("TableStructure._define_root: Multiple parent candidate for the table produced by script %s: {%s} "
"choosing the smaller one." % (str(s), ', '.join([str(c[0]) for c in candidates])))
parent, regular = min(candidates, key=lambda t: t[0].script)
table = table_class(s)(script=s,
parent=parent,
regular=regular)
tables.add(table)
defined.add(table)
return tables, cells
@staticmethod
def _build_tables(root_scripts, scripts):
roots = defaultdict(list)
root_ss = {}
for root in root_scripts:
for ss in root.singular_sequences:
root_ss[ss] = root
# assign each paradigm to its root paradigm
for s in scripts:
if s.cardinal == 1:
continue
if s.singular_sequences[0] not in root_ss:
                logger.error(str(s.singular_sequences[0]) + " not found")
continue
roots[root_ss[s.singular_sequences[0]]].append(s)
        root_paradigms = {}
for root in root_scripts:
tables, cells = TableStructure._define_root(root=root, paradigms=roots[root])
root_paradigms[root] = tables | cells
tables = {}
for t in chain.from_iterable(root_paradigms.values()):
tables[t.script] = t
return tables, root_paradigms
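# --- Illustrative sketch (not part of the original module) ---
# Assuming `ts` is an already-built TableStructure and `s` is one of the
# paradigm scripts handed to its constructor, the lookup helpers above would
# be used roughly as follows (the function name is a placeholder).
def describe_paradigm(ts, s):
    if s not in ts:                      # __contains__ checks the script index
        return None
    table = ts[s]                        # __getitem__ returns the Table object
    return {
        'root': ts.root(s),              # root paradigm owning this table
        'children': ts.children(table),  # tables whose parent is this table
    }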
|
ProfessorX/Config
|
.PyCharm30/system/python_stubs/-1247971765/PyKDE4/kdecore/KDateTime.py
|
Python
|
gpl-2.0
| 5,923
| 0.011312
|
# encoding: utf-8
# module PyKDE4.kdecore
# from /usr/lib/python3/dist-packages/PyKDE4/kdecore.cpython-34m-x86_64-linux-gnu.so
# by generator 1.135
# no doc
# imports
import PyQt4.QtCore as __PyQt4_QtCore
import PyQt4.QtNetwork as __PyQt4_QtNetwork
class KDateTime(): # skipped bases: <class 'sip.wrapper'>
# no doc
    def addDays(self, *args, **kwargs): # real signature unknown
pass
def addMonths(self, *args, **kwargs): # real signature unknown
pass
def addMSecs(self, *args, **kwargs): # real signature unknown
pass
def addSecs(self, *args, **kwargs): # real signature unknown
pass
    def addYears(self, *args, **kwargs): # real signature unknown
pass
def compare(self, *args, **kwargs): # real signature unknown
pass
def currentDateTime(self, *args, **kwargs): # real signature unknown
pass
def currentLocalDate(self, *args, **kwargs): # real signature unknown
pass
def currentLocalDateTime(self, *args, **kwargs): # real signature unknown
pass
def currentLocalTime(self, *args, **kwargs): # real signature unknown
pass
def currentUtcDateTime(self, *args, **kwargs): # real signature unknown
pass
def date(self, *args, **kwargs): # real signature unknown
pass
def dateTime(self, *args, **kwargs): # real signature unknown
pass
def daysTo(self, *args, **kwargs): # real signature unknown
pass
def detach(self, *args, **kwargs): # real signature unknown
pass
def fromString(self, *args, **kwargs): # real signature unknown
pass
def isClockTime(self, *args, **kwargs): # real signature unknown
pass
def isDateOnly(self, *args, **kwargs): # real signature unknown
pass
def isLocalZone(self, *args, **kwargs): # real signature unknown
pass
def isNull(self, *args, **kwargs): # real signature unknown
pass
def isOffsetFromUtc(self, *args, **kwargs): # real signature unknown
pass
def isSecondOccurrence(self, *args, **kwargs): # real signature unknown
pass
def isUtc(self, *args, **kwargs): # real signature unknown
pass
def isValid(self, *args, **kwargs): # real signature unknown
pass
def outOfRange(self, *args, **kwargs): # real signature unknown
pass
def realCurrentLocalDateTime(self, *args, **kwargs): # real signature unknown
pass
def secsTo(self, *args, **kwargs): # real signature unknown
pass
def secsTo_long(self, *args, **kwargs): # real signature unknown
pass
def setDate(self, *args, **kwargs): # real signature unknown
pass
def setDateOnly(self, *args, **kwargs): # real signature unknown
pass
def setDateTime(self, *args, **kwargs): # real signature unknown
pass
def setFromStringDefault(self, *args, **kwargs): # real signature unknown
pass
def setSecondOccurrence(self, *args, **kwargs): # real signature unknown
pass
def setSimulatedSystemTime(self, *args, **kwargs): # real signature unknown
pass
def setTime(self, *args, **kwargs): # real signature unknown
pass
def setTimeSpec(self, *args, **kwargs): # real signature unknown
pass
def setTime_t(self, *args, **kwargs): # real signature unknown
pass
def time(self, *args, **kwargs): # real signature unknown
pass
def timeSpec(self, *args, **kwargs): # real signature unknown
pass
def timeType(self, *args, **kwargs): # real signature unknown
pass
def timeZone(self, *args, **kwargs): # real signature unknown
pass
def toClockTime(self, *args, **kwargs): # real signature unknown
pass
def toLocalZone(self, *args, **kwargs): # real signature unknown
pass
def toOffsetFromUtc(self, *args, **kwargs): # real signature unknown
pass
def toString(self, *args, **kwargs): # real signature unknown
pass
def toTimeSpec(self, *args, **kwargs): # real signature unknown
pass
def toTime_t(self, *args, **kwargs): # real signature unknown
pass
def toUtc(self, *args, **kwargs): # real signature unknown
pass
def toZone(self, *args, **kwargs): # real signature unknown
pass
def utcOffset(self, *args, **kwargs): # real signature unknown
pass
def __eq__(self, *args, **kwargs): # real signature unknown
""" Return self==value. """
pass
def __ge__(self, *args, **kwargs): # real signature unknown
""" Return self>=value. """
pass
def __gt__(self, *args, **kwargs): # real signature unknown
""" Return self>value. """
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
def __le__(self, *args, **kwargs): # real signature unknown
""" Return self<=value. """
pass
def __lt__(self, *args, **kwargs): # real signature unknown
""" Return self<value. """
pass
def __ne__(self, *args, **kwargs): # real signature unknown
""" Return self!=value. """
pass
__weakref__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""list of weak references to the object (if defined)"""
After = 16
AtEnd = 8
AtStart = 2
Before = 1
ClockTime = 5
Comparison = None # (!) real value is ''
EndsAt = 15
Equal = 14
Inside = 4
Invalid = 0
ISODate = 0
LocalDate = 4
LocalZone = 4
OffsetFromUTC = 2
Outside = 31
QtTextDate = 3
RFC3339Date = 5
RFCDate = 1
RFCDateDay = 2
Spec = None # (!) real value is ''
SpecType = None # (!) real value is ''
StartsAt = 30
TimeFormat = None # (!) real value is ''
TimeZone = 3
UTC = 1
__hash__ = None
|
knaffe/Face_Recog_sys
|
person_detectior/detect_motion.py
|
Python
|
mit
| 2,007
| 0.014449
|
'''
Date : 2017-4-21
Author : Chilam
Application : Person Detector based on OpenCV HOG and SVM detector
'''
# import the necessary packages
from __future__ import print_function
from imutils.object_detection import non_max_suppression
from imutils import paths
import numpy as np
import argparse
import imutils
import cv2
# construct the argument parse and parse the arguments
ap = argparse.ArgumentParser()
ap.add_argument("-i", "--images", required=True, help="path to images directory")
args = vars(ap.parse_args())
# initialize the HOG descriptor/person detector
hog = cv2.HOGDescriptor()
hog.setSVMDetector(cv2.HOGDescriptor_getDefaultPeopleDetector())
# loop over the image paths
for imagePath in paths.list_images(args["images"]):
    # load the image and resize it to (1) reduce detection time
# and (2) improve detection accuracy
image = cv2.imread(imagePath)
image = imutils.resize(image, width=min(400, image.shape[1]))
orig = image.copy()
    # detect people in the image
(rects, weights) = hog.detectMultiScale(image, winStride=(4, 4),
padding=(8, 8), scale=1.05)
# draw the original bounding boxes
for (x, y, w, h) in rects:
cv2.rectangle(orig, (x, y), (x + w, y + h), (0, 0, 255), 2)
# apply non-maxima suppression to the bounding boxes using a
# fairly large overlap threshold to try to maintain overlapping
# boxes that are still people
rects = np.array([[x, y, x + w, y + h] for (x, y, w, h) in rects])
pick = non_max_suppression(rects, probs=None, overlapThresh=0.65)
# draw the final bounding boxes
for (xA, yA, xB, yB) in pick:
cv2.rectangle(image, (xA, yA), (xB, yB), (0, 255, 0), 2)
# show some information on the number of bounding boxes
filename = imagePath[imagePath.rfind("/") + 1:]
print("[INFO] {}: {} original boxes, {} after suppression".format(
filename, len(rects), len(pick)))
# show the output images
cv2.imshow("Before NMS", orig)
cv2.imshow("After NMS", image)
cv2.waitKey(0)
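# --- Illustrative usage note (not part of the original script) ---
# The script is driven entirely by the --images argument, e.g. (made-up path):
#   python detect_motion.py --images ./pedestrian_frames
# Each iteration shows the raw HOG detections and the NMS-filtered boxes side
# by side; press any key to advance to the next image (cv2.waitKey(0)).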
|
Azure/azure-sdk-for-python
|
sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2019_12_01/_compute_management_client.py
|
Python
|
mit
| 12,428
| 0.004425
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from copy import deepcopy
from typing import Any, Optional, TYPE_CHECKING
from azure.core.rest import HttpRequest, HttpResponse
from azure.mgmt.core import ARMPipelineClient
from msrest import Deserializer, Serializer
from . import models
from ._configuration import ComputeManagementClientConfiguration
from .operations import AvailabilitySetsOperations, DedicatedHostGroupsOperations, DedicatedHostsOperations, GalleriesOperations, GalleryApplicationVersionsOperations, GalleryApplicationsOperations, GalleryImageVersionsOperations, GalleryImagesOperations, ImagesOperations, LogAnalyticsOperations, Operations, ProximityPlacementGroupsOperations, SshPublicKeysOperations, UsageOperations, VirtualMachineExtensionImagesOperations, VirtualMachineExtensionsOperations, VirtualMachineImagesOperations, VirtualMachineRunCommandsOperations, VirtualMachineScaleSetExtensionsOperations, VirtualMachineScaleSetRollingUpgradesOperations, VirtualMachineScaleSetVMExtensionsOperations, VirtualMachineScaleSetVMsOperations, VirtualMachineScaleSetsOperations, VirtualMachineSizesOperations, VirtualMachinesOperations
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
    from azure.core.credentials import TokenCredential
class ComputeManagementClient:
"""Compute Client.
:ivar operations: Operations operations
:vartype operations: azure.mgmt.compute.v2019_12_01.operations.Operations
:ivar availability_sets: AvailabilitySetsOperations operations
:vartype availability_sets:
azure.mgmt.compute.v2019_12_01.operations.AvailabilitySetsOperations
:ivar proximity_placement_groups: ProximityPlacementGroupsOperations operations
:vartype proximity_placement_groups:
azure.mgmt.compute.v2019_12_01.operations.ProximityPlacementGroupsOperations
:ivar dedicated_host_groups: DedicatedHostGroupsOperations operations
:vartype dedicated_host_groups:
azure.mgmt.compute.v2019_12_01.operations.DedicatedHostGroupsOperations
:ivar dedicated_hosts: DedicatedHostsOperations operations
:vartype dedicated_hosts: azure.mgmt.compute.v2019_12_01.operations.DedicatedHostsOperations
:ivar ssh_public_keys: SshPublicKeysOperations operations
:vartype ssh_public_keys: azure.mgmt.compute.v2019_12_01.operations.SshPublicKeysOperations
:ivar virtual_machine_extension_images: VirtualMachineExtensionImagesOperations operations
:vartype virtual_machine_extension_images:
azure.mgmt.compute.v2019_12_01.operations.VirtualMachineExtensionImagesOperations
:ivar virtual_machine_extensions: VirtualMachineExtensionsOperations operations
:vartype virtual_machine_extensions:
azure.mgmt.compute.v2019_12_01.operations.VirtualMachineExtensionsOperations
:ivar virtual_machine_images: VirtualMachineImagesOperations operations
:vartype virtual_machine_images:
azure.mgmt.compute.v2019_12_01.operations.VirtualMachineImagesOperations
:ivar usage: UsageOperations operations
:vartype usage: azure.mgmt.compute.v2019_12_01.operations.UsageOperations
:ivar virtual_machines: VirtualMachinesOperations operations
:vartype virtual_machines: azure.mgmt.compute.v2019_12_01.operations.VirtualMachinesOperations
:ivar virtual_machine_sizes: VirtualMachineSizesOperations operations
:vartype virtual_machine_sizes:
azure.mgmt.compute.v2019_12_01.operations.VirtualMachineSizesOperations
:ivar images: ImagesOperations operations
:vartype images: azure.mgmt.compute.v2019_12_01.operations.ImagesOperations
:ivar virtual_machine_scale_sets: VirtualMachineScaleSetsOperations operations
:vartype virtual_machine_scale_sets:
azure.mgmt.compute.v2019_12_01.operations.VirtualMachineScaleSetsOperations
:ivar virtual_machine_scale_set_extensions: VirtualMachineScaleSetExtensionsOperations
operations
:vartype virtual_machine_scale_set_extensions:
azure.mgmt.compute.v2019_12_01.operations.VirtualMachineScaleSetExtensionsOperations
:ivar virtual_machine_scale_set_rolling_upgrades:
VirtualMachineScaleSetRollingUpgradesOperations operations
:vartype virtual_machine_scale_set_rolling_upgrades:
azure.mgmt.compute.v2019_12_01.operations.VirtualMachineScaleSetRollingUpgradesOperations
:ivar virtual_machine_scale_set_vm_extensions: VirtualMachineScaleSetVMExtensionsOperations
operations
:vartype virtual_machine_scale_set_vm_extensions:
azure.mgmt.compute.v2019_12_01.operations.VirtualMachineScaleSetVMExtensionsOperations
:ivar virtual_machine_scale_set_vms: VirtualMachineScaleSetVMsOperations operations
:vartype virtual_machine_scale_set_vms:
azure.mgmt.compute.v2019_12_01.operations.VirtualMachineScaleSetVMsOperations
:ivar log_analytics: LogAnalyticsOperations operations
:vartype log_analytics: azure.mgmt.compute.v2019_12_01.operations.LogAnalyticsOperations
:ivar virtual_machine_run_commands: VirtualMachineRunCommandsOperations operations
:vartype virtual_machine_run_commands:
azure.mgmt.compute.v2019_12_01.operations.VirtualMachineRunCommandsOperations
:ivar galleries: GalleriesOperations operations
:vartype galleries: azure.mgmt.compute.v2019_12_01.operations.GalleriesOperations
:ivar gallery_images: GalleryImagesOperations operations
:vartype gallery_images: azure.mgmt.compute.v2019_12_01.operations.GalleryImagesOperations
:ivar gallery_image_versions: GalleryImageVersionsOperations operations
:vartype gallery_image_versions:
azure.mgmt.compute.v2019_12_01.operations.GalleryImageVersionsOperations
:ivar gallery_applications: GalleryApplicationsOperations operations
:vartype gallery_applications:
azure.mgmt.compute.v2019_12_01.operations.GalleryApplicationsOperations
:ivar gallery_application_versions: GalleryApplicationVersionsOperations operations
:vartype gallery_application_versions:
azure.mgmt.compute.v2019_12_01.operations.GalleryApplicationVersionsOperations
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials.TokenCredential
:param subscription_id: Subscription credentials which uniquely identify Microsoft Azure
subscription. The subscription ID forms part of the URI for every service call.
:type subscription_id: str
:param base_url: Service URL. Default value is 'https://management.azure.com'.
:type base_url: str
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
"""
def __init__(
self,
credential: "TokenCredential",
subscription_id: str,
base_url: str = "https://management.azure.com",
**kwargs: Any
) -> None:
self._config = ComputeManagementClientConfiguration(credential=credential, subscription_id=subscription_id, **kwargs)
self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
self._serialize.client_side_validation = False
self.operations = Operations(self._client, self._config, self._serialize, self._deserialize)
self.availability_sets = AvailabilitySetsOperations(self._client, self._config, self._serialize, self._deserialize)
self.proximity_placement_groups = ProximityPlacementGroupsOperations(self._client, self._config, self._serialize, self._deserialize)
self.dedicated_host_groups = DedicatedHostGroupsOperations(self._client, self._config, self._serialize,
|
schreiberx/sweet
|
tests/20_platforms_job_generation/benchmark_create_job_scripts.py
|
Python
|
mit
| 3,673
| 0.002723
|
#! /usr/bin/env python3
import sys
from itertools import product
from mule_local.JobMule import *
from mule.exec_program import *
from mule.InfoError import *
jg = JobGeneration()
"""
Compile parameters
"""
params_compile_sweet_mpi = ['enable', 'disable']
params_compile_threading = ['omp', 'off']
params_compile_thread_parallel_sum = ['enable', 'disable']
jg.compile.program = 'swe_sphere'
jg.compile.plane_spectral_space = 'disable'
jg.compile.plane_spectral_dealiasing = 'disable'
jg.compile.sphere_spectral_space = 'enable'
jg.compile.sphere_spectral_dealiasing = 'enable'
jg.compile.quadmath = 'enable'
jg.unique_id_filter = ['runtime.simparams', 'parallelization', 'benchmark', 'runtime.rexi_params']
"""
Runtime parameters
"""
params_runtime_timestep_sizes = [30]
jg.runtime.benchmark_name = 'geostrophic_balance_linear'
jg.runtime.space_res_spectral = 128
jg.runtime.space_res_physical = None
jg.runtime.compute_error = 1
# run 10 time steps
jg.runtime.max_simulation_time = 10*30
# Use moderate CI-REXI values
# Set later on
#jg.runtime.rexi_method = 'ci'
jg.runtime.rexi_ci_n = 16
jg.runtime.rexi_ci_max_real = 1
jg.runtime.rexi_ci_max_imag = 1
jg.runtime.rexi_ci_mu = 0
jg.runtime.rexi_ci_primitive = 'circle'
jg.runtime.rexi_sphere_preallocation = 1
jg.runtime.instability_checks = 0
jg.runtime.verbosity = 10
# output results after end
jg.runtime.output_timestep_size = jg.runtime.max_simulation_time
jg.runtime.output_filename = "-"
"""
Parallelization parameters
"""
# Update TIME parallelization
ptime = JobParallelizationDimOptions('time')
ptime.num_cores_per_rank = 1
ptime.num_threads_per_rank = 1
if jg.platform_resources.num_cores_per_node <= 1:
ptime.num_ranks = 1
else:
ptime.num_ranks = 2
pspace = JobParallelizationDimOptions('space')
pspace.num_cores_per_rank = 1
pspace.num_threads_per_rank = jg.platform_resources.num_cores_per_socket//ptime.num_threads_per_rank
pspace.num_ranks = 1
if pspace.num_threads_per_rank == 0:
pspace.num_threads_per_rank = 1
# Setup parallelization
jg.setup_parallelization([pspace, ptime])
ts_methods = [
['l_erk', 4, 4, 0], # reference solution
['l_erk', 2, 2, 0],
['lg_erk_lc_erk', 2, 2, 0],
['l_irk', 2, 2, 0],
['lg_irk_lc_erk', 2, 2, 0],
['l_exp', 2, 2, 0],
#['lg_exp_lc_erk', 2, 2, 0],
]
#
# allow including this file
#
if __name__ == "__main__":
#
# Create job scripts
#
for tsm in ts_methods[1:]:
jg.runtime.timestepping_method = tsm[0]
jg.runtime.timestepping_order = tsm[1]
jg.runtime.timestepping_order2 = tsm[2]
if len(tsm) > 4:
s = tsm[4]
jg.runtime.load_from_dict(tsm[4])
for jg.runtime.timestep_size in params_runtime_timestep_sizes:
for (
jg.compile.threading,
jg.compile.rexi_thread_parallel_sum,
|
jg.compile.sweet_mpi
) in product(
params_compile_threading,
params_compile_thread_parallel_sum,
params_compile_sweet_mpi
):
                if 'exp' in jg.runtime.timestepping_method:
jg.runtime.rexi_method = 'ci'
jg.gen_jobscript_directory()
jg.runtime.rexi_method = ''
else:
if jg.compile.sweet_mpi == 'enable':
continue
if jg.compile.rexi_thread_parallel_sum == 'enable':
continue
jg.gen_jobscript_directory()
|
DarthMaulware/EquationGroupLeaks
|
Leak #1 - Equation Group Cyber Weapons Auction - Invitation/EQGRP-Free-File/Firewall/EXPLOITS/ELCO/fosho/requests/structures.py
|
Python
|
unlicense
| 1,453
| 0.002753
|
# -*- coding: utf-8 -*-
"""
requests.structures
~~~~~~~~~~~~~~~~~~~
Data structures that power Requests.
"""
class CaseInsensitiveDict(dict):
    '''Case-insensitive dict: lookups, membership tests and gets ignore key case.'''
@property
def lower_keys(self):
if not hasattr(self, '_lower_keys') or not self._lower_keys:
self._lower_keys = dict((k.lower(), k) for k in list(self.keys()))
return self._lower_keys
def _clear_lower_keys(self):
if hasattr(self, '_lower_keys'):
self._lower_keys.clear()
def __setitem__(self, key, value):
dict.__setitem__(self, key, value)
self._clear_lower_keys()
def __delitem__(self, key):
dict.__delitem__(self, key)
self._lower_keys.clear()
def __contains__(self, key):
return key.lower() in self.lower_keys
def __getitem__(self, key):
##
if key in self:
            return dict.__getitem__(self, self.lower_keys[key.lower()])
def get(self, key, default=None):
if key in self:
return self[key]
else:
return default
class LookupDict(dict):
    '''Dictionary lookup object: missing keys resolve to None via instance attributes.'''
def __init__(self, name=None):
self.name = name
super(LookupDict, self).__init__()
def __repr__(self):
        return '<lookup \'%s\'>' % (self.name)
def __getitem__(self, key):
##
return self.__dict__.get(key, None)
def get(self, key, default=None):
return self.__dict__.get(key, default)
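# --- Illustrative sketch (not part of the original module) ---
# A tiny demonstration of both structures; the header name and status code
# used here are arbitrary examples.
def _demo():
    headers = CaseInsensitiveDict()
    headers['Content-Encoding'] = 'gzip'
    assert 'content-encoding' in headers              # membership ignores case
    assert headers.get('CONTENT-ENCODING') == 'gzip'  # so do item lookups
    codes = LookupDict(name='status_codes')
    codes.ok = 200                                     # lookups read instance attributes
    assert codes['ok'] == 200
    assert codes['missing'] is None                    # unknown keys fall back to None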
|
vignettist/image-import
|
import_classify.py
|
Python
|
mit
| 4,535
| 0.005072
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os.path
import re
import sys
import tarfile
import numpy as np
from six.moves import urllib
import tensorflow as tf
import time
class Classifier:
def __init__(self, prefix):
self.DATA_URL = 'http://download.tensorflow.org/models/image/imagenet/inception-2015-12-05.tgz'
self.PREFIX_DIR = prefix
# classify_image_graph_def.pb:
# Binary representation of the GraphDef protocol buffer.
# imagenet_synset_to_human_label_map.txt:
# Map from synset ID to a human readable string.
# imagenet_2012_challenge_label_map_proto.pbtxt:
# Text representation of a protocol buffer mapping a label to synset ID.
tf.app.flags.DEFINE_string(
'model_dir', '/Users/loganw/Documents/models/imagenet',
"""Path to classify_image_graph_def.pb, """
"""imagenet_synset_to_human_label_map.txt, and """
"""imagenet_2012_challenge_label_map_proto.pbtxt.""")
self.FLAGS = tf.app.flags.FLAGS
self.maybe_download_and_extract()
def create_graph(self):
"""Creates a graph from saved GraphDef file and returns a saver."""
# Creates graph from saved graph_def.pb.
with tf.gfile.FastGFile(os.path.join(self.FLAGS.model_dir, 'classify_image_graph_def.pb'), 'rb') as f:
graph_def = tf.GraphDef()
graph_def.ParseFromString(f.read())
_ = tf.import_graph_def(graph_def, name='')
    def maybe_download_and_extract(self):
"""Download and extract model tar file.
        If the pretrained model we're using doesn't already exist, this function
downloads it from the TensorFlow.org website and unpacks it into a directory.
"""
dest_directory = self.FLAGS.model_dir
if not os.path.exists(dest_directory):
os.makedirs(dest_directory)
filename = self.DATA_URL.split('/')[-1]
filepath = os.path.join(dest_directory, filename)
if not os.path.exists(filepath):
def _progress(count, block_size, total_size):
sys.stdout.write('\r>> Downloading %s %.1f%%' %
(filename,
float(count * block_size) / float(total_size) * 100.0))
sys.stdout.flush()
filepath, _ = urllib.request.urlretrieve(self.DATA_URL,
filepath,
_progress)
print()
statinfo = os.stat(filepath)
            print('Successfully downloaded', filename, statinfo.st_size, 'bytes.')
tarfile.open(filepath, 'r:gz').extractall(dest_directory)
def run_inference_on_images(self, images):
# Creates graph from saved GraphDef.
self.create_graph()
with tf.Session() as sess:
# Some useful tensors:
# 'softmax:0': A tensor containing the normalized prediction across
# 1000 labels.
# 'pool_3:0': A tensor containing the next-to-last layer containing 2048
# float description of the image.
# 'DecodeJpeg/contents:0': A tensor containing a string providing JPEG
# encoding of the image.
# Runs the softmax tensor by feeding the image_data as input to the graph.
softmax_tensor = sess.graph.get_tensor_by_name('softmax:0')
pool_tensor = sess.graph.get_tensor_by_name('pool_3:0')
# print(softmax_tensor)
t = time.time()
for i in range(len(images)):
if (i % 100) == 0:
elapsed = time.time() - t
t = time.time()
print( str(i) + '/' + str(len(images)) + ', ' + str(elapsed/100.0) + ' seconds per image')
image_data = tf.gfile.FastGFile(self.PREFIX_DIR + images[i]['resized_uris']['1280'], 'rb').read()
(predictions, pools) = sess.run((softmax_tensor, pool_tensor),
{'DecodeJpeg/contents:0': image_data})
predictions = np.squeeze(predictions).tolist()
pools = np.squeeze(pools).tolist()
images[i]['inception_pool'] = pools
images[i]['inception_classification'] = predictions
return images
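# --- Illustrative sketch (not part of the original module) ---
# How the class above is meant to be driven; the prefix directory and the
# image record (with its 'resized_uris' entry) are made up for the example.
def example_run():
    classifier = Classifier(prefix='/data/photos/')
    images = [{'resized_uris': {'1280': 'img_0001.jpg'}}]
    annotated = classifier.run_inference_on_images(images)
    # each record now carries the 2048-d pool vector and the softmax scores
    return annotated[0]['inception_pool'], annotated[0]['inception_classification']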
|
googleapis/python-aiplatform
|
samples/generated_samples/aiplatform_generated_aiplatform_v1_dataset_service_list_annotations_sync.py
|
Python
|
apache-2.0
| 1,539
| 0.0013
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for ListAnnotations
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-aiplatform
# [START aiplatform_generated_aiplatform_v1_DatasetService_ListAnnotations_sync]
from google.cloud import aiplatform_v1
def sample_list_annotations():
# Create a client
client = aiplatform_v1.DatasetServiceClient()
# Initialize request argument(s)
request = aiplatform_v1.ListAnnotationsRequest(
parent="parent_value",
)
# Make the request
page_result = client.list_annotations(request=request)
# Handle the response
for response in page_result:
print(response)
# [END aiplatform_generated_aiplatform_v1_DatasetService_ListAnnotations_sync]
|
microcom/clouder
|
clouder_template_docker/__openerp__.py
|
Python
|
gpl-3.0
| 1,398
| 0
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Yannick Buron
# Copyright 2015, TODAY Clouder SASU
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License with Attribution
# clause as published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License with
# Attribution clause along with this program. If not, see
# <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Clouder Template Docker',
'version': '1.0',
'category': 'Clouder',
'depends': ['clouder'],
'author': 'Yannick Buron (Clouder)',
'license': 'Other OSI approved licence',
    'website': 'https://github.com/clouder-community/clouder',
'description': """
Clouder Template Docker
""",
'demo': [],
'data': ['clouder_template_docker_data.xml'],
'installable': True,
'application': True,
}
|
dmccloskey/component-contribution
|
component_contribution/kegg_database.py
|
Python
|
mit
| 860
| 0.004651
|
# -*- coding: utf-8 -*-
"""
Created on Tue May 31 10:57:02 2016
@author: noore
"""
import bioservices.kegg
import pandas as pd
import re  # needed for re.findall() on the KEGG entry text
kegg = bioservices.kegg.KEGG()
cid2name = kegg.list('cpd')
cid2name = filter(lambda x: len(x) == 2, map(lambda l : l.split('\t'), cid2name.split('\n')))
cid_df = pd.DataFrame(cid2name, columns=['cpd', 'names'])
cid_df['cpd'] = cid_df['cpd'].apply(lambda x: x[4:])
cid_df['name'] = cid_df['names'].apply(lambda s: s.split(';')[0])
cid_df.set_index('cpd', inplace=True)
cid_df['inchi'] = None
for cid in cid_df.index[0:10]:
ChEBI = re.findall('ChEBI: ([\d\s]+)\n', kegg.get(cid))
if len(ChEBI) == 0:
print 'Cannot find a ChEBI for %s' % cid
elif len(ChEBI) > 1:
print 'Error parsing compound %s' % cid
else:
        cid_df.at[cid, 'ChEBI'] = ChEBI[0]  # record the ChEBI id on the compound frame
cid_df.to_csv(settings.KEGG2CHEBI_FNAME)
|
ToonTownInfiniteRepo/ToontownInfinite
|
toontown/golf/DistributedPhysicsWorldAI.py
|
Python
|
mit
| 2,425
| 0.004536
|
from math import *
import math
import random, time
import BuildGeometry
from direct.directnotify import DirectNotifyGlobal
from direct.distributed import DistributedObjectAI
from pandac.PandaModules import *
from toontown.golf import PhysicsWorldBase
from toontown.toonbase import ToontownGlobals
class DistributedPhysicsWorldAI(DistributedObjectAI.DistributedObjectAI, PhysicsWorldBase.PhysicsWorldBase):
notify = DirectNotifyGlobal.directNotify.newCategory('DistributedPhysicsWorldAI')
def __init__(self, air):
DistributedObjectAI.DistributedObjectAI.__init__(self, air)
PhysicsWorldBase.PhysicsWorldBase.__init__(self, 0)
self.commonHoldData = None
self.storeAction = None
self.holdingUpObjectData = 0
return
def generate(self):
DistributedObjectAI.DistributedObjectAI.generate(self)
self.loadLevel()
self.setupSimulation()
def delete(self):
self.notify.debug('Calling DistributedObjectAI.delete')
DistributedObjectAI.DistributedObjectAI.delete(self)
self.notify.debug('Calling PhysicsWorldBase.delete')
PhysicsWorldBase.PhysicsWorldBase.delete(self)
def loadLevel(self):
pass
def createCommonObject(self, type, pos, hpr, sizeX = 0, sizeY = 0, moveDistance = 0):
commonObjectDatam = PhysicsWorldBase.PhysicsWorldBase.createCommonObject(self, type, None, pos, hpr, sizeX, sizeY, moveDistance)
self.sendUpdate('clientCommonObject', commonObjectDatam)
return
def updateCommonObjects(self):
self.sendUpdate('setCommonObjects', [self.getCommonObjectData()])
def doAction(self):
print 'doing Action'
print 'before performReadyAction'
self.performReadyAction()
self.storeAction = None
self.commonHoldData = None
return
def upSetCommonObjects(self, objectData):
        self.holdingUpObjectData = 1
self.commonHoldData = objectData
if self.storeAction:
            self.doAction()
def setupCommonObjects(self):
print 'setupCommonObjects'
print self.commonHoldData
if not self.commonHoldData:
return
elif self.commonHoldData[0][1] == 99:
print 'no common objects'
else:
self.useCommonObjectData(self.commonHoldData, 0)
def performReadyAction(self):
print 'Wrong performReadyAction'
|
yanadsl/ML-Autocar
|
test.py
|
Python
|
mit
| 519
| 0.00578
|
import sys
import pigpio
import time
from colorama import Fore, Back, Style
def set_speed(lspeed, rspeed):
pi.hardware_PWM(left_servo_pin, 800, int(lspeed)*10000)
pi.hardware_PWM(right_servo_pin, 800, int(rspeed)*10000)
pi = pigpio.pi()
left_servo_pin = 13
right_servo_pin = 12
dead_pin = 17
die_distance = 8
ls = 100
rs = 100
print("start")
try:
while True:
set_speed(ls, rs)
if pi.read(dead_pin) == pigpio.LOW:
set_speed(0, 0)
except :
set_speed(0, 0)
    sys.exit(0)
|
mozilla/user-advocacy
|
lib/web_api/google_services.py
|
Python
|
mpl-2.0
| 4,807
| 0.008945
|
#!/usr/local/bin/python
"""
Handles Google Service Authentication
"""
# TODO(rrayborn): Better documentation
__author__ = "Rob Rayborn"
__copyright__ = "Copyright 2014, The Mozilla Foundation"
__license__ = "MPLv2"
__maintainer__ = "Rob Rayborn"
__email__ = "rrayborn@mozilla.com"
__status__ = "Development"
from OpenSSL.crypto import load_pkcs12, dump_privatekey, FILETYPE_PEM
from datetime import date, datetime, timedelta
from os import environ
import json
import jwt
import requests
import time
_SECRETS_PATH = environ['SECRETS_PATH']
# Header and Grant Type are always the same for Google's API so making a
# variable instead of a file
_HEADER_JSON = {'alg':'RS256','typ':'jwt'}
_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:jwt-bearer'
# Default filenames
_CLAIMS_FILE = _SECRETS_PATH + 'claims.json'
_P12_FILE = _SECRETS_PATH + 'goog.p12'
_AUTH_FILE = _SECRETS_PATH + '.auth.tmp'
# Other defaults
_GOOG_PASSPHRASE = 'notasecret' # notasecret is the universal google passphrase
class google_service_connection(object):
def __init__(self, json_web_token=None, expiration=None, claims_file=_CLAIMS_FILE,
p12_file=_P12_FILE, auth_file=_AUTH_FILE):
self._json_web_token = None
self._expiration = None
self._auth_token = None
self._claims_file = claims_file
self._p12_file = p12_file
self._auth_file = auth_file
self.get_auth_token(json_web_token,expiration)
def get_expiration(self):
return self._expiration
def set_files(self, claims_file=None, p12_file=None,
auth_file=None):
self._claims_file = claims_file or self._claims_file
self._p12_file = p12_file or self._p12_file
self._auth_file = auth_file or self._auth_file
def _refresh_json_web_token(self, json_web_token=None, expiration=None,
force=False):
if not force and not _expired(self._expiration):
return
if json_web_token or expiration:
if json_web_token and expiration:
if not _expired(expiration):
self._json_web_token = json_web_token
self._expiration = expiration
return
#else continue
else:
raise Exception('_refresh_json_web_token: Must pass json_web_token'\
' and expiration together.')
with open(self._p12_file, 'r') as f:
pk = load_pkcs12(f.read(), _GOOG_PASSPHRASE).get_privatekey()
secret = dump_privatekey(FILETYPE_PEM, pk)
# Load claims json
with open(self._claims_file, 'r') as f:
claims_json = json.load(f)
# Modify claims data
current_time = int(time.time())
claims_json['iat'] = current_time
claims_json['exp'] = current_time + 3600 - 1
# Remember expiration
self._expiration = current_time + 3600
self._json_web_token = jwt.encode(
claims_json, secret, algorithm='RS256', headers=_HEADER_JSON
)
def _load_auth_token(self):
try:
with open(self._auth_file, 'r') as f:
auth_json = json.load(f)
if not _expired(auth_json['expiration']):
self._expiration = auth_json['expiration']
self._auth_token = auth_json['token']
return self._auth_token
else:
return None
except:
return None
def _save_auth_token(self):
with open(self._auth_file, 'w') as f:
data = {'token':self._auth_token, 'expiration':self._expiration}
json.dump(data, f)
def get_auth_token(self, json_web_token=None, expiration=None):
if self._load_auth_token():
return self._auth_token
self._refresh_json_web_token(json_web_token=json_web_token,
expiration=expiration)
parameters = {
'grant_type':_GRANT_TYPE,
'assertion':self._json_web_token
}
response = requests.post('https://accounts.google.com/o/oauth2/token',
data=parameters)
if response.status_code == 200:
self._auth_token = response.json()['access_token']
else:
raise Exception('Token Request results in a %s response code.' \
% response.status_code)
self._save_auth_token()
return self._auth_token
def _expired(exp):
return time.time() >= exp
def main():
gsc = google_service_connection()
if __name__ == '__main__':
main()
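# --- Illustrative sketch (not part of the original module) ---
# Shows how the bearer token produced above would typically be attached to a
# Google API call; the Analytics endpoint is only an example URL.
def example_request():
    token = google_service_connection().get_auth_token()
    response = requests.get(
        'https://www.googleapis.com/analytics/v3/management/accounts',
        headers={'Authorization': 'Bearer %s' % token})
    return response.status_code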
|
comandrei/django-shortcircuit
|
setup.py
|
Python
|
bsd-3-clause
| 2,291
| 0.000873
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import re
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
def get_version(*file_paths):
filename = os.path.join(os.path.dirname(__file__), *file_paths)
version_file = open(filename).read()
version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
version_file, re.M)
if version_match:
return version_match.group(1)
raise RuntimeError('Unable to find version string.')
version = get_version('shortcircuit', '__init__.py')
if sys.argv[-1] == 'publish':
try:
import wheel
except ImportError:
print('Wheel library missing. Please run "pip instal
|
l wheel"')
|
sys.exit()
os.system('python setup.py sdist upload')
os.system('python setup.py bdist_wheel upload')
sys.exit()
if sys.argv[-1] == 'tag':
print("Tagging the version on github:")
os.system("git tag -a %s -m 'version %s'" % (version, version))
os.system("git push --tags")
sys.exit()
readme = open('README.rst').read()
history = open('HISTORY.rst').read().replace('.. :changelog:', '')
setup(
name='django-shortcircuit',
version=version,
description="""Shortcircuit Django middlewares for a list of views you want skipped""",
long_description=readme + '\n\n' + history,
author='Andrei Coman',
author_email='comandrei@gmail.com',
url='https://github.com/comandrei/django-shortcircuit',
packages=[
'shortcircuit',
],
include_package_data=True,
install_requires=[
],
license="BSD",
zip_safe=False,
keywords='django-shortcircuit',
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
)
|
beeldengeluid/linkedtv-editortool
|
src/linkedtv/text/TextAnalyzer.py
|
Python
|
gpl-2.0
| 1,003
| 0.013958
|
import codecs
import logging
logger = logging.getLogger(__name__)
class TextAnalyzer:
def __init__(self):
logger.debug('-- Initializing TextAnalyzer --')
"""
Deze functie leest een stopwoorden file (stoplist_tno.tab) in en retourneert deze woorden in
een dic
|
tionary
"""
def readStopWordsFile(self, strStopFile):
if not strStopFile:
            strStopFile = self._stopWordsFile
""" read stopwords from file as dictionary. """
stopWords = {}
try:
f = codecs.open(strStopFile,'rU','utf-8') # NB. Use 'U'-mode for UniversalNewline Support
for line in f.readlines():
word = line.partition('::')[0].strip()#.decode('utf-8')
stopWords[word] = 1
f.close()
except IOError, e:
msg = 'Can\'t open stopfile %s for reading. %s' % (strStopFile, str(e))
logger.error(msg)
return None
return stopWords
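# --- Illustrative sketch (not part of the original module) ---
# Reading a stop-word list and filtering tokens with it; the file path and
# token list are made up for the example.
def example_filter(tokens, stop_file='stoplist_tno.tab'):
    analyzer = TextAnalyzer()
    stop_words = analyzer.readStopWordsFile(stop_file) or {}
    return [t for t in tokens if t not in stop_words]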
|
drweaver/py_garage_server
|
garage_state_mon.py
|
Python
|
mit
| 1,828
| 0.036105
|
from threading import Thread, Event
from time import sleep
from garage import Garage
import logging
from time import time
logger = logging.getLogger('garage_state_mon')
class LastStateTransitionMonitor(Thread):
def __init__(self, dao, config, state=Garage.closed, notify_callback=None):
Thread.__init__(self)
self._dao = dao
self._config = config
self._notify_callback = notify_callback
        self._state = state
self._stop_event = Event()
def check_now(self):
last_time = self._dao.last_state_transition_from(self._state)
if last_time is None:
logger.info("No notification required, already in "+self._state+" state")
return
if last_time == 0:
msg = 'Garage Door has never been '+self._state
else:
            diff = int(( time() - last_time ) / 60)
self._config.reload()
limit = self._config['state_monitor_limit_mins']
if diff < limit: return
            # test the larger threshold first, otherwise the days branch is unreachable
            if diff > 2880: diff_msg = str(round(diff/1440))+' days'
            elif diff > 99: diff_msg = str(round(diff/60))+' hours'
            else: diff_msg = str(diff)+' minutes'
msg = 'Garage Door has not been '+self._state+' for '+diff_msg
logger.info(msg)
if self._notify_callback and self._config['state_monitor_enabled']: self._notify_callback(msg)
def run(self):
while 1:
self.check_now()
self._config.reload()
self._stop_event.wait(self._config['state_monitor_interval_mins'] * 60)
if( self._stop_event.is_set() ): break
def stop(self):
self._stop_event.set()
if __name__ == '__main__':
from garage_dao import GarageDao
from garage_config import GarageConfig
def callback(msg):
print(msg)
logging.basicConfig(filename=__file__+'.log',level=logging.DEBUG)
LastStateTransitionMonitor(GarageDao(),GarageConfig(), state=Garage.opening, notify_callback=callback).run()
|
chrcoe/code-kata
|
legacy/python/hashtable/__init__.py
|
Python
|
mit
| 4,107
| 0.000487
|
'''
Basic hash table to practice working through hash table logic...
'''
class HashTable():
def __init__(self):
CURRENT_SIZE = 10
self.table_size = CURRENT_SIZE
# self.table = [0] * self.table_size
# TODO: make this more dynamic .. check for collisions instead of filling every slot with a linked list (maybe?)
self.table = [SingleLinkedList() for x in range(CURRENT_SIZE)]
def insert(self, input_str):
# using chaining will require a linked list on all items..
        # will also need to resize if the table is full...
# TODO: implement table resizing
self.table[self.__simple_str_hash(input_str)].append(input_str)
def remove(self, input_str):
self.table[self.__simple_str_hash(input_str)].remove(input_str)
def exists(self, input_str):
return self.table[self.__simple_str_hash(input_str)].exists(input_str)
def __resize_table(self):
# TODO: implement table resizing
pass
def __simple_str_hash(self, str_to_hash):
sum_ = 0
if not str_to_hash:
raise ValueError('invalid input string')
for char in str_to_hash:
sum_ += ord(char)
# to determine placement within the table
return sum_ % self.table_size
def __str__(self):
return '{}'.format(self.table)
class SingleLinkedList():
def __init__(self):
''' Create as empty linked list. '''
self.head = None
self.tail = None
def show(self):
curr_node = self.head
output = ''
while curr_node:
output += '{}, '.format(curr_node.data)
curr_node = curr_node.next
print('[{}]'.format(output))
def append(self, data):
# create the base node which holds the data
node = self.Node(data, None)
if self.head is None:
# set the head and tail to this node if there was no previous
# item in this LinkedList
self.head = self.tail = node
else:
# if there was an item, add the item to the end of the LinkedList
self.tail.next = node
self.tail = node
def remove(self, data):
# prime the function with the head item...
curr_node = self.head
prev_node = None
while curr_node:
# check for a match, then remove it from the list
if curr_node.data == data:
# remove the previous node's next to the current node's next
if prev_node:
prev_node.next = curr_node.next
else:
self.head = curr_node.next
prev_node = curr_node
curr_node = curr_node.next
def exists(self, data):
curr_node = self.head
while curr_node:
if curr_node.data == data:
# we found it!
return True
curr_node = curr_node.next
return False
def __str__(self):
return 'Head: {}, Tail: {}'.format(self.head, self.tail)
class Node():
def __init__(self, data, next_node):
self.data = data
self.next = next_node
def __str__(self):
return 'Data: {}, Next:{}'.format(self.data, self.next)
if __name__ == '__main__':
s = SingleLinkedList()
s.append(15)
s.show()
s.append(5)
s.show()
s.append(2)
s.show()
s.append(29)
s.show()
assert(s.exists(5) is True)
assert(s.exists(239847) is False)
s.remove(2)
s.show()
s.remove(5)
assert(s.exists(5) is False)
s.show()
s.remove(15)
s.show()
my_hash_table = HashTable()
my_hash_table.insert('Steve') # should be 3
my_hash_table.insert('Spark') # should be 9
my_hash_table.insert('Notes')
assert(my_hash_table.exists('Steve') is True) # should be true
assert(my_hash_table.exists('steve') is False) # should be false
my_hash_table.remove('Steve')
assert(my_hash_table.exists('Steve') is False) # should be false now
# print(my_hash_table)
|
facetothefate/contrail-controller
|
src/vnsw/opencontrail-vrouter-netns/opencontrail_vrouter_netns/vrouter_docker.py
|
Python
|
apache-2.0
| 10,235
| 0.000195
|
import argparse
import json
import netaddr
import os
import uuid
import docker
import sys
from docker.errors import APIError
from vrouter_netns import validate_uuid, NetnsManager
class VRouterDocker(object):
"""
    Creates and destroys a service instance inside Docker.
It needs to be run as superuser to connect Docker network to linux netns
"""
def __init__(self, args_str=None):
self.args = None
self._client = docker.Client(version='1.14', timeout=30)
self._parse_args(args_str, sys.argv[1:])
def _parse_args(self, args_str=None, argv=None):
"""Return an argparse.ArgumentParser for me"""
if args_str is not None:
args = args_str.split()
elif argv is not None:
args = argv
else:
raise ValueError("argv or args_str must be suplied!")
print args
conf_parser = argparse.ArgumentParser(add_help=False)
args, remaining_argv = conf_parser.parse_known_args(args)
# Override with CLI options
        # Don't suppress add_help here so it will handle -h
parser = argparse.ArgumentParser(
# Inherit options from config_parser
parents=[conf_parser],
# print script description with -h/--help
description=__doc__,
# Don't mess with format of description
formatter_class=argparse.RawDescriptionHelpFormatter,
)
subparsers = parser.add_subparsers()
create_parser = subparsers.add_parser('create')
create_parser.add_argument(
"vm_id",
help="Virtual machine UUID")
create_parser.add_argument(
"--vmi-left-id",
default=None,
help="Left virtual machine interface UUID")
create_parser.add_argument(
"--vmi-left-mac",
default=None,
help=("Left virtual machine interface MAC. Default: automatically "
"generated by the system"))
create_parser.add_argument(
"--vmi-left-ip",
default=None,
help=("Left virtual machine interface IPv4 and mask "
"(ie: a.a.a.a/bb). Default mask to /32"))
create_parser.add_argument(
"--vmi-right-id",
default=None,
help="Right virtual machine interface UUID")
create_parser.add_argument(
"--vmi-right-mac",
default=None,
help=("Right virtual machine interface MAC. Default: "
"automatically generated by the system"))
create_parser.add_argument(
"--vmi-right-ip",
default=None,
help=("Right virtual machine interface IPv4 and mask "
"(ie: a.a.a.a/bb). Defa
|
ult mask to /32"))
create_parser.add_argument(
"--vmi-management-id",
default=None,
help="Management virtual machine interface UUID")
create_parser.add_argument(
"--vmi-management-mac",
default=None,
help=("Management virtual machine interface MAC. Default: "
"automatically generated by the system"))
create_parser.add_argument(
"--vmi-management-ip",
default=None,
help=("Management virtual machine interface IPv4 and mask "
"(ie: a.a.a.a/bb). Default mask to /32"))
create_parser.add_argument(
"--image",
help="Image for a Docker container")
create_parser.add_argument(
"--command",
help="Command to run inside a Docker container")
create_parser.add_argument(
"--instance-data",
default=None,
help="Additional data as JSON string")
create_parser.add_argument(
"--update",
action="store_true",
default=False,
help="Update a created Docker container")
create_parser.set_defaults(func=self.create)
destroy_parser = subparsers.add_parser('destroy')
destroy_parser.add_argument(
"vm_id",
help="Virtual machine UUID")
destroy_parser.add_argument(
"--vmi-left-id",
help="Left virtual machine interface UUID")
destroy_parser.add_argument(
"--vmi-right-id",
help="Right virtual machine interface UUID")
destroy_parser.add_argument(
"--vmi-management-id",
default=None,
help="Management virtual machine interface UUID")
destroy_parser.set_defaults(func=self.destroy)
self.args = parser.parse_args(remaining_argv)
@staticmethod
def _create_nic_def(vmi_id, vmi_mac=None, vmi_ip=None):
nic = {}
if uuid.UUID(vmi_id).int:
nic['uuid'] = validate_uuid(vmi_id)
if vmi_mac:
nic['mac'] = netaddr.EUI(vmi_mac, dialect=netaddr.mac_unix)
else:
nic['mac'] = None
if vmi_ip:
nic['ip'] = netaddr.IPNetwork(vmi_ip)
else:
nic['ip'] = None
return nic
def _stop(self, vm_name, vmi_left_id, vmi_right_id, vmi_management_id):
docker_pid = self._client.inspect_container(vm_name)['State']['Pid']
if vmi_left_id is not None:
nic_left = self._create_nic_def(self.args.vmi_left_id)
else:
nic_left = None
if vmi_right_id is not None:
nic_right = self._create_nic_def(self.args.vmi_right_id)
else:
nic_right = None
if vmi_management_id is not None:
nic_management = self._create_nic_def(self.args.vmi_management_id)
nic_management = [nic_management]
else:
nic_management = []
netns_mgr = NetnsManager(str(docker_pid), nic_left, nic_right,
nic_management)
try:
#It is possible that namespace does not exists
netns_mgr.unplug_namespace_interface()
netns_mgr.destroy()
except ValueError:
pass
self._client.stop(vm_name)
netns_path = "/var/run/netns/%s" % docker_pid
if os.path.exists(netns_path):
os.remove(netns_path)
def create(self):
vm_name = validate_uuid(self.args.vm_id)
image_name = self.args.image
if self.args.instance_data:
instance_data = json.loads(self.args.instance_data)
else:
instance_data = {}
try:
self._client.inspect_image(image_name)
except APIError as e:
if e.response.status_code == 404:
self._client.pull(image_name)
self._client.inspect_image(image_name)
else:
raise
if self.args.command is not None:
command = self.args.command
elif "command" in instance_data:
command = instance_data["command"]
else:
# use container default
command = None
docker_id = None
try:
result = self._client.create_container(
image=image_name, name=vm_name, command=command, detach=True,
stdin_open=True, tty=True) # keep the container running
docker_id = result["Id"]
self._stop(vm_name, self.args.vmi_left_id, self.args.vmi_right_id,
self.args.vmi_management_id)
except APIError as e:
if e.response.status_code == 409:
if self.args.update:
container = self._client.inspect_container(vm_name)
docker_id = container["Id"]
else:
raise
if self.args.vmi_left_id is not None:
nic_left = self._create_nic_def(self.args.vmi_left_id,
self.args.vmi_left_mac,
self.args.vmi_left_ip)
else:
nic_left = None
if self.args.vmi_right_id is not None:
nic_right = self._create_nic_def(
|
remremrem/EV-Tribute
|
world/Net/netbase.py
|
Python
|
mit
| 6,335
| 0.027466
|
import socket,select,sys,time
from errors import *
from communicate import SendData, ReceiveData, ReceiveDataUDP
class TCPServer():
def __init__(self):
self.sending_socket = None
def input_func(self,sock,host,port,address):pass
def output_func(self,sock,host,port,address):pass
def connect_func(self,sock,host,port):pass
def client_connect_func(self,sock,host,port,address):pass
def client_disconnect_func(self,sock,host,port,address):pass
def quit_func(self,host,port):pass
def connect(self,host,port):
self.host = host
self.port = port
try:
self.unconnected_socket = socket.socket()
self.unconnected_socket.bind((self.host,self.port))
self.unconnected_socket.listen(5)
except:
self.unconnected_socket.close()
raise ServerError("Only one instance of the server on port "+str(self.port)+" may run at one time!")
self.connect_func(self.unconnected_socket,self.host,self.port)
self.connected_sockets = []
self.socketaddresses = {}
def remove_socket(self,sock):
address = self.socketaddresses[sock]
self.client_disconnect_func(sock,self.host,self.port,address)
self.connected_sockets.remove(sock)
def serve_forever(self):
self.looping = True
while self.looping:
input_ready,output_ready,except_ready = select.select([self.unconnected_socket]+self.connected_sockets,[],[])
for sock in input_ready:
if sock == self.unconnected_socket:
#init socket
connected_socket, address = sock.accept()
self.connected_sockets.append(connected_socket)
self.socketaddresses[connected_socket] = address
self.client_connect_func(connected_socket,self.host,self.port,address)
else:
try:
data = ReceiveData(sock)
address = self.socketaddresses[sock]
self.input_func(sock,self.host,self.port,address)
except:
data = "client quit"
if data != None:
if data == "client quit":
self.remove_socket(sock)
continue
self.sending_socket = sock
self.handle_data(data)
def handle_data(self,data):
pass
def send_data(self,data,compress=False):
try:
SendData(self.sending_socket,data,compress,includelength=True)
address = self.socketaddresses[self.sending_socket]
self.output_func(self.sending_socket,self.host,self.port,address)
except:
self.remove_socket(self.sending_socket)
def quit(self):
for s in self.connected_sockets: s.close()
self.quit_func(self.host,self.port)
class UDPServer():
def __init__(self):
self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
def input_func(self,sock,host,port,address):pass
def output_func(self,sock,host,port,address):pass
def connect_func(self,sock,host,port):pass
def quit_func(self,host,port):pass
def connect(self,host,port):
self.host = host
self.port = port
try:
self.socket.bind((host, port))
except:
self.socket.close()
raise ServerError("Only one instance of the server on port "+str(self.port)+" may run at one time!")
self.connect_func(self.socket,self.host,self.port)
def serve_forever(self):
self.looping = True
while self.looping:
data,self.lastaddress = ReceiveDataUDP(self.socket)
self.input_func(self.socket,self.host,self.port,self.lastaddress)
self.handle_data(data)
def handle_data(self,data):
pass
def send_data(self,data,compress=False):
try:
SendData(self.socket,data,compress,address=self.lastaddress)
self.output_func(self.socket,self.host,self.port,self.lastaddress)
except:
pass
#client disconnected
def quit(self):
self.socket.close()
self.quit_func(self.host,self.port)
class TCPClient:
def __init__(self):
pass
def connect(self,host,port):
self.host = host
self.port = port
try:
self.socket = socket.socket()
self.socket.connect((self.host,self.port))
except:
self.socket.close()
raise SocketError("The connection c
|
ould not be opened. It must be created first with a server object.")
def send_data(self,
|
data,compress=False):
SendData(self.socket,data,compress,includelength=True)
def wait_for_data(self):
input_ready,output_ready,except_ready = select.select([self.socket],[],[])
return ReceiveData(self.socket)
def check_for_data(self):
input_ready,output_ready,except_ready = select.select([self.socket],[],[],0.001)
if len(input_ready) > 0:
return ReceiveData(self.socket)
def quit(self):
self.socket.close()
class UDPClient:
def __init__(self):
pass
def connect(self,host,port):
self.host = host
self.port = port
self.socket = socket.socket(socket.AF_INET,socket.SOCK_DGRAM)
self.socket.connect((self.host,self.port))
def send_data(self,data,compress=False):
SendData(self.socket,data,compress)
def wait_for_data(self):
input_ready,output_ready,except_ready = select.select([self.socket],[],[])
return ReceiveDataUDP(self.socket)[0]
def check_for_data(self):
input_ready,output_ready,except_ready = select.select([self.socket],[],[],0.001)
if len(input_ready) > 0:
return ReceiveDataUDP(self.socket)[0]
def quit(self):
self.socket.close()
|
mica-gossip/MiCA
|
tools/micavis/custom/RoundRobinMerge.py
|
Python
|
bsd-3-clause
| 31
| 0
|
from MergeI
|
ndepende
|
nt import *
|
lukehinds/anteater
|
anteater/src/virus_total.py
|
Python
|
apache-2.0
| 6,371
| 0.001727
|
# noinspection PyInterpreter
import json
import logging
import os
import re
import requests
import redis
import sys
import time
import urllib
import uuid
from pylimit import PyRateLimit
import six.moves.configparser
class VirusTotal():
def __init__(self, *args):
self.logger = logging.getLogger(__name__)
self.base_url = 'https://www.virustotal.com/vtapi/v2/'
self.headers = {"Accept-Encoding": "gzip, deflate", "User-Agent": "Anteater"}
self.HTTP_OK = 200
self.HTTP_BAD = 400
self.HTTP_FORBIDDEN = 403
self.HTTP_RATE_EXCEEDED = 204
self.public_api_sleep_time = 20
self.logger = logging.getLogger(__name__)
self.uuid = uuid.uuid4()
self.config = six.moves.configparser.SafeConfigParser()
self.config.read('anteater.conf')
try:
conn = redis.StrictRedis(
host='localhost',
port=6379,
password='')
conn.ping()
PyRateLimit.init(redis_host="localhost", redis_port=6379)
except Exception as ex:
self.logger.error('Error: %s', ex)
exit('Failed to connect, terminating.')
self.limit = PyRateLimit()
try:
vt_rate_type = self.config.get('config', 'vt_rate_type')
except six.moves.configparser.NoSectionError:
self.logger.error("A config section is required for vt_rate_type with a public | private option ")
sys.exit(1)
pattern = re.compile(r'\bpublic\b|\bprivate\b')
if not pattern.match(vt_rate_type):
self.logger.error("Unrecognized %s option for vt_rate_type", vt_rate_type)
sys.exit(1)
if vt_rate_type == 'public':
self.limit.create(21, 1)
elif vt_rate_type == 'private':
self.limit.create(1, 1)
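# Illustrative only: the anteater.conf layout implied by the parsing above.
# The section and option names are taken from this file; the value is a
# placeholder and must be either "public" or "private":
#
#   [config]
#   vt_rate_type = public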
def rate_limit(self):
"""
Simple rate limit function using redis
"""
rate_limited_msg = False
while True:
is_rate_limited = self.limit.is_rate_limited(self.uuid)
if is_rate_limited:
time.sleep(0.3) # save hammering redis
if not rate_limited_msg:
self.logger.info('Rate limit active..please wait...')
rate_limited_msg = True
if not is_rate_limited:
self.logger.info('Rate limit clear.')
self.limit.attempt(self.uuid)
return True
def scan_file(self, filename, apikey):
"""
Sends a file to virus total for assessment
"""
url = self.base_url + "file/scan"
params = {'apikey': apikey}
scanfile = {"file": open(filename, 'rb')}
response = requests.post(url, files=scanfile, params=params)
rate_limit_clear = self.rate_limit()
if rate_limit_clear:
if response.status_code == sel
|
f.HTTP
|
_OK:
json_response = response.json()
return json_response
elif response.status_code == self.HTTP_RATE_EXCEEDED:
time.sleep(20)
else:
self.logger.error("sent: %s, HTTP: %d", filename, response.status_code)
def rescan_file(self, filename, sha256hash, apikey):
"""
just send the hash, check the date
"""
url = self.base_url + "file/rescan"
params = {
'apikey': apikey,
'resource': sha256hash
}
rate_limit_clear = self.rate_limit()
if rate_limit_clear:
response = requests.post(url, params=params)
if response.status_code == self.HTTP_OK:
self.logger.info("sent: %s, HTTP: %d, content: %s", os.path.basename(filename), response.status_code, response.text)
elif response.status_code == self.HTTP_RATE_EXCEEDED:
time.sleep(20)
else:
self.logger.error("sent: %s, HTTP: %d", os.path.basename(filename), response.status_code)
return response
def binary_report(self, sha256sum, apikey):
"""
retrieve report from file scan
"""
url = self.base_url + "file/report"
params = {"apikey": apikey, "resource": sha256sum}
rate_limit_clear = self.rate_limit()
if rate_limit_clear:
response = requests.post(url, data=params)
if response.status_code == self.HTTP_OK:
json_response = response.json()
response_code = json_response['response_code']
return json_response
elif response.status_code == self.HTTP_RATE_EXCEEDED:
time.sleep(20)
else:
self.logger.warning("retrieve report: %s, HTTP code: %d", os.path.basename(filename), response.status_code)
def send_ip(self, ipaddr, apikey):
"""
Send IP address for a list of past malicious domain associations
"""
url = self.base_url + "ip-address/report"
parameters = {"ip": ipaddr, "apikey": apikey}
rate_limit_clear = self.rate_limit()
if rate_limit_clear:
response = requests.get(url, params=parameters)
if response.status_code == self.HTTP_OK:
json_response = response.json()
return json_response
elif response.status_code == self.HTTP_RATE_EXCEEDED:
time.sleep(20)
else:
self.logger.error("sent: %s, HTTP: %d", ipaddr, response.status_code)
time.sleep(self.public_api_sleep_time)
def url_report(self, scan_url, apikey):
"""
Send URLs for a list of past malicious associations
"""
url = self.base_url + "url/report"
params = {"apikey": apikey, 'resource': scan_url}
rate_limit_clear = self.rate_limit()
if rate_limit_clear:
response = requests.post(url, params=params, headers=self.headers)
if response.status_code == self.HTTP_OK:
json_response = response.json()
return json_response
elif response.status_code == self.HTTP_RATE_EXCEEDED:
time.sleep(20)
else:
self.logger.error("sent: %s, HTTP: %d", scan_url, response.status_code)
time.sleep(self.public_api_sleep_time)
|
dropbox/changes
|
changes/artifacts/collection_artifact.py
|
Python
|
apache-2.0
| 1,917
| 0.004173
|
from __future__ import absolute_import
import json
from changes.config import db
from changes.constants import Result
from changes.models.jobplan import JobPlan
from changes.utils.http import build_web_uri
from .base import ArtifactHandler, ArtifactParseError
class CollectionArtifactHandler(ArtifactHandler):
"""
Base class artifact handler for collection (jobs.json and tests.json) files.
Does the required job expansion. Subclasses are expected to set
cls.FILENAMES to the handleable files in question.
"""
def process(self, fp, artifact):
try:
phase_config = json.load(fp)
except ValueError:
uri = build_web_uri('/find_build/{0}/'.format(self.step.job.build_id.hex))
self.logger.warning('Failed to parse json; (step=%s, build=%s)', self.step.id.hex, uri, exc_info=True)
self.report_malformed()
else:
_, implementation = JobPlan.get_build_step_for_job(job_id=self.step.job_id)
try:
implementation.expand_jobs(self.step, phase_config)
except ArtifactParseError:
uri = build_web_uri('/find_build/{0}/'.format(self.step.job.build_id.hex))
self.logger.warning('malformed %s artifact (step=%s, build=%s)', self.FILENAMES[0],
self.step.id.hex, uri, exc_info=True)
self.report_malformed()
except Exception:
uri = build_web_uri('/find_build/{0}/'.format(self.step.job.build_id.hex))
self.logger.warning('expand_jobs failed (step=%s, build=%s)', self.step.id.hex, uri, exc_info=True)
self.step.result = Result.infra_failed
db.ses
|
sion.add(self.step)
|
db.session.commit()
class TestsJsonHandler(CollectionArtifactHandler):
# only match in the root directory
FILENAMES = ('/tests.json',)
|
burakince/ocl_web
|
ocl_web/libs/ocl/star.py
|
Python
|
mpl-2.0
| 216
| 0
|
# from ..ocl import
|
ApiResource
# class Star(ApiResource):
# def __init__(self):
# super(Star, sel
|
f).__init__()
# self.resource = {}
# self.username = ""
# self.dateStarred = ""
|
pombreda/djapian
|
src/djapian/tests/query.py
|
Python
|
bsd-3-clause
| 718
| 0.002786
|
from djapian.tests.utils impor
|
t BaseIndexerTest, BaseTestCase, Entry
def query_test(query, count):
class _QueryTest(BaseIndexerTest, BaseTestCase):
def setUp(self):
super(_QueryTest, self).setUp()
self.result = Entry.indexer.search(query)
def test_result_count(self):
self.assertEqual(len(self.result), count)
_QueryTest.__name__ = _QueryTest.__name__ + '_' + query.replace(" ", "_")
return _QueryTest
IndexerSearchCharFieldTest = query_test("title:test", 2)
Indexe
|
rSearchAliasFieldTest = query_test("subject:test", 2)
IndexerSearchBoolFieldTest = query_test("active:True", 3)
IndexerSearchAndQueryTest = query_test("title:test AND title:another", 1)
|
timkrentz/SunTracker
|
IMU/VTK-6.2.0/Web/Python/vtk/web/protocols.py
|
Python
|
mit
| 11,069
| 0.003523
|
r"""protocols is a module that contains a set of VTK Web related
protocols that can be combined together to provide a flexible way to define
very specific web application.
"""
from time import time
import os, sys, logging, types, inspect, traceback, logging, re
from vtkWebCorePython import vtkWebApplication, vtkWebInteractionEvent
from autobahn.wamp import register as exportRpc
# =============================================================================
#
# Base class for any VTK Web based protocol
#
# =============================================================================
class vtkWebProtocol(object):
def setApplication(self, app):
self.Application = app
def getApplication(self):
return self.Application
def mapIdToObject(self, id):
"""
Maps global-id for a vtkObject to the vtkObject instance. May return None if the
id is not valid.
"""
id = int(id)
if id <= 0:
return None
return self.Application.GetObjectIdMap().GetVTKObject(id)
def getGlobalId(self, obj):
"""
Return the id for a given vtkObject
"""
return self.Application.GetObjectIdMap().GetGlobalId(obj)
def getView(self, vid):
"""
Returns the view for a given view ID, if vid is None then return the
current active view.
:param vid: The view ID
:type vid: str
"""
view = self.mapIdToObject(vid)
if not view:
# Use the active view if none is provided.
view = self.Application.GetObjectIdMap().GetActiveObject("VIEW")
if not view:
raise Exception("no view provided: " + vid)
return view
def setActiveView(self, view):
"""
Set a vtkRenderWindow to be the active one
"""
self.Application.GetObjectIdMap().SetActiveObject("VIEW", view)
# =============================================================================
#
# Handle Mouse interaction on any type of view
#
# =============================================================================
class vtkWebMouseHandler(vtkWebProtocol):
@exportRpc("viewport.mouse.interaction")
def mouseInteraction(self, event):
"""
RPC Callback for mouse interactions.
"""
view = self.getView(event['view'])
buttons = 0
if event["buttonLeft"]:
buttons |= vtkWebInteractionEvent.LEFT_BUTTON;
if event["buttonMiddle"]:
buttons |= vtkWebInteractionEvent.MIDDLE_BUTTON;
if event["buttonRight"]:
buttons |= vtkWebInteractionEvent.RIGHT_BUTTON;
modifiers = 0
if event["shiftKey"]:
modifiers |= vtkWebInteractionEvent.SHIFT_KEY
if event["ctrlKey"]:
modifiers |= vtkWebInteractionEvent.CTRL_KEY
if event["altKey"]:
modifiers |= vtkWebInteractionEvent.ALT_KEY
if event["metaKey"]:
modifiers |= vtkWebInteractionEvent.META_KEY
pvevent = vtkWebInteractionEvent()
pvevent.SetButtons(buttons)
pvevent.SetModifiers(modifiers)
if event.has_key("x"):
pvevent.SetX(event["x"])
if event.has_key("y"):
pvevent.SetY(event["y"])
if event.has_key("scroll"):
pvevent.SetScroll(event["scroll"])
if event["action"] == 'dblclick':
pvevent.SetRepeatCount(2)
#pvevent.SetKeyCode(event["charCode"])
retVal = self.getApplication().HandleInteractionEvent(view, pvevent)
del pvevent
return retVal
# =============================================================================
#
# Basic 3D Viewport API (Camera + Orientation + CenterOfRotation)
#
# =============================================================================
class vtkWebViewPort(vtkWebProtocol):
@exportRpc("viewport.camera.reset")
def resetCamera(self, view):
"""
RPC callback to reset camera.
"""
view = self.getView(view)
camera = view.GetRenderer().GetActiveCamera()
camera.ResetCamera()
try:
# FIXME seb: view.CenterOfRotation = camera.GetFocalPoint()
print "FIXME"
except:
pass
self.getApplication().InvalidateCache(view)
return str(self.getGlobalId(view))
@exportRpc("viewport.axes.orientation.visibility.update")
def updateOrientationAxesVisibility(self, view, sh
|
owAxis):
"""
|
RPC callback to show/hide OrientationAxis.
"""
view = self.getView(view)
# FIXME seb: view.OrientationAxesVisibility = (showAxis if 1 else 0);
self.getApplication().InvalidateCache(view)
return str(self.getGlobalId(view))
@exportRpc("viewport.axes.center.visibility.update")
def updateCenterAxesVisibility(self, view, showAxis):
"""
RPC callback to show/hide CenterAxesVisibility.
"""
view = self.getView(view)
# FIXME seb: view.CenterAxesVisibility = (showAxis if 1 else 0);
self.getApplication().InvalidateCache(view)
return str(self.getGlobalId(view))
@exportRpc("viewport.camera.update")
def updateCamera(self, view_id, focal_point, view_up, position):
view = self.getView(view_id)
camera = view.GetRenderer().GetActiveCamera()
camera.SetFocalPoint(focal_point)
camera.SetViewUp(view_up)
camera.SetPosition(position)
self.getApplication().InvalidateCache(view)
# =============================================================================
#
# Provide Image delivery mechanism
#
# =============================================================================
class vtkWebViewPortImageDelivery(vtkWebProtocol):
@exportRpc("viewport.image.render")
def stillRender(self, options):
"""
RPC Callback to render a view and obtain the rendered image.
"""
beginTime = int(round(time() * 1000))
view = self.getView(options["view"])
size = [view.GetSize()[0], view.GetSize()[1]]
if options and options.has_key("size"):
size = options["size"]
if size[0] > 0 and size[1] > 0:
view.SetSize(size)
t = 0
if options and options.has_key("mtime"):
t = options["mtime"]
quality = 100
if options and options.has_key("quality"):
quality = options["quality"]
localTime = 0
if options and options.has_key("localTime"):
localTime = options["localTime"]
reply = {}
app = self.getApplication()
if t == 0:
app.InvalidateCache(view)
reply["image"] = app.StillRenderToString(view, t, quality)
reply["stale"] = app.GetHasImagesBeingProcessed(view)
reply["mtime"] = app.GetLastStillRenderToStringMTime()
reply["size"] = [view.GetSize()[0], view.GetSize()[1]]
reply["format"] = "jpeg;base64"
reply["global_id"] = str(self.getGlobalId(view))
reply["localTime"] = localTime
endTime = int(round(time() * 1000))
reply["workTime"] = (endTime - beginTime)
return reply
# =============================================================================
#
# Provide Geometry delivery mechanism (WebGL)
#
# =============================================================================
class vtkWebViewPortGeometryDelivery(vtkWebProtocol):
@exportRpc("viewport.webgl.metadata")
def getSceneMetaData(self, view_id):
view = self.getView(view_id);
data = self.getApplication().GetWebGLSceneMetaData(view)
return data
@exportRpc("viewport.webgl.data")
def getWebGLData(self, view_id, object_id, part):
view = self.getView(view_id)
data = self.getApplication().GetWebGLBinaryData(view, str(object_id), part-1)
return dat
|
bnaul/scikit-learn
|
examples/miscellaneous/plot_johnson_lindenstrauss_bound.py
|
Python
|
bsd-3-clause
| 7,785
| 0.001413
|
r"""
=====================================================================
The Johnson-Lindenstrauss bound for embedding with random projections
=====================================================================
The `Johnson-Lindenstrauss lemma`_ states that any high dimensional
dataset can be randomly projected into a lower dimensional Euclidean
space while controlling the distortion in the pairwise distances.
.. _`Johnson-Lindenstrauss lemma`: https://en.wikipedia.org/wiki/\
Johnson%E2%80%93Lindenstrauss_lemma
"""
print(__doc__)
import sys
from time import time
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
from sklearn.random_projection import johnson_lindenstrauss_min_dim
from sklearn.random_projection import SparseRandomProjection
from sklearn.datasets import fetch_20newsgroups_vectorized
from sklearn.datasets import load_digits
from sklearn.metrics.pairwise import e
|
uclidean_distances
from sklearn.utils.fixes import parse_version
# `normed` is being deprecated in favor of `density` in histograms
if parse_version(matplotlib.__version__) >= parse_version('2.1'):
density_param = {'density': True}
else:
density_param = {'normed': True}
# %%
# Theor
|
etical bounds
# ==================
# The distortion introduced by a random projection `p` is asserted by
# the fact that `p` is defining an eps-embedding with good probability
# as defined by:
#
# .. math::
# (1 - eps) \|u - v\|^2 < \|p(u) - p(v)\|^2 < (1 + eps) \|u - v\|^2
#
# Where u and v are any rows taken from a dataset of shape [n_samples,
# n_features] and p is a projection by a random Gaussian N(0, 1) matrix
# with shape [n_components, n_features] (or a sparse Achlioptas matrix).
#
# The minimum number of components needed to guarantee the eps-embedding is
# given by:
#
# .. math::
# n\_components >= 4 log(n\_samples) / (eps^2 / 2 - eps^3 / 3)
#
#
# The first plot shows that with an increasing number of samples ``n_samples``,
# the minimal number of dimensions ``n_components`` increases logarithmically
# in order to guarantee an ``eps``-embedding.
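# A quick, purely illustrative sanity check of the bound above (this block is
# not part of the original example): for n_samples = 1e6 and eps = 0.1 the
# closed-form expression and ``johnson_lindenstrauss_min_dim`` should agree
# up to integer truncation.
_n_check, _eps_check = 1e6, 0.1
_closed_form = 4 * np.log(_n_check) / (_eps_check ** 2 / 2 - _eps_check ** 3 / 3)
_min_dim = johnson_lindenstrauss_min_dim(_n_check, eps=_eps_check)
print("closed form: %.1f, johnson_lindenstrauss_min_dim: %d" % (_closed_form, _min_dim))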
# range of admissible distortions
eps_range = np.linspace(0.1, 0.99, 5)
colors = plt.cm.Blues(np.linspace(0.3, 1.0, len(eps_range)))
# range of number of samples (observation) to embed
n_samples_range = np.logspace(1, 9, 9)
plt.figure()
for eps, color in zip(eps_range, colors):
min_n_components = johnson_lindenstrauss_min_dim(n_samples_range, eps=eps)
plt.loglog(n_samples_range, min_n_components, color=color)
plt.legend(["eps = %0.1f" % eps for eps in eps_range], loc="lower right")
plt.xlabel("Number of observations to eps-embed")
plt.ylabel("Minimum number of dimensions")
plt.title("Johnson-Lindenstrauss bounds:\nn_samples vs n_components")
plt.show()
# %%
# The second plot shows that increasing the admissible
# distortion ``eps`` allows one to drastically reduce the minimal number of
# dimensions ``n_components`` for a given number of samples ``n_samples``
# range of admissible distortions
eps_range = np.linspace(0.01, 0.99, 100)
# range of number of samples (observation) to embed
n_samples_range = np.logspace(2, 6, 5)
colors = plt.cm.Blues(np.linspace(0.3, 1.0, len(n_samples_range)))
plt.figure()
for n_samples, color in zip(n_samples_range, colors):
min_n_components = johnson_lindenstrauss_min_dim(n_samples, eps=eps_range)
plt.semilogy(eps_range, min_n_components, color=color)
plt.legend(["n_samples = %d" % n for n in n_samples_range], loc="upper right")
plt.xlabel("Distortion eps")
plt.ylabel("Minimum number of dimensions")
plt.title("Johnson-Lindenstrauss bounds:\nn_components vs eps")
plt.show()
# %%
# Empirical validation
# ====================
#
# We validate the above bounds on the 20 newsgroups text document
# (TF-IDF word frequencies) dataset or on the digits dataset:
#
# - for the 20 newsgroups dataset some 500 documents with 100k
# features in total are projected using a sparse random matrix to smaller
# euclidean spaces with various values for the target number of dimensions
# ``n_components``.
#
# - for the digits dataset, some 8x8 gray level pixels data for 500
# handwritten digits pictures are randomly projected to spaces for various
# larger number of dimensions ``n_components``.
#
# The default dataset is the 20 newsgroups dataset. To run the example on the
# digits dataset, pass the ``--use-digits-dataset`` command line argument to
# this script.
if '--use-digits-dataset' in sys.argv:
data = load_digits().data[:500]
else:
data = fetch_20newsgroups_vectorized().data[:500]
# %%
# For each value of ``n_components``, we plot:
#
# - 2D distribution of sample pairs with pairwise distances in original
# and projected spaces as x and y axis respectively.
#
# - 1D histogram of the ratio of those distances (projected / original).
n_samples, n_features = data.shape
print("Embedding %d samples with dim %d using various random projections"
% (n_samples, n_features))
n_components_range = np.array([300, 1000, 10000])
dists = euclidean_distances(data, squared=True).ravel()
# select only non-identical samples pairs
nonzero = dists != 0
dists = dists[nonzero]
for n_components in n_components_range:
t0 = time()
rp = SparseRandomProjection(n_components=n_components)
projected_data = rp.fit_transform(data)
print("Projected %d samples from %d to %d in %0.3fs"
% (n_samples, n_features, n_components, time() - t0))
if hasattr(rp, 'components_'):
n_bytes = rp.components_.data.nbytes
n_bytes += rp.components_.indices.nbytes
print("Random matrix with size: %0.3fMB" % (n_bytes / 1e6))
projected_dists = euclidean_distances(
projected_data, squared=True).ravel()[nonzero]
plt.figure()
min_dist = min(projected_dists.min(), dists.min())
max_dist = max(projected_dists.max(), dists.max())
plt.hexbin(dists, projected_dists, gridsize=100, cmap=plt.cm.PuBu,
extent=[min_dist, max_dist, min_dist, max_dist])
plt.xlabel("Pairwise squared distances in original space")
plt.ylabel("Pairwise squared distances in projected space")
plt.title("Pairwise distances distribution for n_components=%d" %
n_components)
cb = plt.colorbar()
cb.set_label('Sample pairs counts')
rates = projected_dists / dists
print("Mean distances rate: %0.2f (%0.2f)"
% (np.mean(rates), np.std(rates)))
plt.figure()
plt.hist(rates, bins=50, range=(0., 2.), edgecolor='k', **density_param)
plt.xlabel("Squared distances rate: projected / original")
plt.ylabel("Distribution of samples pairs")
plt.title("Histogram of pairwise distance rates for n_components=%d" %
n_components)
# TODO: compute the expected value of eps and add them to the previous plot
# as vertical lines / region
plt.show()
# %%
# We can see that for low values of ``n_components`` the distribution is wide
# with many distorted pairs and a skewed distribution (due to the hard
# limit of zero ratio on the left as distances are always positive)
# while for larger values of n_components the distortion is controlled
# and the distances are well preserved by the random projection.
# %%
# Remarks
# =======
#
# According to the JL lemma, projecting 500 samples without too much distortion
# will require at least several thousand dimensions, irrespective of the
# number of features of the original dataset.
#
# Hence using random projections on the digits dataset which only has 64
# features in the input space does not make sense: it does not allow
# for dimensionality reduction in this case.
#
# On the twenty newsgroups dataset, on the other hand, the dimensionality can be
# decreased from 56436 down to 10000 while reasonably preserving
# pairwise distances.
|
alexfalcucc/anaconda
|
anaconda_lib/linting/anaconda_pep8.py
|
Python
|
gpl-3.0
| 4,416
| 0.000226
|
# -*- coding: utf8 -*-
# Copyright (C) 2013 - Oscar Campos <oscar.campos@member.fsf.org>
# This program is Free Software see LICENSE file for details
import os
import pep8
from linting import linter
class Pep8Error(linter.LintError):
"""PEP-8 linting error class
"""
def __init__(self, filename, loc, offset, code, text, level='E'):
ct_tuple = (code, text)
err_str = '[{0}] PEP 8 (%s): %s'.format(level)
super(Pep8Error, self).__init__(
filename, loc, level, err_str, ct_tuple, offset=offset, text=text
)
class Pep8Warning(linter.LintError):
"""PEP-8 lintng warning class
"""
def __init__(self, filename, loc, offset, code, text, level='W'):
ct_tuple = (code, text)
err_str = '[{0}] PEP 8 (%s): %s'.format(level)
super(Pep8Warning, self).__init__(
filename, loc, level, err_str, ct_tuple, offset=offset, text=text
)
class Pep8Linter(linter.Linter):
"""Linter for pep8 Linter
"""
def lint(self, settings, code, filename):
"""Run the pep8 code checker with the given options
"""
errors = []
check_params = {
'ignore': settings.get('pep8_ignore', []),
'max_line_length': settings.get(
'pep8_max_line_length', pep8.MAX_LINE_LENGTH
),
'levels': settings.get('pep8_error_levels', {
'E': 'W', 'W': 'V', 'V': 'V'
})
}
errors.extend(self.check(
code, filename, settings.get('pep8_rcfile'), **check_params
))
return self.parse(errors)
def check(self, code, filename, rcfile, ignore, max_line_length, levels):
"""Check the code with pyflakes to find errors
"""
messages = []
_lines = code.split('\n')
if _lines:
class AnacondaReport(pep8.BaseReport):
"""Helper class to report PEP8 problems
"""
def error(self, line_number, offset, text, check):
"""Report an error, according to options
"""
col = line_number
code = text[:4]
message = text[5:]
if self._ignore_code(code):
return
|
if code in self.counters:
self.counters[code] += 1
else:
self.counters[code] = 1
self.messages[code] = message
if code in self.expected:
return
self.file_errors += 1
self.total_errors += 1
print(code)
pep8_erro
|
r = code.startswith('E')
klass = Pep8Error if pep8_error else Pep8Warning
messages.append(klass(
filename, col, offset, code, message, levels[code[0]]
))
return code
params = {'reporter': AnacondaReport}
if not rcfile:
_ignore = ignore + pep8.DEFAULT_IGNORE.split(',')
params['ignore'] = _ignore
else:
params['config_file'] = os.path.expanduser(rcfile)
options = pep8.StyleGuide(**params).options
if not rcfile:
options.max_line_length = max_line_length
good_lines = [l + '\n' for l in _lines]
good_lines[-1] = good_lines[-1].rstrip('\n')
if not good_lines[-1]:
good_lines = good_lines[:-1]
pep8.Checker(filename, good_lines, options=options).check_all()
return messages
def parse(self, errors):
errors_list = []
if errors is None:
return errors_list
self.sort_errors(errors)
for error in errors:
error_level = self.prepare_error_level(error)
message = error.message.capitalize()
offset = error.offset
error_data = {
'underline_range': True,
'level': error_level,
'lineno': error.lineno,
'offset': offset,
'message': message,
'raw_error': str(error)
}
errors_list.append(error_data)
return errors_list
|
nanchenchen/script-analysis
|
pyanalysis/precompilers.py
|
Python
|
mit
| 843
| 0.002372
|
# This file is a fix for this issue with django-compressor
# https://
|
github.com/django-compressor/django-compressor/issues/226
# This is a less filter that explicitly calls CssAbsoluteFilter.
# After adding the relative-urls flag to the lessc command,
# it appears to be unnecessary but I'm leaving it here in case
# we need it later for other deployment setups.
from compressor.filters.base import CompilerFilter
from compressor.filters.css_default import CssAbsoluteFilter
from django.conf import settings
class LessFilter(CompilerFilter):
def __init_
|
_(self, content, attrs, **kwargs):
super(LessFilter, self).__init__(content, command=settings.BIN_LESSC_COMMAND, **kwargs)
def input(self, **kwargs):
content = super(LessFilter, self).input(**kwargs)
return CssAbsoluteFilter(content).input(**kwargs)
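# Illustrative only (not part of the original file): a filter like this is
# typically wired into django-compressor via COMPRESS_PRECOMPILERS in
# settings.py. The dotted path below assumes this module is importable as
# pyanalysis.precompilers, matching its location in this repository.
#
#   COMPRESS_PRECOMPILERS = (
#       ('text/less', 'pyanalysis.precompilers.LessFilter'),
#   )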
|
rohitranjan1991/home-assistant
|
homeassistant/components/homekit_controller/binary_sensor.py
|
Python
|
mit
| 4,745
| 0.000211
|
"""Support for Homekit motion sensors."""
from __future__ import annotations
from aiohomekit.model.characteristics import CharacteristicsTypes
from aiohomekit.model.services import Service, ServicesTypes
from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
BinarySensorEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import KNOWN_DEVICES, HomeKitEntity
class HomeKitMotionSensor(HomeKitEntity, BinarySensorEntity):
"""Representation of a Homekit motion sensor."""
_attr_device_class = BinarySensorDeviceClass.MOTION
def get_characteristic_types(self) -> list[str]:
"""Define the homekit characteristics the entity is tracking."""
return [CharacteristicsTypes.MOTION_DETECTED]
@property
def is_on(self) -> bool:
"""Has motion been detected."""
return self.service.value(CharacteristicsTypes.MOTION_DETECTED) is True
class HomeKitContactSensor(HomeKitEntity, BinarySensorEntity):
"""Representation of a Homekit contact sensor."""
_attr_device_class = BinarySensorDeviceClass.OPENING
def get_characteristic_types(self) -> list[str]:
"""Define the homekit characteristics the entity is tracking."""
return [CharacteristicsTypes.CONTACT_STATE]
@property
def is_on(self) -> bool:
"""Return true if the binary sensor is on/open."""
return self.service.value(CharacteristicsTypes.CONTACT_STATE) == 1
class HomeKitSmokeSensor(HomeKitEntity, BinarySensorEntity):
"""Representation of a Homekit smoke sensor."""
_attr_device_class = BinarySensorDeviceClass.SMOKE
def get_characteristic_types(self) -> list[str]:
"""Define the homekit characteristics the entity is tracking."""
return [CharacteristicsTypes.SMOKE_DETECTED]
@property
def is_on(self) -> bool:
"""Return true if smoke is currently detected."""
return self.service.value(CharacteristicsTypes.SMOKE_DETECTED) == 1
class HomeKitCarbonMonoxideSensor(HomeKitEntity, BinarySensorEntity):
"""Representation of a Homekit BO sensor."""
_attr_device_class = BinarySensorDeviceClass.GAS
def get_characteristic_types(self) -> list[str]:
"""Define the homekit characteristics the entity is tracking."""
return [CharacteristicsTypes.CARBON_MONOXIDE_DETECTED]
@property
def is_on(self) -> bool:
"""Return true if CO is currently detected."""
return self.service.value(CharacteristicsTypes.CARBON_MONOXIDE_DETECTED) == 1
class HomeKitOccupancySensor(HomeKitEntity, BinarySensorEntity):
"""Representation of a Homekit occupancy sensor."""
_attr_device_class = BinarySensorDeviceClass.OCCUPANCY
def get_characteristic_types(self) -> list[str]:
"""Define the homekit characteristics the entity is tracking."""
return [CharacteristicsTypes.OCCUPANCY_DETECTED]
@property
def is_on(self)
|
-> bool:
"""Return true if occupancy is currently detected."""
return self.service.value(CharacteristicsTypes.OCCUPANCY_DETECTED) == 1
class HomeKitLeakSensor(HomeKitEntity, BinarySensorEntity):
"""Representation of a Homekit leak sensor."""
_attr_device_class = Binar
|
ySensorDeviceClass.MOISTURE
def get_characteristic_types(self) -> list[str]:
"""Define the homekit characteristics the entity is tracking."""
return [CharacteristicsTypes.LEAK_DETECTED]
@property
def is_on(self) -> bool:
"""Return true if a leak is detected from the binary sensor."""
return self.service.value(CharacteristicsTypes.LEAK_DETECTED) == 1
ENTITY_TYPES = {
ServicesTypes.MOTION_SENSOR: HomeKitMotionSensor,
ServicesTypes.CONTACT_SENSOR: HomeKitContactSensor,
ServicesTypes.SMOKE_SENSOR: HomeKitSmokeSensor,
ServicesTypes.CARBON_MONOXIDE_SENSOR: HomeKitCarbonMonoxideSensor,
ServicesTypes.OCCUPANCY_SENSOR: HomeKitOccupancySensor,
ServicesTypes.LEAK_SENSOR: HomeKitLeakSensor,
}
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up Homekit lighting."""
hkid = config_entry.data["AccessoryPairingID"]
conn = hass.data[KNOWN_DEVICES][hkid]
@callback
def async_add_service(service: Service) -> bool:
if not (entity_class := ENTITY_TYPES.get(service.type)):
return False
info = {"aid": service.accessory.aid, "iid": service.iid}
async_add_entities([entity_class(conn, info)], True)
return True
conn.add_listener(async_add_service)
|
owlabs/incubator-airflow
|
tests/www/test_utils.py
|
Python
|
apache-2.0
| 14,133
| 0.001132
|
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import functools
from bs4 import BeautifulSoup
import mock
import six
from six.moves.urllib.parse import parse_qs, quote_plus
from parameterized import parameterized
from airflow.models import DagRun, Log, DagBag
from airflow.settings impor
|
t Session
from airflow.utils.state import State
from airflow.utils import dates, timezone
from airflow.www import utils, app as application
from tests.test_utils.config import conf_vars
if six.PY2:
|
# Need `assertRegex` back-ported from unittest2
import unittest2 as unittest
else:
import unittest
class UtilsTest(unittest.TestCase):
def setUp(self):
super(UtilsTest, self).setUp()
def test_empty_variable_should_not_be_hidden(self):
self.assertFalse(utils.should_hide_value_for_key(""))
self.assertFalse(utils.should_hide_value_for_key(None))
def test_normal_variable_should_not_be_hidden(self):
self.assertFalse(utils.should_hide_value_for_key("key"))
def test_sensitive_variable_should_be_hidden(self):
self.assertTrue(utils.should_hide_value_for_key("google_api_key"))
def test_sensitive_variable_should_be_hidden_ic(self):
self.assertTrue(utils.should_hide_value_for_key("GOOGLE_API_KEY"))
def check_generate_pages_html(self, current_page, total_pages,
window=7, check_middle=False):
extra_links = 4 # first, prev, next, last
search = "'>\"/><img src=x onerror=alert(1)>"
html_str = utils.generate_pages(current_page, total_pages,
search=search)
self.assertNotIn(search, html_str,
"The raw search string shouldn't appear in the output")
self.assertIn('search=%27%3E%22%2F%3E%3Cimg+src%3Dx+onerror%3Dalert%281%29%3E',
html_str)
self.assertTrue(
callable(html_str.__html__),
"Should return something that is HTML-escaping aware"
)
dom = BeautifulSoup(html_str, 'html.parser')
self.assertIsNotNone(dom)
ulist = dom.ul
ulist_items = ulist.find_all('li')
self.assertEqual(min(window, total_pages) + extra_links, len(ulist_items))
page_items = ulist_items[2:-2]
mid = int(len(page_items) / 2)
for i, item in enumerate(page_items):
a_node = item.a
href_link = a_node['href']
node_text = a_node.string
if node_text == str(current_page + 1):
if check_middle:
self.assertEqual(mid, i)
self.assertEqual('javascript:void(0)', href_link)
self.assertIn('active', item['class'])
else:
self.assertRegex(href_link, r'^\?', 'Link is page-relative')
query = parse_qs(href_link[1:])
self.assertListEqual(query['page'], [str(int(node_text) - 1)])
self.assertListEqual(query['search'], [search])
def test_generate_pager_current_start(self):
self.check_generate_pages_html(current_page=0,
total_pages=6)
def test_generate_pager_current_middle(self):
self.check_generate_pages_html(current_page=10,
total_pages=20,
check_middle=True)
def test_generate_pager_current_end(self):
self.check_generate_pages_html(current_page=38,
total_pages=39)
def test_params_no_values(self):
"""Should return an empty string if no params are passed"""
self.assertEqual('', utils.get_params())
def test_params_search(self):
self.assertEqual('search=bash_',
utils.get_params(search='bash_'))
@parameterized.expand([
(True, False, ''),
(False, True, ''),
(True, True, 'showPaused=True'),
(False, False, 'showPaused=False'),
(None, True, ''),
(None, False, ''),
])
def test_params_showPaused(self, show_paused, hide_by_default, expected_result):
with conf_vars({('webserver', 'hide_paused_dags_by_default'): str(hide_by_default)}):
self.assertEqual(expected_result,
utils.get_params(showPaused=show_paused))
@parameterized.expand([
(True, False, True),
(False, True, True),
(True, True, False),
(False, False, False),
(None, True, True),
(None, False, True),
])
def test_should_remove_show_paused_from_url_params(self, show_paused,
hide_by_default, expected_result):
with conf_vars({('webserver', 'hide_paused_dags_by_default'): str(hide_by_default)}):
self.assertEqual(
expected_result,
utils._should_remove_show_paused_from_url_params(
show_paused,
hide_by_default
)
)
def test_params_none_and_zero(self):
qs = utils.get_params(a=0, b=None)
# The order won't be consistent, but that doesn't affect behaviour of a browser
pairs = list(sorted(qs.split('&')))
self.assertListEqual(['a=0', 'b='], pairs)
def test_params_all(self):
query = utils.get_params(showPaused=False, page=3, search='bash_')
self.assertEqual(
{'page': ['3'],
'search': ['bash_'],
'showPaused': ['False']},
parse_qs(query)
)
def test_params_escape(self):
self.assertEqual('search=%27%3E%22%2F%3E%3Cimg+src%3Dx+onerror%3Dalert%281%29%3E',
utils.get_params(search="'>\"/><img src=x onerror=alert(1)>"))
# flask_login is loaded by calling flask_login.utils._get_user.
@mock.patch("flask_login.utils._get_user")
@mock.patch("airflow.settings.Session")
def test_action_logging_with_login_user(self, mocked_session, mocked_get_user):
fake_username = 'someone'
mocked_current_user = mock.MagicMock()
mocked_get_user.return_value = mocked_current_user
mocked_current_user.user.username = fake_username
mocked_session_instance = mock.MagicMock()
mocked_session.return_value = mocked_session_instance
app = application.create_app(testing=True)
# Patching here to avoid errors in application.create_app
with mock.patch("airflow.models.Log") as mocked_log:
with app.test_request_context():
@utils.action_logging
def some_func():
pass
some_func()
mocked_log.assert_called_once()
(args, kwargs) = mocked_log.call_args_list[0]
self.assertEqual('some_func', kwargs['event'])
self.assertEqual(fake_username, kwargs['owner'])
mocked_session_instance.add.assert_called_once()
@mock.patch("flask_login.utils._get_user")
@mock.patch("airflow.settings.Session")
def test_action_logging_with_invalid_user(self, mocked_session, mocked_get_user):
anonymous_username = 'anonymous'
# When the user returned by flask login_manager._load_user
# is invalid.
mocked_current_user = mock.MagicMock()
mocked_get_user.r
|
jacburge/wewillremember
|
app/views.py
|
Python
|
apache-2.0
| 610
| 0.013115
|
from app import app
from flask import render_template
@app.route('/')
def index():
return render_template('index.html')
@app.route('/story/')
def story():
return render_template('story.html')
@app.route('/bio/')
def bio():
return render_template('bio.html')
@app.route('/contact/')
def contact():
return render_template('contact.html')
# @app.route('/fun/')
# def fun():
# return rende
|
r_template('fun.html')
# @app.route('/por
|
tfolio/')
# def portfolio():
# return render_template('portfolio.html')
# @app.route('/boot_index/')
# def boot_index():
# return render_template('bootstrap_index.html')
|
art-of-dom/hash-it
|
test/test_validate_hash.py
|
Python
|
mit
| 4,257
| 0.00047
|
'''Tests for the ValidateHash object'''
from __future__ import absolute_import
import unittest
from nose.tools import assert_true, assert_false
from hashit.core.hash_data import HashData
from hashit.core.hash_type import HashType
from hashit.service.validate_hash import ValidateHash
from hashit.utils.data_encap import DataEncap
from hashit.utils.data_type import DataType
# pylint: disable=missing-docstring
# pylint: disable=invalid-name
# pylint: disable=no-self-use
class TestHashIt(unittest.TestCase):
def setUp(self):
self.data = HashData(
DataEncap(DataType.FILE, "test/suppor
|
t/example.bin"))
def tearDown(self):
pass
def test_verify_hash_crc8_expected_result(self):
assert_true(ValidateHash(
result="14",
hash_type=HashType.CRC8,
data=self.data
).is_vaild())
def test_verify_hash_crc8_bad_result(self):
assert_false(ValidateHash(
result="FE",
hash_type=HashType.CRC8,
|
data=self.data
).is_vaild())
def test_verify_hash_crc16_expected_result(self):
assert_true(ValidateHash(
result="BAD3",
hash_type=HashType.CRC16,
data=self.data
).is_vaild())
def test_verify_hash_crc16_bad_result(self):
assert_false(ValidateHash(
result="78E7",
hash_type=HashType.CRC16,
data=self.data
).is_vaild())
def test_verify_hash_crc32_expected_result(self):
assert_true(ValidateHash(
result="29058C73",
hash_type=HashType.CRC32,
data=self.data
).is_vaild())
def test_verify_hash_crc32_bad_result(self):
assert_false(ValidateHash(
result="ACEF2345",
hash_type=HashType.CRC32,
data=self.data
).is_vaild())
def test_verify_hash_crc64_expected_result(self):
assert_true(ValidateHash(
result="6C27EAA78BA3F822",
hash_type=HashType.CRC64,
data=self.data
).is_vaild())
def test_verify_hash_crc64_bad_result(self):
assert_false(ValidateHash(
result="DEADBEEFF00DB00F",
hash_type=HashType.CRC64,
data=self.data
).is_vaild())
def test_verify_hash_md5_expected_result(self):
assert_true(ValidateHash(
result="E2C865DB4162BED963BFAA9EF6AC18F0",
hash_type=HashType.MD5,
data=self.data
).is_vaild())
def test_verify_hash_md5_bad_result(self):
assert_false(ValidateHash(
result="11223344556677889900AECF431304065",
hash_type=HashType.MD5,
data=self.data
).is_vaild())
def test_verify_hash_sha1_expected_result(self):
assert_true(ValidateHash(
result="4916D6BDB7F78E6803698CAB32D1586EA457DFC8",
hash_type=HashType.SHA1,
data=self.data
).is_vaild())
def test_verify_hash_sha1_bad_result(self):
assert_false(ValidateHash(
result="987654321AC12345876543BCC34567862737FF20",
hash_type=HashType.SHA1,
data=self.data
).is_vaild())
def test_verify_hash_sha224_expected_result(self):
assert_true(ValidateHash(
result="88702E63237824C4EB0D0FCFE41469A462493E8BEB2A75BBE5981734",
hash_type=HashType.SHA224,
data=self.data
).is_vaild())
def test_verify_hash_sha224_bad_result(self):
assert_false(ValidateHash(
result="AACCEEDDFF928173647D0FBC09375847268EB88EEFF378592047583",
hash_type=HashType.SHA224,
data=self.data
).is_vaild())
def test_verify_hash_sha256_expected_result(self):
assert_true(ValidateHash(
result="40AFF2E9D2D8922E47AFD4648E6967497158785FBD1DA870E7110266BF944880",
hash_type=HashType.SHA256,
data=self.data
).is_vaild())
def test_verify_hash_sha256_bad_result(self):
assert_false(ValidateHash(
result="AF82E982D8922E47AFD4648E674ACE587BEEF85FBD1D0266BF944880123455FF",
hash_type=HashType.SHA256,
data=self.data
).is_vaild())
|
pdav/khal
|
tests/ui/tests_walker.py
|
Python
|
mit
| 2,702
| 0.00111
|
import datetime as dt
from freezegun import freeze_time
from khal.ui import DayWalker, DListBox, StaticDayWalker
from ..utils import LOCALE_BERLIN
from .canvas_render import CanvasTranslator
CONF = {'locale': LOCALE_BERLIN, 'keybindings': {},
'view': {'monthdisplay': 'firstday'},
'default': {'timedelta': dt.timedelta(days=3)},
}
palette = {
'date header focused': 'blue',
'date header': 'green',
'default': 'black',
}
@freeze_time('2017-6-7')
def test_daywalker(coll_vdirs):
collection, _ = coll_vdirs
this_date = dt.date.today()
daywalker = DayWalker(this_date, None, CONF, collection, delete_status=dict())
elistbox = DListBox(
daywalker, parent=None, conf=CONF,
delete_status=lambda: False,
toggle_delete_all=None,
toggle_delete_instance=None,
dynamic_days=True,
)
canvas = elistbox.render((50, 6), True)
assert CanvasTranslator(canvas, palette).transform() == \
"""\x1b[34mToday (Wednesday, 07.06.2017)\x1b[0m
\x1b[32mTomorrow (Thursday, 08.06.2017)\x1b[0m
\x1b[32mFriday, 09.06.2017 (2 days from now)\x1b[0m
\x1b[32mSaturday, 10.06.2017 (3 days from now)\x1b[0m
\x1b[32mSunday, 11.06.2017 (4 days from now)\x1b[0m
\x1b[32mMonday, 12.06.2017 (5 days from now)\x1b[0m
"""
@freeze_time('2017-6-7')
def test_staticdaywalker(coll_vdirs):
collection, _ = coll_vdirs
this_date = dt.date.today()
daywalker = StaticDayWalker(this_date, None, CONF, collection, delete_status=dict())
elistbox = DListBox(
daywalker, parent=None, conf=CONF,
delete_status=lambda:
|
False,
toggle_delete_all=None,
toggle_delete_instance=None,
dynamic_days=False,
)
canvas = elistbox.render((50, 10), True)
assert CanvasTranslator(canvas, palette).transform() == \
"""\x1b[34mToday (Wednesday, 07.06.2017)\x1b[0m
\x1b[32mTomorrow (Thursday, 08.06.2017)\x1b[0m
\x1b[32mFriday,
|
09.06.2017 (2 days from now)\x1b[0m
"""
@freeze_time('2017-6-7')
def test_staticdaywalker_3(coll_vdirs):
collection, _ = coll_vdirs
this_date = dt.date.today()
conf = dict()
conf.update(CONF)
conf['default'] = {'timedelta': dt.timedelta(days=1)}
daywalker = StaticDayWalker(this_date, None, conf, collection, delete_status=dict())
elistbox = DListBox(
daywalker, parent=None, conf=conf,
delete_status=lambda: False,
toggle_delete_all=None,
toggle_delete_instance=None,
dynamic_days=False,
)
canvas = elistbox.render((50, 10), True)
assert CanvasTranslator(canvas, palette).transform() == \
'\x1b[34mToday (Wednesday, 07.06.2017)\x1b[0m\n\n\n\n\n\n\n\n\n\n'
|
patrickporto/soldajustica
|
soldajustica/gallery/apps.py
|
Python
|
mit
| 123
| 0.008264
|
from django.apps import AppConfig
|
class GalleryAppConfi
|
g(AppConfig):
name = 'gallery'
verbose_name = 'Galeria'
|
salcho/antares
|
core/PluginManager.py
|
Python
|
mit
| 5,590
| 0.005546
|
'''
Created on Feb 28, 2013
@author: Santiago Diaz M - salchoman@gmail.com
'''
from core.plugs import fuzzdb_plugin
from core.utils.wsresponse_object import wsResponse
from core.data import logger
from core.Singleton import Singleton
import sys
import inspect
import threading
import Queue
import gtk
class PluginManager:
__metaclass__ = Singleton
def __init__(self):
logger.debug("Plugin Manager instansiated")
self.thread_pool = []
self.loaded_plugins = {}
# This queue is the job queue for the threads
self.request_queue = Queue.Queue()
# This list is filled by the attackThreads
self.response_list = []
# This dict will tell us which plugin sent a given payload
self.plugin_payload = {}
self.loadDefault()
"""
This function will load all default plugins
by getting all classes in fuzzdb_plugins
"""
def loadDefault(self):
for name, klass in inspect.getmembers(fuzzdb_plugin, inspect.isclass):
# Get all registered classes except the ones being imported. This will probably change in the future
if name != 'IFuzzdbPlug' and name != 'attack_payloads' and name != 'regex' and name != 'IPlugin':
plug = klass()
self.loaded_plugins[plug.getName()] = plug
def addPlugin(self, classPath):
pass
"""
Start an attack against operation opName, setting the attack parameters
from args and using the plugs collection of plugins.
The progress parameter is a function callback to update the progress bar
in the injector widget. This one's optional.
Return a list of wsResponse objects.
"""
def startAttack(self, opName, args, plugs, num_threads, progress=None):
# Check required objects
from core.fwCore import core
wsdlhelper = core.iswsdlhelper()
if not wsdlhelper or not opName:
return None
self.response_list = []
# Spawn pool of threads
for i in range(num_threads):
t = attackThread(self.request_queue, self.response_list, wsdlhelper, i, opName, self.getPlugin)
t.setDaemon(True)
t.start()
self.thread_pool.append(t)
size = 0
cnt = 1
for plugin in plugs:
if plugin in self.loaded_plugins.keys():
payloads = self.loaded_plugins[plugin].getPayloads()
size += len(payloads)
for payload in payloads:
# Fill queue, fill plugin vs payload dict
if payload:
self.plugin_payload[payload] = plugin
self.request_queue.put([cnt, (args,payload)])
cnt += 1
#print 'Got response [%d]: %10s' % (sys.getsizeof(response), response)
# Wait till everyone finishes, update progress bar meanwhile
try:
per = 0
#while not self.request_queue.empty():
while per != 1:
per = 1-(float(self.request_queue.qsize())/size)
progress(percent=per, text=str(int(per*100)) + '%')
except:
pass
if self.respon
|
se_list:
# Report results to analyzer
core.initAnalyzer(self.response_list)
return self.response_list
return None
|
def stopAttack(self):
for thread in self.thread_pool:
thread.stop()
with self.request_queue.mutex:
self.request_queue.queue.clear()
# Return the plugin that sent this payload
def getPlugin(self, payload):
ret = None
try:
plugin = self.plugin_payload[payload]
ret = self.loaded_plugins[plugin]
except KeyError:
pass
return ret
def getLoadedPlugins(self):
return self.loaded_plugins
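# Illustrative usage sketch (not part of the original file): a caller such as
# the injector widget would typically drive an attack roughly like this. The
# operation name, arguments and progress callback are placeholders.
#
#   manager = PluginManager()
#   plugs = list(manager.getLoadedPlugins().keys())
#   results = manager.startAttack('someOperation', args, plugs, num_threads=4,
#                                 progress=update_progress)
#   if results is None:
#       manager.stopAttack()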
class attackThread(threading.Thread):
"""
This threads will receive:
Job queue
Result list to fill
Necessary wsdlhelper object for sending requests
Request identifier
Operation to be called
getPlugin callback from plugin manager to fill in the wsResponse object
"""
def __init__(self, queue, out_list, wsdlhelper, id, op_name, getPlugin):
threading.Thread.__init__(self)
self.queue = queue
self.out_list = out_list
self.wsdl = wsdlhelper
self.id = id
self.op_name = op_name
self.get_plugin = getPlugin
self._stop = threading.Event()
def run(self):
while True:
[req_id, (args, payload)] = self.queue.get()
if payload and args and req_id:
resp_object = self.wsdl.customRequest(self.op_name, args, payload)
if resp_object[0]:
response = wsResponse(id=req_id, params=args, size=sys.getsizeof(resp_object[0]), response=resp_object, payload=payload, plugin=self.get_plugin(payload))
# Report this to the appropriate plugin
plugin = response.getPlugin()
plugin.reportResult(response)
self.out_list.append(response)
else:
self.out_list.append(None)
logger.error("Empty response! Error sending request -> Args are: %s ; Payload is: %s" % (args, payload))
self.queue.task_done()
def stop(self):
self._stop.set()
|
DavidBarishev/DDtankFarmingBot
|
Ddtank_farm_bot/Framework/Capture.py
|
Python
|
gpl-3.0
| 1,336
| 0.000749
|
"""This modules is used to capture the screen
"""
import pyautogui
import time
import Globals
PATH = './Captur
|
es/'
def capture_area(area):
"""
Captures area of the screen
Args:
area (Tuple (x,y,width,height)): Area to capture
Returns:
Image : Image of the area captured
"""
img = pyautogui.screenshot(region=area)
return img
def save_area(area, filename=None):
"""
Saves area of the screen to file
Args:
|
area (Tuple (x,y,width,height)): Area to capture save
filename (String): File name to save
"""
if filename is None:
filename = ('area_snap_' + str(area).replace('(', ' ').replace(')', ' '))
save_img(capture_area(area=area), filename)
def get_game_screen():
"""
Get game screen image
Returns:
Image : Image of screen area
"""
return capture_area(area=Globals.GAME_REGION)
def save_game_screen(filename=None):
"""
Saves game area screen shot to file
Args:
filename (String): Name of file to save to
"""
if filename is None:
# Build the default name at call time, not at import time
filename = 'full_snap_' + str(time.time())
save_img(get_game_screen(), filename)
def save_img(img, filename):
"""
Saves image to file
Args:
img (Image): Image to save
filename (String): Image save name
"""
img.save(PATH + filename + '.png')
|
kappapolls/kappapolls
|
kappahistory/migrations/0008_auto_20150303_2155.py
|
Python
|
gpl-2.0
| 649
| 0
|
# -*- coding
|
: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('kappahistory', '0007_auto_20150303_2154'),
]
operations = [
migrations.AddField(
model_name='drive',
name='name',
field=models.CharField(default='default', max_length=200),
preserve_default=False,
|
),
migrations.AlterField(
model_name='drive',
name='url',
field=models.URLField(null=True, blank=True),
preserve_default=True,
),
]
|
Juniper/tempest
|
tempest/scenario/test_volume_boot_pattern.py
|
Python
|
apache-2.0
| 10,161
| 0
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
import testtools
from tempest.common import utils
from tempest.common import waiters
from tempest import config
from tempest.lib.common.utils import data_utils
from tempest.lib import decorators
from tempest.scenario import manager
CONF = config.CONF
LOG = logging.getLogger(__name__)
class TestVolumeBootPattern(manager.EncryptionScenarioTest):
# Boot from volume scenario is quite slow, and needs extra
# breathing room to get through deletes in the time allotted.
TIMEOUT_SCALING_FACTOR = 2
@classmethod
def skip_checks(cls):
super(TestVolumeBootPattern, cls).skip_checks()
if not CONF.volume_feature_enabled.snapshot:
raise cls.skipException("Cinder volume snapshots are disabled")
def _create_volume_from_image(self):
img_uuid = CONF.compute.image_ref
vol_name = data_utils.rand_name(
self.__class__.__name__ + '-volume-origin')
return self.create_volume(name=vol_name, imageRef=img_uuid)
def _get_bdm(self, source_id, source_type, delete_on_termination=False):
bd_map_v2 = [{
'uuid': source_id,
'source_type': source_type,
'destination_type': 'volume',
'boot_index': 0,
'delete_on_termination': delete_on_termination}]
return {'block_device_mapping_v2': bd_map_v2}
def _boot_instance_from_resource(self, source_id,
source_type,
keypair=None,
security_group=None,
delete_on_termination=False):
create_kwargs = dict()
if keypair:
create_kwargs['key_name'] = keypair['name']
if security_group:
create_kwargs['security_groups'] = [
{'name': security_group['name']}]
create_kwargs.update(self._get_bdm(
source_id,
source_type,
delete_on_termination=delete_on_termination))
return self.create_server(image_id='', **create_kwargs)
def _delete_server(self, server):
self.servers_client.delete_server(server['id'])
waiters.wait_for_server_termination(self.servers_client, server['id'])
@decorators.idempotent_id('557cd2c2-4eb8-4dce-98be-f86765ff311b')
@testtools.skipUnless(CONF.network.public_network_id,
'The public_network_id option must be specified.')
@utils.services('compute', 'volume', 'image')
def test_volume_boot_pattern(self):
"""This test case attempts to reproduce the following steps:
        * Create a bootable volume in Cinder from a Glance image
* Boot an instance from the bootable volume
* Write content to the volume
        * Delete the instance and boot a new instance from the volume
* Check written content in the instance
* Create a volume snapshot while the instance is running
* Boot an additional instance from the new snapshot based volume
* Check written content in the instance booted from snapshot
"""
LOG.info("Creating keypair and security group")
keypair = self.create_keypair()
security_group = self._create_security_group()
# create an instance from volume
LOG.info("Booting instance 1 from volume")
volume_origin = self._create_volume_from_image()
instance_1st = self._boot_instance_from_resource(
source_id=volume_origin['id'],
source_type='volume',
keypair=keypair,
security_group=security_group)
LOG.info("Booted first instance: %s", instance_1st)
# write content to volume on instance
LOG.info("Setting timestamp in instance %s", instance_1st)
ip_instance_1st = self.get_server_ip(instance_1st)
timestamp = self.create_timestamp(ip_instance_1st,
private_key=keypair['private_key'])
# delete instance
LOG.info("Deleting first instance: %s", instance_1st)
self._delete_server(instance_1st)
# create a 2nd instance from volume
instance_2nd = self._boot_instance_from_resource(
source_id=volume_origin['id'],
source_type='volume',
keypair=keypair,
security_group=security_group)
LOG.info("Booted second instance %s", instance_2nd)
# check the content of written file
LOG.info("Getting timestamp in instance %s", instance_2nd)
ip_instance_2nd = self.get_server_ip(instance_2nd)
timestamp2 = self.get_timestamp(ip_instance_2nd,
private_key=keypair['private_key'])
self.assertEqual(timestamp, timestamp2)
# snapshot a volume
LOG.info("Creating snapshot from volume: %s", volume_origin['id'])
snapshot = self.create_volume_snapshot(volume_origin['id'], force=True)
# create a 3rd instance from snapshot
LOG.info("Creating third instance from snapshot: %s", snapshot['id'])
volume = self.create_volume(snapshot_id=snapshot['id'],
size=snapshot['size'])
LOG.info("Booting third instance from snapshot")
server_from_snapshot = (
self._boot_instance_from_resource(source_id=volume['id'],
source_type='volume',
keypair=keypair,
security_group=security_group))
LOG.info("Booted third inst
|
ance %s", server_from_snapshot)
# check the content of written file
|
LOG.info("Logging into third instance to get timestamp: %s",
server_from_snapshot)
server_from_snapshot_ip = self.get_server_ip(server_from_snapshot)
timestamp3 = self.get_timestamp(server_from_snapshot_ip,
private_key=keypair['private_key'])
self.assertEqual(timestamp, timestamp3)
@decorators.idempotent_id('05795fb2-b2a7-4c9f-8fac-ff25aedb1489')
@decorators.attr(type='slow')
@utils.services('compute', 'image', 'volume')
def test_create_server_from_volume_snapshot(self):
# Create a volume from an image
boot_volume = self._create_volume_from_image()
# Create a snapshot
boot_snapshot = self.create_volume_snapshot(boot_volume['id'])
# Create a server from a volume snapshot
server = self._boot_instance_from_resource(
source_id=boot_snapshot['id'],
source_type='snapshot',
delete_on_termination=True)
server_info = self.servers_client.show_server(server['id'])['server']
# The created volume when creating a server from a snapshot
created_volume = server_info['os-extended-volumes:volumes_attached']
self.assertNotEmpty(created_volume, "No volume attachment found.")
created_volume_info = self.volumes_client.show_volume(
created_volume[0]['id'])['volume']
# Verify the server was created from the snapshot
self.assertEqual(
boot_volume['volume_image_metadata']['image_id'],
created_volume_info['volume_image_metadata']['image_id'])
self.assertEqual(boot_snapshot['id'],
created_volume_info['snapshot_id'])
self.assertEqual(server['id'],
created_volume_info['attachments'][0]['server_id'])
self.asser
|
AFFogarty/SEP-Bot
|
public/sep_search/models/__init__.py
|
Python
|
mit
| 90
| 0
|
from sep_
|
search.models.article import Article
from
|
sep_search.models.author import Author
|
UrLab/incubator
|
stock/migrations/0007_auto_20200904_2351.py
|
Python
|
agpl-3.0
| 587
| 0.001704
|
# Generated by Django 3.0.9 on 2020-09-04 21:51
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
|
('stock', '0006_auto_20200904_2343'),
]
operations = [
migrations.RemoveField(
model_name='paymenttransaction',
name='method',
),
migrations.AddField(
model_name='fundzone',
name='method',
field=models.CharField(choices=[('a', 'Card'), ('b', 'Cash')], default='a', max_length=1),
preserve_default=False,
),
|
]
|
ClaudiuGeorgiu/PlaystoreDownloader
|
playstoredownloader/playstore/meta.py
|
Python
|
mit
| 1,902
| 0.001052
|
#!/usr/bin/env python3
import logging
import requests
logger = logging.getLogger(__name__)
class PackageMeta:
def __init__(self, api, package_name) -> None:
self.api = api
self.package_name = package_name
self.details = self.app_details()
if not self.details:
exception = RuntimeError(
"Can't proceed with the download: th
|
ere was an error when "
f"requesting details for app '{self.package_name}'"
)
logging.exception(exception)
raise exception
def app_details(self) -> object:
"""
Get the details for a certain app (identified by the package name) in the
Google Play Store.
:return: A protobuf object containing the details of the app. The result
|
will be None if there was something wrong with the query.
"""
# Prepare the query.
path = "details"
query = {"doc": requests.utils.quote(self.package_name)}
# Execute the query.
# noinspection PyProtectedMember
response = self.api._execute_request(path, query)
# If the query went completely wrong.
try:
return response.payload.detailsResponse
except AttributeError as no_payload_error:
try:
logger.error(
f"Error for app '{self.package_name}': "
f"{response.commands.displayErrorMessage}"
)
raise no_payload_error
except AttributeError as no_commands_error:
logger.error(
f"There was an error when requesting details for "
f"app '{self.package_name}'"
)
raise no_commands_error from no_payload_error
def __getattr__(self, name: str):
return getattr(self.details, name)
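# Usage sketch (illustrative addition; `api` is assumed to be an already
# authenticated Play Store API object exposing _execute_request, as used above):
#
#   meta = PackageMeta(api, "com.example.app")
#   details = meta.details          # protobuf details response for the app
#   # any other attribute lookup is proxied to the protobuf via __getattr__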
|
sonymoon/algorithm
|
src/main/python/geeksforgeeks/list/mmerge-sort-for-linked-list.py
|
Python
|
apache-2.0
| 1,384
| 0
|
# key point is to find the half node
class Node:
def __init__(self, val):
self.val = val
self.next = None
class LinkList:
def __init__(self):
self.head = None
def push(self, val):
node = Node(val
|
)
if self.head:
node.next = self.head
self.head = node
else:
self.head = node
def printList(self):
p = self.head
while p:
print p.val,
p = p.next
print
def mergeSort(head):
if not head
|
:
        return head
if not head.next:
        return head
slow = head
fast = head.next
while fast:
fast = fast.next
if fast:
slow = slow.next
fast = fast.next
# 2 3 20 5 10 15
frontHalf = head
backHalf = slow.next
slow.next = None
    # capture the returned heads, otherwise the sorted sub-lists are lost
    frontHalf = mergeSort(frontHalf)
    backHalf = mergeSort(backHalf)
head = sortedMerge(frontHalf, backHalf)
return head
def sortedMerge(a, b):
if not a:
return b
elif not b:
return a
temp = None
if a.val <= b.val:
temp = a
a.next = sortedMerge(temp.next, b)
return a
else:
temp = b
b.next = sortedMerge(a, temp.next)
return b
ll = LinkList()
ll.push(15)
ll.push(10)
ll.push(5)
ll.push(20)
ll.push(3)
ll.push(2)
ll.printList()
ll.head = mergeSort(ll.head)
ll.printList()
|
pychess/pychess
|
utilities/arena.py
|
Python
|
gpl-3.0
| 6,460
| 0.008363
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
PyChess arena tournament script.
This script executes a tournament between the engines installed on your
system. The script is executed from a terminal with the usual environment.
'''
import os
import sys
###############################################################################
# Set up important things
from gi.repository import GLib
from gi.repository import GObject
GObject.threads_init()
mainloop = GLib.MainLoop()
from pychess.Utils.const import *
###############################################################################
# Fix environment
if "PYTHONPATH" in os.environ:
os.environ["PYTHONPATH"] = os.pathsep.join(
os.path.abspath(p) for p in os.environ["PYTHONPATH"].split(os.pathsep))
###############################################################################
from pychess.System import Log
Log.DEBUG = False
###############################################################################
# Do the rest of the imports
from pychess.Players.engineNest import discoverer
from pychess.Savers.pgn import save
from pychess.Utils.GameModel import GameModel
from pychess.Utils.TimeModel import TimeModel
from pychess.Variants import variants
###############################################################################
# Look up engines
def prepare():
print("Discovering engines", end=' ')
discoverer.connect('discovering_started', cb_started)
discoverer.connect('engine_discovered', cb_gotone)
discoverer.connect('all_engines_discovered', start)
discoverer.discover()
def cb_started(discoverer, binnames):
print("Wait a moment while we discover %d engines" % len(binnames))
def cb_gotone (discoverer, binname, engine):
sys.stdout
|
.write(".")
###############################################################################
# Ask the user for details
engines = []
results = []
minutes = 0
current = [0,0]
def start(discoverer):
global engines, results, minutes
engines
|
= discoverer.getEngines()
n = len(engines)
for i in range(n):
results.append([None]*n)
print()
print("Your installed engines are:")
for i, engine in enumerate(engines):
name = discoverer.getName(engine)
print("[%s] %s" % (name[:3], name))
print("The total amount of fights will be %d" % (n*(n-1)))
print()
minutes = int(input("Please enter the clock minutes for each game [n]: "))
print("The games will last up to %d minutes." % (2*n*(n-1)*minutes))
print("You will be informed of the progress as the games finish.")
print()
runGame()
###############################################################################
# Run games
def runGame():
a, b = findMatch()
if a == None:
print("All games have now been played. Here are the final scores:")
printResults()
mainloop.quit()
return
current[0] = a
current[1] = b
game = GameModel(TimeModel(minutes*60,0))
game.connect('game_started', cb_gamestarted)
game.connect('game_ended', cb_gameended)
p0 = discoverer.initPlayerEngine(engines[a], WHITE, 8, variants[NORMALCHESS], secs=minutes*60, incr=0, forcePonderOff=True)
p1 = discoverer.initPlayerEngine(engines[b], BLACK, 8, variants[NORMALCHESS], secs=minutes*60, incr=0, forcePonderOff=True)
game.setPlayers([p0,p1])
game.start()
def cb_gamestarted(game):
print("Starting the game between %s and %s" % tuple(game.players))
def cb_gameended(game, reason):
print("The game between %s and %s ended %s" % (tuple(game.players)+(reprResult[game.status],)))
if game.status not in (DRAW, WHITEWON, BLACKWON):
print("Something must have gone wrong. But we'll just try to continue!")
else:
i, j = current
results[i][j] = game.status
print("The current scores are:")
printScoreboard()
print()
with open("arena.pgn", "a+") as fh:
save(fh, game)
runGame()
###############################################################################
# A few helpers
def printScoreboard():
names = [discoverer.getName(e)[:3] for e in engines]
print(r"W\B", " ".join(names))
for i, nameA in enumerate(names):
print(nameA, end=' ')
for j, nameB in enumerate(names):
if i == j: print(" # ", end=' ')
elif results[i][j] == DRAW: print("½-½", end=' ')
elif results[i][j] == WHITEWON: print("1-0", end=' ')
elif results[i][j] == BLACKWON: print("0-1", end=' ')
else: print(" . ", end=' ')
print()
def printResults():
scores = []
for i in range(len(engines)):
points = sum(2 for j in range(len(engines)) if results[i][j] == WHITEWON) \
+ sum(1 for j in range(len(engines)) if results[i][j] == DRAW) \
+ sum(2 for j in range(len(engines)) if results[j][i] == BLACKWON) \
+ sum(1 for j in range(len(engines)) if results[j][i] == DRAW)
scores.append((points, i))
scores.sort(reverse=True)
for points, i in scores:
print(discoverer.getName(engines[i]), ":", points/2, "½"*(points%2))
#def findMatch():
# for i, engineA in enumerate(engines):
# for j, engineB in enumerate(engines):
# if i != j and results[i][j] == None:
# return i, j
# return None, None
import random
def findMatch():
pos = [(i,j) for i in range(len(engines))
for j in range(len(engines))
if i != j and results[i][j] == None]
#pos = [(i,j) for i,j in pos if
# "pychess" in discoverer.getName(engines[i]).lower() or
# "pychess" in discoverer.getName(engines[j]).lower()]
if not pos:
return None, None
return random.choice(pos)
###############################################################################
# Push onto the mainloop and start it
#glib.idle_add(prepare)
prepare()
def do(discoverer):
game = GameModel(TimeModel(60,0))
#game.connect('game_started', cb_gamestarted2)
game.connect('game_ended', lambda *a: mainloop.quit())
p0 = discoverer.initPlayerEngine(discoverer.getEngines()['rybka'], WHITE, 7, variants[NORMALCHESS], 60)
p1 = discoverer.initPlayerEngine(discoverer.getEngines()['gnuchess'], BLACK, 7, variants[NORMALCHESS], 60)
game.setPlayers([p0,p1])
game.start()
#discoverer.connect('all_engines_discovered', do)
#discoverer.start()
mainloop.run()
|
phalcon/readthedocs.org
|
readthedocs/core/djangome_urls.py
|
Python
|
mit
| 821
| 0.014616
|
from django.conf.urls.defaults import patterns, url
from urls import urlpatterns as main_patterns
ALL_VERSIONS_RE = '(?P<version>.+)'
urlpatterns = patterns(
'', # base view, flake8 complains if it is on the previous line.
url('^$',
'djangome.views.redirect_home',
{'version': 'latest'}),
url('^(?P<term>[\w\-\.]+)$',
'djangome.views.redirect_to_term',
{'version': 'latest'}),
url('^(?P<term>[\w\-\.]+)/stats$',
'djangome.views.show_term',
{'version': 'latest'}),
url('^%s/(?P<term>[\w\-\.]+)$' % ALL_VERSIONS_RE,
'djangome.views.redirect_to_term',
name='redirect_to_term'),
url('^%s/(?P<term>[
|
\w\-\.
|
]+)/stats$' % ALL_VERSIONS_RE,
'djangome.views.show_term',
name='show_term'),
)
urlpatterns += main_patterns
|
itJunky/web-tasker.py
|
db_repository/versions/026_migration.py
|
Python
|
gpl-2.0
| 1,017
| 0.001967
|
from sqlalchemy import *
from migrate import *
from migrate.changeset import schema
pre_meta = MetaData()
post_meta = MetaData()
project = Table('project', post_meta,
Column('id', Integer, primary_key=True, nullable=False),
Column('name', String(length=255)),
)
project_association = Table('project_association', post_meta,
Column('id', Integer, primary_key=True, nullable=False),
Column('user_id', Integer),
Column('project_id', Integer),
)
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine; bind
# migrate_engine to your metadata
pre_meta.bind = migrate_engine
post_m
|
eta.bind = migrate_engine
post_meta.tables['project'].create()
post_meta.tables['project_association'].create()
def downgrade(migrate_
|
engine):
# Operations to reverse the above upgrade go here.
pre_meta.bind = migrate_engine
post_meta.bind = migrate_engine
post_meta.tables['project'].drop()
post_meta.tables['project_association'].drop()
|
uw-it-aca/myuw
|
myuw/dao/affiliation.py
|
Python
|
apache-2.0
| 7,365
| 0
|
# Copyright 2022 UW-IT, University of Washington
# SPDX-License-Identifier: Apache-2.0
"""
This module provides affiliations of the current user
"""
import logging
import traceback
from myuw.dao import log_err
from myuw.dao.exceptions import IndeterminateCampusException
from myuw.dao.enrollment import (
get_main_campus, get_cur_class_level, get_latest_class_level)
from myuw.dao.gws import (
is_clinician, is_staff_employee, is_student_employee,
is_alum_asso, is_student, is_grad_student, is_undergrad_student,
is_pce_student, is_seattle_student, is_bothell_student, is_tacoma_student,
is_applicant, is_grad_c2, is_undergrad_c2, in_hxtoolkit_group)
from myuw.dao.instructor import is_instructor
from myuw.dao.pws import (
get_employee_campus, is_employee, is_faculty, is_prior_employee,
is_prior_student, is_retiree, is_alumni)
from myuw.dao.uwnetid import is_2fa_permitted
from myuw.dao.student_profile import get_profile_of_current_user
logger = logging.getLogger(__name__)
def get_all_affiliations(request):
"""
return a dictionary of affiliation indicators.
The first class affiliations:
["all_employee"]: employee or clinician (include student employee)
["employee"]: True if is current employee (not student employee, clinician)
["clinician"]: True if in uw affiliation clinical groups
["faculty"]: True if the user is currently faculty.
["instructor"]: True if is instructor in the past 6 years
["staff_employee"]: True if the user is currently staff.
["student"]: True if the user is currently an UW student.
["stud_employee"]: True if the user is currently a student employee.
["grad"]: True if the user is currently an UW graduate student.
["undergrad"]: True if the user is currently an UW undergraduate student.
["applicant"]: True if the user is currently a UW applicant
["pce"]: True if the user is an UW PCE student.
["grad_c2"]: True if the user takes UW PCE grad courses
["undergrad_c2"]: True if the user takes UW PCE undergrad courses
["seattle"]: True if the user is an UW Seattle student
["bothell"]: True if the user is an UW Bothell student
["tacoma"]: True if the user is an UW Tacoma student
["official_seattle"]: True if the user is Seattle employee
["official_bothell"]: True if the user is Bothell employee
["official_tacoma"]: True if the user is Tacoma employee
["official_pce"]: waiting on sws to add a field in Enrollment.
["class_level"]: class level in current term enrollment.
["latest_class_level"]: the class level in the latest enrollment.
["F1"]: F1 international student
["J1"]: J1 international student
["intl_stud"]: F1 or J1 international student
["hxt_viewer"]: Husky Experience Toolkit viewer
["no_1st_class_affi"]: not applicant, current employee,
clinician, student, instructor
The following are secondary affiliations (without 1st_class_aff):
["alumni"]: True if the user is currently an UW alumni and NOT
current student, employee, applicant
["alum_asso"]: alumni association member
["retiree"]: True if the user is a retired staff and NOT
current applicant, student, employee
["past_employee"]: True if the user is a former employee and NOT
current student, applicant
["past_stud"]: True if the user is a former student and NOT
current employee, applicant
"""
if hasattr(request, 'myuw_user_affiliations'):
|
return request.myuw_user_affiliations
not_major_affi = (not is_applicant(request) and
not is_employee(request) and
not is_clinician(request) and
n
|
ot is_instructor(request) and
not is_student(request))
(is_sea_stud, is_undergrad, is_hxt_viewer) = get_is_hxt_viewer(request)
data = {"class_level": None,
"latest_class_level": get_latest_class_level(request),
"grad": is_grad_student(request),
"undergrad": is_undergrad,
"applicant": is_applicant(request),
"student": is_student(request),
"pce": is_pce_student(request),
"grad_c2": is_grad_c2(request),
"undergrad_c2": is_undergrad_c2(request),
"F1": False,
"J1": False,
"intl_stud": False,
"2fa_permitted": is_2fa_permitted(request),
"all_employee": is_employee(request) or is_clinician(request),
"clinician": is_clinician(request),
"employee": (is_employee(request) and
not is_student_employee(request)),
"faculty": is_faculty(request),
"instructor": is_instructor(request),
"staff_employee": is_staff_employee(request),
"stud_employee": is_student_employee(request),
"seattle": is_sea_stud,
"bothell": is_bothell_student(request),
"tacoma": is_tacoma_student(request),
"official_seattle": False,
"official_bothell": False,
"official_tacoma": False,
"hxt_viewer": is_hxt_viewer,
"alum_asso": is_alum_asso(request),
"alumni": is_alumni(request) and not_major_affi,
"retiree": is_retiree(request) and not_major_affi,
"past_employee": is_prior_employee(request) and not_major_affi,
"past_stud": is_prior_student(request) and not_major_affi,
"no_1st_class_affi": not_major_affi,
}
campuses = []
if data["student"]:
data["class_level"] = get_cur_class_level(request)
try:
sws_person = get_profile_of_current_user(request)
data["F1"] = sws_person.is_F1()
data["J1"] = sws_person.is_J1()
data["intl_stud"] = data["F1"] or data["J1"]
except Exception:
log_err(logger, "get_profile_of_current_user", traceback, request)
# enhance student campus with current and future enrollments
campuses = get_main_campus(request)
if len(campuses) > 0:
data["enrolled_stud"] = True
data['seattle'] = data['seattle'] or ('Seattle' in campuses)
data['bothell'] = data['bothell'] or ('Bothell' in campuses)
data['tacoma'] = data['tacoma'] or ('Tacoma' in campuses)
if data['seattle']:
data["hxt_viewer"] = (data["hxt_viewer"] or
data['seattle'] and data["undergrad"])
if is_employee(request):
# determine employee primary campus based on their mailstop
try:
employee_campus = get_employee_campus(request)
data['official_seattle'] = ('Seattle' == employee_campus)
data['official_bothell'] = ('Bothell' == employee_campus)
data['official_tacoma'] = ('Tacoma' == employee_campus)
except IndeterminateCampusException:
pass
request.myuw_user_affiliations = data
return data
def get_is_hxt_viewer(request):
is_sea_stud = is_seattle_student(request)
is_undergrad = is_undergrad_student(request)
# MUWM-4798
is_viewer = is_sea_stud and is_undergrad or in_hxtoolkit_group(request)
return (is_sea_stud, is_undergrad, is_viewer)
|
mbayon/TFG-MachineLearning
|
venv/lib/python3.6/site-packages/numpy/distutils/msvc9compiler.py
|
Python
|
mit
| 2,258
| 0.000443
|
from __future__ import division, absolute_import, print_function
import os
from distutils.msvc9compiler import MSVCCompiler as _MSVCCompiler
from .system_info import platform_bits
def _merge(old, new):
"""Concatenate two environment paths avoiding rep
|
eats.
Here `old` is the environment string before the base class initialize
function is called and `new` is the string after the call. The new string
|
will be a fixed string if it is not obtained from the current environment,
or the same as the old string if obtained from the same environment. The aim
here is not to append the new string if it is already contained in the old
string so as to limit the growth of the environment string.
Parameters
----------
old : string
Previous environment string.
new : string
New environment string.
Returns
-------
ret : string
Updated environment string.
"""
if not old:
return new
if new in old:
return old
# Neither new nor old is empty. Give old priority.
return ';'.join([old, new])
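# Illustrative behaviour of _merge (sketch added for clarity, not in the original file):
#   _merge('', 'new_path')   -> 'new_path'   (old is empty, take new)
#   _merge('a;b', 'b')       -> 'a;b'        (new already contained in old)
#   _merge('a', 'b')         -> 'a;b'        (otherwise concatenate with ';')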
class MSVCCompiler(_MSVCCompiler):
def __init__(self, verbose=0, dry_run=0, force=0):
_MSVCCompiler.__init__(self, verbose, dry_run, force)
def initialize(self, plat_name=None):
# The 'lib' and 'include' variables may be overwritten
# by MSVCCompiler.initialize, so save them for later merge.
environ_lib = os.getenv('lib')
environ_include = os.getenv('include')
_MSVCCompiler.initialize(self, plat_name)
# Merge current and previous values of 'lib' and 'include'
os.environ['lib'] = _merge(environ_lib, os.environ['lib'])
os.environ['include'] = _merge(environ_include, os.environ['include'])
# msvc9 building for 32 bits requires SSE2 to work around a
# compiler bug.
if platform_bits == 32:
self.compile_options += ['/arch:SSE2']
self.compile_options_debug += ['/arch:SSE2']
def manifest_setup_ldargs(self, output_filename, build_temp, ld_args):
ld_args.append('/MANIFEST')
_MSVCCompiler.manifest_setup_ldargs(self, output_filename,
build_temp, ld_args)
|
nityansuman/Hack-Code
|
transistor_and_the_house.py
|
Python
|
mit
| 354
| 0
|
#
|
-*- coding: utf-8 -*-
"""
Created on Thu Nov 10 22:48:20 2016
@author: k nityan suman
"""
n, k = input().strip().split(' ')
n, k = [int(n), int(k)]
x = [int(x_temp) for x_temp in input().strip().split(' ')]
x.sort()
# print(x)
dist = x[-1] - x[0]
# print(dist)
k = 2*k
# maximum number of transistor required
maxx = int
|
(dist / k)
|
LethusTI/supportcenter
|
vendor/django/tests/regressiontests/templates/custom.py
|
Python
|
gpl-3.0
| 24,013
| 0.010453
|
from __future__ import absolute_import
from django import template
from django.utils.unittest import TestCase
from .templatetags import custom
class CustomFilterTests(TestCase):
def test_filter(self):
t = template.Template("{% load custom %}{{ string|trim:5 }}")
self.assertEqual(
t.render(template.Context({"string": "abcdefghijklmnopqrstuvwxyz"})),
u"abcde"
)
class CustomTagTests(TestCase):
def verify_tag(self, tag, name):
self.assertEqual(tag.__name__, name)
self.assertEqual(tag.__doc__, 'Expected %s __doc__' % name)
self.assertEqual(tag.__dict__['anything'], 'Expected %s __dict__' % name)
def test_simple_tags(self):
c = template.Context({'value': 42})
t = template.Template('{% load custom %}{% no_params %}')
self.assertEqual(t.render(c), u'no_params - Expected result')
t = template.Template('{% load custom %}{% one_param 37 %}')
self.assertEqual(t.render(c), u'one_param - Expected result: 37')
t = template.Template('{% load custom %}{% explicit_no_context 37 %}')
self.assertEqual(t.render(c), u'explicit_no_context - Expected result: 37')
t = template.Template('{% load custom %}{% no_params_with_context %}')
self.assertEqual(t.render(c), u'no_params_with_context - Expected result (context value: 42)')
t = template.Template('{% load custom %}{% params_and_context 37 %}')
self.assertEqual(t.render(c), u'params_and_context - Expected result (context value: 42): 37')
t = template.Template('{% load custom %}{% simple_two_params 37 42 %}')
self.assertEqual(t.render(c), u'simple_two_params - Expected result: 37, 42')
t = template.Template('{% load custom %}{% simple_one_default 37 %}')
self.assertEqual(t.render(c), u'simple_one_default - Expected result: 37, hi')
t = template.Template('{% load custom %}{% simple_one_default 37 two="hello" %}')
self.assertEqual(t.render(c), u'simple_one_default - Expected result: 37, hello')
t = template.Template('{% load custom %}{% simple_one_default one=99 two="hello" %}')
self.assertEqual(t.render(c), u'simple_one_default - Expected result: 99, hello')
self.assertRaisesRegexp(template.TemplateSyntaxError,
"'simple_one_default' received unexpected keyword argument 'three'",
template.Template, '{% load custom %}{% simple_one_default 99 two="hello" three="foo" %}')
t = template.Template('{% load custom %}{% simple_one_default 37 42 %}')
self.assertEqual(t.render(c), u'simple_one_default - Expected result: 37, 42')
t = template.Template('{% load custom %}{% simple_unlimited_args 37 %}')
self.assertEqual(t.render(c), u'simple_unlimited_args - Expected result: 37, hi')
t = template.Template('{% load custom %}{% simple_unlimited_args 37 42 56 89 %}')
self.assertEqual(t.render(c), u'simple_unlimited_args - Expected result: 37, 42, 56, 89')
t = template.Template('{% load custom %}{% simple_only_unlimited_args %}')
self.assertEqual(t.render(c), u'simple_only_unlimited_args - Expected result: ')
t = template.Template('{% load custom %}{% simple_only_unlimited_args 37 42 56 89 %}')
self.assertEqual(t.render(c), u'simple_only_unlimited_args - Expected result: 37, 42, 56, 89')
self.assertRaisesRegexp(template.TemplateSyntaxError,
"'simple_two_params' received too many positional arguments",
template.Template, '{% load custom %}{% simple_two_params 37 42 56 %}')
self.assertRaisesRegexp(template.TemplateSyntaxError,
"'simple_one_default' received too many positional arguments",
template.Template, '{% load custom %}{% simple_one_default 37 42 56 %}')
t = template.Template('{% load custom %}{% simple_unlimited_args_kwargs 37 40|add:2 56 eggs="scrambled" four=1|add:3 %}')
self.assertEqual(t.render(c), u'simple_unlimited_args_kwargs - Expected result: 37, 42, 56 / eggs=scrambled, four=4')
self.assertRaisesRegexp(template.TemplateSyntaxError,
"'simple_unlimited_args_kwargs' received some positional argument\(s\) after some keyword argument\(s\)",
template.Template, '{% load custom %}{% simple_unlimited_args_kwargs 37 40|add:2 eggs="scrambled" 56 four=1|add:3 %}')
self.assertRaisesRegexp(template.TemplateSyntaxError,
"'simple_unlimited_args_kwargs' received multiple values for keyword argument 'eggs'",
template.Template, '{% load custom %}{% simple_unlimited_args_kwargs 37 eggs="scrambled" eggs="scrambled" %}')
def test_simple_tag_registration(self):
# Test that the decorators preserve the decorated function's docstring, name and attributes.
self.verify_tag(custom.no_params, 'no_params')
self.verify_tag(custom.one_param, 'one_param')
self.verify_tag(custom.explicit_no_context, 'explicit_no_context')
self.verify_tag(custom.no_params_with_context, 'no_params_with_context')
self.verify_tag(custom.params_and_context, 'params_and_context')
self.verify_tag(custom.simple_unlimited_args_kwargs, 'simple_unlimited_args_kwargs')
self.verify_tag(custom.simple_tag_without_context_parameter, 'simple_tag_without_context_parameter')
def test_simple_tag_missing_context(self):
# The 'context' parameter
|
must be present wh
|
en takes_context is True
self.assertRaisesRegexp(template.TemplateSyntaxError,
"'simple_tag_without_context_parameter' is decorated with takes_context=True so it must have a first argument of 'context'",
template.Template, '{% load custom %}{% simple_tag_without_context_parameter 123 %}')
def test_inclusion_tags(self):
c = template.Context({'value': 42})
t = template.Template('{% load custom %}{% inclusion_no_params %}')
self.assertEqual(t.render(c), u'inclusion_no_params - Expected result\n')
t = template.Template('{% load custom %}{% inclusion_one_param 37 %}')
self.assertEqual(t.render(c), u'inclusion_one_param - Expected result: 37\n')
t = template.Template('{% load custom %}{% inclusion_explicit_no_context 37 %}')
self.assertEqual(t.render(c), u'inclusion_explicit_no_context - Expected result: 37\n')
t = template.Template('{% load custom %}{% inclusion_no_params_with_context %}')
self.assertEqual(t.render(c), u'inclusion_no_params_with_context - Expected result (context value: 42)\n')
t = template.Template('{% load custom %}{% inclusion_params_and_context 37 %}')
self.assertEqual(t.render(c), u'inclusion_params_and_context - Expected result (context value: 42): 37\n')
t = template.Template('{% load custom %}{% inclusion_two_params 37 42 %}')
self.assertEqual(t.render(c), u'inclusion_two_params - Expected result: 37, 42\n')
t = template.Template('{% load custom %}{% inclusion_one_default 37 %}')
self.assertEqual(t.render(c), u'inclusion_one_default - Expected result: 37, hi\n')
t = template.Template('{% load custom %}{% inclusion_one_default 37 two="hello" %}')
self.assertEqual(t.render(c), u'inclusion_one_default - Expected result: 37, hello\n')
t = template.Template('{% load custom %}{% inclusion_one_default one=99 two="hello" %}')
self.assertEqual(t.render(c), u'inclusion_one_default - Expected result: 99, hello\n')
self.assertRaisesRegexp(template.TemplateSyntaxError,
"'inclusion_one_default' received unexpected keyword argument 'three'",
template.Template, '{% load custom %}{% inclusion_one_default 99 two="hello" three="foo" %}')
t = template.Template('{% load custom %}{% inclusion_one_default 37 42 %}')
self.assertEqual(t.render(c), u'inclusion_one_default - Expected result: 37, 42\n')
t = template.Template('{% load custom %}{% inclusion_unlimited_args 37 %}')
self.assertEqual(t.render(c), u'inclusion_unlimited_args - Expected result: 37, hi\n')
t =
|
ashutoshvt/psi4
|
tests/pytests/test_np_views.py
|
Python
|
lgpl-3.0
| 2,472
| 0
|
"""
This is a simple script that verifies several ways of accessing numpy arrays
and ensures that their memory is properly cleaned.
"""
import pytest
from .addons import using
import numpy as np
import psi4
p
|
ytestmark = pytest.mark.quick
# If it's too small, something odd happens with the memory manager
mat_size = 10000
def snapshot_memory():
import memory_profiler as mp
return mp.memory_usage()[0] * 1048576
def check_leak(func, tol=1.e6):
start = snapshot_memory()
func()
diff = abs(start - snapshot_memory())
# A megabyte is excusable due to various GC funcs
if diff > tol:
raise MemoryError("Function did not correctly c
|
lean up")
else:
print("Function %s: PASSED" % func.__name__)
return True
def build_mat():
mat = psi4.core.Matrix(mat_size, mat_size)
return mat
def build_view_mat():
mat = psi4.core.Matrix(mat_size, mat_size)
view = mat.np
return mat, view
def build_viewh_mat():
mat = psi4.core.Matrix(mat_size, mat_size)
view = mat.np
return mat, view
def build_view_set_mat():
mat = psi4.core.Matrix(mat_size, mat_size)
view = mat.np
view[:] = 5
return mat, view
def build_arr_mat():
mat = psi4.core.Matrix(mat_size, mat_size)
view = np.asarray(mat)
return mat, view
def build_copy_mat():
mat = psi4.core.Matrix(mat_size, mat_size)
view = np.array(mat)
return mat, view
@using("memory_profiler")
def test_build_mat():
assert(check_leak(build_mat))
@using("memory_profiler")
def test_build_view_mat():
assert(check_leak(build_view_mat))
@using("memory_profiler")
def test_build_viewh_mat():
assert(check_leak(build_viewh_mat))
@using("memory_profiler")
def test_build_view_set_mat():
assert(check_leak(build_view_set_mat))
@using("memory_profiler")
def test_build_arr_mat():
assert(check_leak(build_arr_mat))
@using("memory_profiler")
def test_build_copy_mat():
assert(check_leak(build_copy_mat))
@using("memory_profiler")
def test_totals():
start = snapshot_memory()
check_leak(build_mat)
check_leak(build_view_mat)
check_leak(build_viewh_mat)
check_leak(build_view_set_mat)
check_leak(build_arr_mat)
check_leak(build_copy_mat)
# Double check totals
diff = abs(start - snapshot_memory())
if diff > 1.e6:
raise MemoryError("\nA function leaked %d bytes of memory!" % diff)
else:
print("\nNo leaks detected!")
|
jingriver/stocktracker
|
pytoolkit/regular_expression/pyqt3to4.py
|
Python
|
mit
| 4,237
| 0.032334
|
import re, sys, os
from subprocess import *
QT_IMPORT = {re.compile(r"\bfrom qt import\b"):"from PyQt4.Qt import",
re.compile(r"\bfrom qttable import\b"):"#from qttable import",
re.compile(r"\bfrom qtcanvas import\b"):"#from qtcanvas import"}
QT_CLS = {re.compile(r"\bQCanvasText\b"):"QGraphicsSimpleTextItem",
re.compile(r"\bQTable\b"):"QTableWidget",
re.compile(r"\bQDragObject\b"):"QMimeData",
re.compile(r"\bQIconDrag\b"):"QListWidget",
re.compile(r"\bQIconView\b"):"QListWidget",
re.compile(r"\bQTableItem\b"):"QTableWidgetItem",
re.compile(r"\bQListViewItem\b"):"QListWidgetItem",
re.compile(r"\bQCanvas\b"):"QGraphicsScene",
re.compile(r"\bQCanvasView\b"):"QGraphicsView",
re.compile(r"\bQCanvasEllipse\b"):"QGraphicsEllipseItem",
re.compile(r"\bQCanvasRectangle\b"):"QGraphicsRectItem",
re.compile(r"\bQDockWindow\b"):"QDockWidget",
re.compile(r"\bexec_loop\b"):"exec_",
re.compile(r"\bQPopupMenu\b"):"QMenu",
re.compile(r"\bsetNumCols\b"):"setColumnCount",
re.compile(r"\bPYSIGNAL\b"):"SIGNAL",
re.compile(r"\bsetOn\b"):"setChecked",
re.compile(r"\bsetCaption\b"):"setWindowTitle",
#re.compile(r"\binsertItem\b"):"addItem",
#re.compile(r"\bsetCurrentItem\b"):"setCurrentIndex",
re.compile(r"""\bnumRows\(\)"""):"rowCount()",
re.compile(r"""\bnumCols\(\)"""):"columnCount()",
}
#setWindowIcon(QtGui.QPixmap("image0"))
#setWindowIcon(QtGui.QIcon(QtGui.QPixmap("image0")))
def replace_emit(matchstr):
newstr = matchstr
rawstr = r"""emit\s*\(\s*SIGNAL\s*\([\s,\w,\",\']+\)\s*,\s*(\([\w,\,\s,\",\',\.]*\))"""
compile_obj = re.compile(rawstr)
match_obj = compile_obj.search(newstr)
|
while match_obj:
all_groups = match_obj.groups()
# Retrieve group(s) by index
group_1 = match_obj.group(1)
if group_1[0]=="(" and group_1[0]=="(":
repl=group_1[1:-1]
group_1 = "\(" + repl + "\)"
repl = repl.strip()
if repl=="":
group_1 = "\s*,\s*\(" + repl + "\)"
elif repl[-1]==",":
repl = repl[:-1]
|
print "[%s]----[%s]" % (group_1, repl)
# Replace string
newstr = re.sub(group_1,repl, newstr)
match_obj = compile_obj.search(newstr)
return newstr
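# Illustrative before/after for replace_emit (sketch, not part of the original file):
#   self.emit(SIGNAL("clicked"), (x, y))  ->  self.emit(SIGNAL("clicked"), x, y)
#   self.emit(SIGNAL("changed"), ())      ->  self.emit(SIGNAL("changed"))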
def replace_gen_class(s):
#s = ' from genchartitemarroweditor import genChartItemArrowEditor'
#p = re.compile(r'from (?P<fname>\b\w+) import')
p = re.compile(r'from \bgen(\w+) \bimport \bgen(\w+)')
ms = p.findall(s)
clsnames = []
for m in ms:
cname = 'gen'+m[1]
clsnames.append(cname)
newstr = p.sub(r'from gen\1 import Ui_gen\2', s)
for c in clsnames:
rawstr = r"""(\b%s.__init__(.*))""" % c
p = re.compile(rawstr)
m = p.search(newstr)
if m:
print m.groups()
newstr = p.sub(r'\1;self.setupUi(self)', newstr)
for c in clsnames:
newstr = re.sub(r'\b(%s)\b' % c, 'Ui_'+c,newstr)
return newstr
def replace_name(s, d):
newstr = s
for p in d:
newstr = p.sub(d[p], newstr)
return newstr
def replace(fname):
f = open(fname)
s = f.read()
f.close()
#res = replace_gen_class(s)
#res = replace_name(res, QT_IMPORT)
#res = replace_name(res, QT_CLS)
res = replace_emit(s)
if s!=res:
print "processing " + os.path.split(fname)[1]
try:
save(fname+".bak", s)
save(fname, res)
except:
pass
def save(fname, content):
f = open(fname, 'w')
f.write(content)
f.close()
def dirpy(dirname):
try:
fnames = os.listdir(dirname)
fnames = filter(lambda x: str(x).lower().endswith('py'), fnames)
if 'pyqt3to4.py' in fnames: fnames.remove('pyqt3to4.py')
#fnames = filter(lambda x: str(x)!='pyqt3to4.py', fnames)
fnames = map(lambda x:os.path.join(dirname,x), fnames)
except:
fnames = [dirname]
map(replace, fnames)
if __name__=='__main__':
dirpy(sys.argv[1])
#f = sys.argv[1]
#print f
#f = 'chartitem.py'
#replace(f)
|
jbzdak/data-base-checker
|
bdcheckerapp/autograding/zaj5/unit5/task3.py
|
Python
|
gpl-3.0
| 1,459
| 0.004838
|
# -*- coding: utf-8 -*-
from bdchecker.api import NewDatabaseTaskChecker
from bdcheckerapp.autograding.zaj5.unit5.utils import Zaj5TaskChecker, UserList
class TaskChecker(NewDatabaseTaskChecker):
display_stdout = True
class TestSuite(Zaj5TaskChecker):
def test_has_procedure(self):
self.assert_has_procedure("add_user")
|
def test
|
_view_is_empty_at_the_beginning(self):
self.assertEqual(len(list(self.session.query(UserList.username))), 0,
msg="Widok \"LIST_USERS\" powinien być pusty zaraz po stworzeniu schematu")
def test_user_role_can_add_users(self):
user = self.get_session("user")
try:
user.execute("SELECT add_user('foo', 'bar');")
user.flush()
except Exception as e:
raise AssertionError("Rola \"user\" nie mogła wywołać unkcji add_user") from e
self.assertEqual(list(user.query(self.UserList.username)), [("foo",)], msg="Po wykonaniu metody add_user nie było użytkownika w bazie danych")
def test_user_is_created_properly(self):
self.session.execute("SELECT add_user('foo', 'bar');")
self.assertEqual(
list(self.session.query(self.Users.username, self.Users.is_admin)), [("foo", 0)],
msg="Po stworzeniu użytkownika za pomocą add_user okazało się że nie został on stworzony poprawnie.")
|
guorendong/iridium-browser-ubuntu
|
tools/telemetry/telemetry/user_story/user_story_set_unittest.py
|
Python
|
bsd-3-clause
| 2,649
| 0.004908
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import unittest
from telemetry.story import shared_state
from telemetry import user_story
from telemetry.user_story import user_story_set
from telemetry.util import cloud_storage
# pylint: disable=abstract-method
class SharedStateBar(shared_state.SharedState):
pass
class UserStoryFoo(user_story.UserStory):
def __init__(self, name='', labels=None):
super(UserStoryFoo, self).__init__(
SharedStateBar, name, labels)
class U
|
serStorySetFoo(user_story_set.UserStorySet):
""" UserStorySetFoo is a user story created for testing purpose. """
pass
class UserStorySetTest(unittest.TestCase):
def testUserStoryTestName(self):
self.assertEquals('user_story_set_unittest', Us
|
erStorySetFoo.Name())
def testUserStoryTestDescription(self):
self.assertEquals(
' UserStorySetFoo is a user story created for testing purpose. ',
UserStorySetFoo.Description())
def testBaseDir(self):
uss = UserStorySetFoo()
base_dir = uss.base_dir
self.assertTrue(os.path.isdir(base_dir))
self.assertEqual(base_dir, os.path.dirname(__file__))
def testFilePath(self):
uss = UserStorySetFoo()
self.assertEqual(os.path.abspath(__file__).replace('.pyc', '.py'),
uss.file_path)
def testCloudBucket(self):
blank_uss = user_story_set.UserStorySet()
self.assertEqual(blank_uss.bucket, None)
public_uss = user_story_set.UserStorySet(
cloud_storage_bucket=cloud_storage.PUBLIC_BUCKET)
self.assertEqual(public_uss.bucket, cloud_storage.PUBLIC_BUCKET)
partner_uss = user_story_set.UserStorySet(
cloud_storage_bucket=cloud_storage.PARTNER_BUCKET)
self.assertEqual(partner_uss.bucket, cloud_storage.PARTNER_BUCKET)
internal_uss = user_story_set.UserStorySet(
cloud_storage_bucket=cloud_storage.INTERNAL_BUCKET)
self.assertEqual(internal_uss.bucket, cloud_storage.INTERNAL_BUCKET)
with self.assertRaises(ValueError):
user_story_set.UserStorySet(cloud_storage_bucket='garbage_bucket')
def testRemoveWithEmptySetRaises(self):
uss = user_story_set.UserStorySet()
foo_story = UserStoryFoo()
with self.assertRaises(ValueError):
uss.RemoveUserStory(foo_story)
def testBasicAddRemove(self):
uss = user_story_set.UserStorySet()
foo_story = UserStoryFoo()
uss.AddUserStory(foo_story)
self.assertEqual([foo_story], uss.user_stories)
uss.RemoveUserStory(foo_story)
self.assertEqual([], uss.user_stories)
|
gtesei/fast-furious
|
competitions/jigsaw-toxic-comment-classification-challenge/eda.py
|
Python
|
mit
| 11,538
| 0.012307
|
import sys
import numpy as np
import os
import pandas as pd
from sklearn import preprocessing
import re
from nltk.corpus import stopwords
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.feature_extraction.text import TfidfTransformer
from sklearn.feature_extraction.text import TfidfVectorizer
from nltk.corpus import stopwords
from sklearn.ensemble import RandomForestClassifier
from sklearn.naive_bayes import MultinomialNB
from sklearn.svm import SVC
from sklearn.linear_model import SGDClassifier
from sklearn.metrics import accuracy_score , roc_auc_score , log_loss
import sklearn.linear_model as lm
from sklearn.model_selection import GridSearchCV
import Stemmer
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.feature_extraction.text imp
|
ort TfidfTransformer
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics import roc_auc_score, log_loss
from numpy import linalg as LA
from sklearn import neighbors
from sklearn.neural_network import MLPClassifier
from bs4 import BeautifulSoup
#import xgboost as xgb
import datetime as dt
# StemmedTfidfVectorizer
english_stemmer = Stemmer.Stemmer('en')
c
|
lass StemmedTfidfVectorizer(TfidfVectorizer):
def build_analyzer(self):
analyzer = super(TfidfVectorizer, self).build_analyzer()
return lambda doc: english_stemmer.stemWords(analyzer(doc))
def text_to_wordlist( review, remove_stopwords=False ):
# Function to convert a document to a sequence of words,
# optionally removing stop words. Returns a list of words.
#
# 1. Remove HTML
text = BeautifulSoup(review,'html.parser').get_text()
#
# 2. Remove non-letters
text = re.sub("[^A-za-z0-9^,?!.\/'+-=]"," ", text)
text = re.sub(r"what's", "what is ", text)
text = re.sub(r"\'s", " ", text)
text = re.sub(r"\'ve", " have ", text)
text = re.sub(r"can't", "cannot ", text)
text = re.sub(r"n't", " not ", text)
text = re.sub(r"i'm", "i am ", text)
text = re.sub(r"\'re", " are ", text)
text = re.sub(r"\'d", " would ", text)
text = re.sub(r"\'ll", " will ", text)
text = re.sub(r"\'scuse", " excuse ", text)
text = re.sub(r",", " ", text)
text = re.sub(r"\.", " ", text)
text = re.sub(r"!", " ! ", text)
text = re.sub(r"\?", " ? ", text)
#
# 3. Convert words to lower case and split them
words = text.lower().split()
#
# 4. Optionally remove stop words (false by default)
if remove_stopwords:
stops = set(stopwords.words("english"))
words = [w for w in words if not w in stops]
# 5. Return a list
return(words)
def clean_text( text ):
# Function to convert a document to a sequence of words,
# optionally removing stop words. Returns a list of words.
#
# 1. Remove HTML
#text = BeautifulSoup(review,'html.parser').get_text()
#
# 2. Remove non-letters
text = re.sub("[^A-za-z0-9^,?!.\/'+-=]"," ", text)
text = re.sub(r"what's", "what is ", text)
text = re.sub(r"\'s", " ", text)
text = re.sub(r"\'ve", " have ", text)
text = re.sub(r"can't", "cannot ", text)
text = re.sub(r"n't", " not ", text)
text = re.sub(r"i'm", "i am ", text)
text = re.sub(r"\'re", " are ", text)
text = re.sub(r"\'d", " would ", text)
text = re.sub(r"\'ll", " will ", text)
text = re.sub(r"\'scuse", " excuse ", text)
text = re.sub(r",", " ", text)
text = re.sub(r"\.", " ", text)
text = re.sub(r"!", " _exclamationmark_ ", text)
text = re.sub(r"\?", " _questionmark_ ", text)
#
return text
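# Rough illustration of clean_text (added sketch, not in the original kernel):
#   clean_text("i'm happy, really!")
#   -> "i am  happy  really _exclamationmark_ "   (modulo extra whitespace,
#      which the downstream TfidfVectorizer tokenizer ignores)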
def build_data_set(ngram=3,stem=False,max_features=2000,min_df=2,remove_stopwords=True,holdout_perc=0.25,seed=123,debug=True):
train_data = pd.read_csv('data/train.csv')
if debug:
df = train_data.loc[:5000]
else:
print(">>> loading test set ...")
test = pd.read_csv('data/test.csv')
test.fillna('__NA__',inplace=True)
df = train_data
np.random.seed(seed)
perm = np.random.permutation(df.shape[0])
df = df.sample(frac=1).reset_index(drop=True)
##
clean_train_comments = []
for i in range(df.shape[0]):
#clean_train_comments.append(" ".join(text_to_wordlist(df["comment_text"][i], remove_stopwords)))
clean_train_comments.append( clean_text(df["comment_text"][i]) )
if not debug:
print(">>> processing test set ...")
for i in range(test.shape[0]):
clean_train_comments.append( clean_text(test["comment_text"][i]) )
qs = pd.Series(clean_train_comments).astype(str)
if not stem:
# 1-gram / no-stem
vect = TfidfVectorizer(analyzer=u'word',stop_words='english',min_df=min_df,ngram_range=(1, ngram),max_features=max_features)
ifidf_vect = vect.fit_transform(qs)
#print("ifidf_vect:", ifidf_vect.shape)
X = ifidf_vect.toarray()
if not debug:
X = X[:df.shape[0]]
else:
vect_stem = StemmedTfidfVectorizer(analyzer=u'word',stop_words='english',min_df=min_df,ngram_range=(1, ngram),max_features=max_features)
ifidf_vect_stem = vect_stem.fit_transform(qs)
#print("ifidf_vect_stem:", ifidf_vect_stem.shape)
X = ifidf_vect_stem.toarray()
if not debug:
X = X[:df.shape[0]]
Y = df[['toxic','severe_toxic','obscene','threat','insult','identity_hate']]
assert Y.shape[0] == X.shape[0]
## split
hold_out_obs = int(df.shape[0] * holdout_perc)
train_obs = df.shape[0] - hold_out_obs
# X
X_train = X[:train_obs]
X_holdout = X[train_obs:]
# Y_toxic
Y_train = Y[:train_obs]
Y_holdout = Y[train_obs:]
return X_train,X_holdout,Y_train,Y_holdout
#--------------------------- Main()
# conf
debug = False
if debug:
kfolds = 2
print(">>> Debug mode .")
else:
print(">>> Production mode .")
kfolds = 5
labels = ['toxic','severe_toxic','obscene','threat','insult','identity_hate']
# models
columns = ['1_gram',
'2_gram',
'3_gram',
'Stem',
'Max_Features',
'Classifier',
'Best_Parameters_CV',
'Best_LogLoss_CV',
'STD_CV',
'LogLoss_HOLDOUT',
'Resampling_Procedure']
perf_panels = { 'toxic': pd.DataFrame(data=np.zeros((0, len(columns))), columns=columns),
'severe_toxic': pd.DataFrame(data=np.zeros((0, len(columns))), columns=columns),
'obscene': pd.DataFrame(data=np.zeros((0, len(columns))), columns=columns),
'threat': pd.DataFrame(data=np.zeros((0, len(columns))), columns=columns),
'insult': pd.DataFrame(data=np.zeros((0, len(columns))), columns=columns),
'identity_hate': pd.DataFrame(data=np.zeros((0, len(columns))), columns=columns)
}
models = ['LogisticRegression','MultinomialNB']
parameters = {
'RandomForest': {"n_estimators": [100, 1000, 10000],
"max_depth": [3, 1, None],
"criterion": ["gini", "entropy"]},
'SVC': {'kernel': ['linear', 'rbf', 'poly'], 'C': [0.1, 1, 5, 10, 50, 100]},
'LogisticRegression': {'C': [0.1,0.8,1,1.2,10]},
'MultinomialNB': {'alpha': [0.1, 0.5, 0.9, 1]},
'KNeighborsClassifier': {'n_neighbors': [5, 10, 20, 50], 'weights': ['uniform', 'distance']},
'MLPClassifier': {'hidden_layer_sizes': [(1000, 50),(2000,100),(3000,200),(3000,1000,100)]}
}
#--------------------------- Assumptions to be tuned during next rounds
# - removed stopwords
# - removed numbers ... instead of replacing with _number_
# - min_df = 2 (min. freq)
# - no lemmatization
#-----------------------------------------------------------------
#--------------------------- pre-processing options
stem_options = [True,False]
grams = [1,2,3]
max_features = [500,2000,5000,10000]
#-----------------------------------------------------------------
# proc
t0 = dt.datetime.now()
iter = 1
for stem_option in stem_options:
for max_feat in max_features:
for gram in grams:
X_train,X_holdout,Y_train,Y_holdout = build_data_set(ngram=gram, stem=stem_option,max_features=max_feat,debug=
|
klusta-team/kwiklib
|
kwiklib/dataio/loader.py
|
Python
|
bsd-3-clause
| 17,513
| 0.00217
|
"""This module provides utility classes and functions to load spike sorting
data sets."""
# -----------------------------------------------------------------------------
# Imports
# -----------------------------------------------------------------------------
import os
import os.path
import re
from collections import Counter
import numpy as np
import pandas as pd
from qtools import QtGui, QtCore
from tools import (load_text, normalize,
load_binary, load_pickle, save_text, get_array,
first_row, load_binary_memmap)
from selection import (select, select_pairs, get_spikes_in_clusters,
get_some_spikes_in_clusters, get_some_spikes, get_indices)
from kwiklib.utils.logger import (debug, info, warn, exception, FileLogger,
register, unregister)
from kwiklib.utils.colors import COLORS_COUNT, generate_colors, next_color
# -----------------------------------------------------------------------------
# Default cluster/group info
# -----------------------------------------------------------------------------
def default_cluster_info(clusters_unique):
n = len(clusters_unique)
cluster_info = pd.DataFrame({
'color': generate_colors(n),
'group': 3 * np.ones(n)},
dtype=np.int32,
index=clusters_unique)
# Put cluster 0 in group 0 (=noise), cluster 1 in group 1 (=MUA)
if 0 in clusters_unique:
cluster_info['group'][0] = 0
if 1 in clusters_unique:
cluster_info['group'][1] = 1
return cluster_info
def default_group_info():
group_info = np.zeros((4, 3), dtype=object)
group_info[:, 0] = np.arange(4)
group_info[:, 1] = generate_colors(group_info.shape[0])
group_info[:, 2] = np.array(['Noise', 'MUA', 'Good', 'Unsorted'],
dtype=object)
group_info = pd.DataFrame(
{'color': group_info[:, 1].astype(np.int32),
'name': group_info[:, 2]},
index=group_info[:, 0].astype(np.int32))
return group_info
# -----------------------------------------------------------------------------
# Cluster renumbering
# -----------------------------------------------------------------------------
def reorder(x, order):
x_reordered = np.zeros_like(x)
for i, o in enumerate(or
|
der):
x_reordered[x == o] = i
return x_reordered
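# Illustrative example (sketch, not in the original file):
#   reorder(np.array([7, 3, 7, 5]), [3, 5, 7])  ->  array([2, 0, 2, 1])
# i.e. every value in x is replaced by its index in `order`.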
def renumber_clusters(clusters, cluster_info):
clusters_unique = get_array(get_indices(cluster_info))
ncluster
|
s = len(clusters_unique)
assert np.array_equal(clusters_unique, np.unique(clusters))
clusters_array = get_array(clusters)
groups = get_array(cluster_info['group'])
colors = get_array(cluster_info['color'])
groups_unique = np.unique(groups)
# Reorder clusters according to the group.
clusters_unique_reordered = np.hstack(
[sorted(clusters_unique[groups == group]) for group in groups_unique])
# WARNING: there's a +2 offset to avoid conflicts with the old convention
# cluster 0 = noise, cluster 1 = MUA.
clusters_renumbered = reorder(clusters_array, clusters_unique_reordered) + 2
cluster_permutation = reorder(clusters_unique_reordered, clusters_unique)
# Reorder cluster info.
groups_reordered = groups[cluster_permutation]
colors_reordered = colors[cluster_permutation]
# Recreate new cluster info.
cluster_info_reordered = pd.DataFrame({'color': colors_reordered,
'group': groups_reordered}, dtype=np.int32,
index=(np.arange(nclusters) + 2))
return clusters_renumbered, cluster_info_reordered
# -----------------------------------------------------------------------------
# Generic Loader class
# -----------------------------------------------------------------------------
class Loader(QtCore.QObject):
progressReported = QtCore.pyqtSignal(int, int)
saveProgressReported = QtCore.pyqtSignal(int, int)
# Progress report
# ---------------
def report_progress(self, index, count):
self.progressReported.emit(index, count)
def report_progress_save(self, index, count):
self.saveProgressReported.emit(index, count)
# Initialization methods
# ----------------------
def __init__(self, parent=None, filename=None, userpref=None):
"""Initialize a Loader object for loading Klusters-formatted files.
Arguments:
* filename: the full path of any file belonging to the same
dataset.
"""
super(Loader, self).__init__()
self.spikes_selected = None
self.clusters_selected = None
self.override_color = False
if not userpref:
# HACK: if no UserPref is given in argument to the loader,
# use a mock dictionary returning None all the time.
class MockDict(object):
def __getitem__(self, name):
return None
userpref = MockDict()
self.userpref = userpref
if filename:
self.filename = filename
self.open(self.filename)
def open(self, filename=None):
"""Open everything."""
pass
def open_spikes(self):
"""Open just spike-related information."""
def open_traces(self):
"""Open just trace information."""
def open_aesthetic(self):
"""Open aesthetic visualization-related information."""
# Input-Output methods
# --------------------
def read(self):
pass
def save(self):
pass
def close(self):
pass
# Access to the data: spikes
# --------------------------
def select(self, spikes=None, clusters=None):
if clusters is not None:
spikes = get_spikes_in_clusters(clusters, self.clusters)
self.spikes_selected = spikes
self.clusters_selected = clusters
def unselect(self):
self.select(spikes=None, clusters=None)
def get_clusters_selected(self):
return self.clusters_selected
def has_selection(self):
return self.clusters_selected is not None and len(self.clusters_selected) > 0
def get_clusters_unique(self):
return self.clusters_unique
def get_features(self, spikes=None, clusters=None):
if clusters is not None:
spikes = get_spikes_in_clusters(clusters, self.clusters)
if spikes is None:
spikes = self.spikes_selected
return select(self.features, spikes)
def get_features_background(self):
return self.features
def get_some_features(self, clusters=None):
"""Return the features for a subset of all spikes: a large number
of spikes from any cluster, an a controlled subset of the selected
clusters."""
if clusters is None:
clusters = self.clusters_selected
if clusters is not None:
spikes_background = get_some_spikes(self.clusters,
nspikes_max=self.userpref['features_nspikes_background_max'],)
spikes_clusters = get_some_spikes_in_clusters(
clusters,
self.clusters,
counter=self.counter,
nspikes_max_expected=self.userpref[
'features_nspikes_selection_max'],
nspikes_per_cluster_min=self.userpref[
'features_nspikes_per_cluster_min'])
spikes = np.union1d(spikes_background, spikes_clusters)
else:
spikes = self.spikes_selected
return select(self.features, spikes)
def get_spiketimes(self, spikes=None, clusters=None):
if clusters is not None:
spikes = get_spikes_in_clusters(clusters, self.clusters)
if spikes is None:
spikes = self.spikes_selected
spiketimes = getattr(self, 'spiketimes', getattr(self, 'spiketimes_res', None))
return select(spiketimes, spikes)
def get_clusters(self, spikes=None, clusters=None):
if clusters is not None:
spikes = get_spikes_in_clusters(clusters, self.clusters)
if spikes is None:
spikes = self.spikes_selected
return select(self.clusters, spikes)
def get_masks(self, spikes=None, full=None, clusters=None):
if clusters is not None:
spike
|
joelbitar/rfinder
|
analyzer/show.py
|
Python
|
lgpl-3.0
| 4,016
| 0.005976
|
import re
import settings
from analyzer import Analyzer
class ShowAnalyzer(Analyzer):
patterns_and_values = [
(r'.*season[\s_\-\.](\d{1,2}).*', 60, ('season', None)),
(r'.*season(\d{1,2}).*', 60, ('season', None)),
(r'.*s(\d{1,2})[ex](\d{1,2}).*', 80, ('season', 'episode')), # ____s01e01____
(r'.*s(\d{1,2}).*', 40, ('season', None)),
(r'.*del\.(\d{1,2})\.av\.\d{1,2}.*', 15, ('episode', None)), # ____X.av.X.
(r'^(\d)(\d{2})\s\-\s.*', 30, ('season', 'episode')), # 101 ______
(r'.*\.(\d)[ex](\d{2})\..*', 30, ('season', 'episode')), # ____1x01 ______
(r'.*(\d{4})\.e(\d{1,2}).*', 20, ('season', 'episode')), # ____213.E10___
(r'.*\.(\d{4})\.\d{2}\.\d{2}\..*', 18, ('season', None)),
(r'.*[\.\s]ep(\d{1,2}).*', 15, ('episode', None)),
(r'.*[\.\s]episode(\d{1,2}).*', 15, ('episode', None)),
(r'^.*[\s\.\-]e(\d{1,2})[\s\.\-].*', 20, ('episode', None)), # ___E01___
]
def get_confidence(self):
# Now.. for the fun part!
confidences = [0]
for confidence, match, mapping in self.get_matches():
confidences.append(confidence)
return max(confidences)
# Match the folder names
def get_season(self):
confidence, match_groups, mapping = self.get_matches()[0]
if 'season' in mapping:
try:
return int(match_groups[mapping.index('season')])
except:
pass
return None
def get_episode(self):
confidence, match_groups, mapping = self.get_matches()[0]
if 'episode' in mapping:
try:
return int(match_groups[mapping.index('episode')])
except:
pass
return None
def get_show_properties(self):
season = self.get_season()
episode = self.get_episode()
return {
'season' : season,
'episode' : episode,
'show_name' : self.get_show_name()
}
def get_show_name(self):
showname_regexps = (
r'(.*)[\s_\-\.]+complete[\s_\-\.]+season.*',
r'(.*)[\s_\-\.]+season.*',
r'(.*)\.\d{4}\.\d{2}\.\d{2}\..*', #Talkshow kind of thing
r'(.*)\.\d{4}\.e\d{2}\..*', # Other shows, based on year.
r'(.*)\.\d[ex]\d{2}\..*',
r'(.*)[\s_\-\.]+s\d{1,2}.*', # Show.s01
r'(.*)[\s_\-\.]+s\d{1,2}[ex]\d{1,2}[\s_\-\.].*',
r'(.*)[\s_\-\.]\d{3}.*',
r'(.*)[\s_\-\.]e\d{2}.*',
r'(.*)del\.\d{1,2}\.av\.\d{1,2}.*',# del.X.av.X.
)
        # Iterate over the paths backwards
for path_part in self.file.get_path_parts():
for regexp in showname_regexps:
match = re.match(regexp,path_part, re.IGNORECASE)
cleaned_name = False
if match:
                    cleaned_name = self.get_cleaned_name(" ".join(match.groups()))
|
if cleaned_name:
return cleaned_name
for path_part in self.file.get_path_parts():
cleaned_name = self.get_cleaned_name(path_part)
if cleaned_name:
return cleaned_name
def get_pretty_path_season_name(self):
season = self.get_season() or 1
season_str = str(season)
if len(season_str) == 4:
|
return str(season)
if season <= 9:
season_str = '0' + season_str
return "%s %s" % (
settings.SEASON_FOLDER_NAME,
season_str
)
def get_pretty_path_list(self):
return [
self.get_show_name(),
self.get_pretty_path_season_name()
]
def get_pretty_path(self):
pretty_path = self.get_absolut_path(
[settings.FOLDERS_TV_SHOWS] + self.get_pretty_path_list()
)
if settings.VERBOSE:
            print('Pretty path: %s' % pretty_path)
return pretty_path
|
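The ShowAnalyzer record above pairs each regex with a confidence weight and a (season, episode) group mapping. A minimal standalone sketch of that weighted-pattern idea, using an assumed sample filename and a trimmed pattern list (nothing below comes from the dataset record itself):

import re

# Sketch of confidence-weighted pattern matching in the style of ShowAnalyzer;
# the two patterns and the sample name are illustrative assumptions.
PATTERNS = [
    (r'.*s(\d{1,2})[ex](\d{1,2}).*', 80, ('season', 'episode')),
    (r'.*season[\s_\-\.](\d{1,2}).*', 60, ('season', None)),
]

def best_match(name):
    """Return (confidence, fields) for the highest-confidence matching pattern."""
    best = (0, {})
    for regexp, confidence, mapping in PATTERNS:
        match = re.match(regexp, name, re.IGNORECASE)
        if match and confidence > best[0]:
            fields = {key: int(group)
                      for key, group in zip(mapping, match.groups())
                      if key is not None}
            best = (confidence, fields)
    return best

print(best_match('Some.Show.S02E05.720p'))  # (80, {'season': 2, 'episode': 5})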
patrickm/chromium.src
|
content/test/gpu/run_gpu_test.py
|
Python
|
bsd-3-clause
| 416
| 0.007212
|
#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
|
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__),
os.pardir, os.pardir, os.pardir, 'tools', 'telemetry'))
from telemetry import test_runner
if __name__ == '__main__':
  sys.exit(test_runner.Main())
|
|
alxgu/ansible
|
lib/ansible/plugins/callback/selective.py
|
Python
|
gpl-3.0
| 10,438
| 0.002491
|
# (c) Fastly, inc 2016
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = """
callback: selective
callback_type: stdout
requirements:
- set as main display callback
short_description: only print certain tasks
version_added: "2.4"
description:
- This callback only prints tasks that have been tagged with `print_action` or that have failed.
This allows operators to focus on the tasks that provide value only.
- Tasks that are not printed are placed with a '.'.
- If you increase verbosity all tasks are printed.
options:
nocolor:
default: False
description: This setting allows suppressing colorizing output
env:
- name: ANSIBLE_NOCOLOR
- name: ANSIBLE_SELECTIVE_DONT_COLORIZE
ini:
- section: defaults
key: nocolor
type: boolean
"""
EXAMPLES = """
- debug: msg="This will not be printed"
- debug: msg="But this will"
tags: [print_action]
"""
import difflib
from ansible import constants as C
from ansible.plugins.callback import CallbackBase
from ansible.module_utils._text import to_text
from ansible.utils.color import codeCodes
DONT_COLORIZE = False
COLORS = {
'normal': '\033[0m',
'ok': '\033[{0}m'.format(codeCodes[C.COLOR_OK]),
'bold': '\033[1m',
    'not_so_bold': '\033[1m\033[34m',
|
'changed': '\033[{0}m'.format(codeCodes[C.COLOR_CHANGED]),
    'failed': '\033[{0}m'.format(codeCodes[C.COLOR_ERROR]),
|
'endc': '\033[0m',
'skipped': '\033[{0}m'.format(codeCodes[C.COLOR_SKIP]),
}
def dict_diff(prv, nxt):
"""Return a dict of keys that differ with another config object."""
keys = set(prv.keys() + nxt.keys())
result = {}
for k in keys:
if prv.get(k) != nxt.get(k):
result[k] = (prv.get(k), nxt.get(k))
return result
def colorize(msg, color):
"""Given a string add necessary codes to format the string."""
if DONT_COLORIZE:
return msg
else:
return '{0}{1}{2}'.format(COLORS[color], msg, COLORS['endc'])
class CallbackModule(CallbackBase):
"""selective.py callback plugin."""
CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'stdout'
CALLBACK_NAME = 'selective'
def __init__(self, display=None):
"""selective.py callback plugin."""
super(CallbackModule, self).__init__(display)
self.last_skipped = False
self.last_task_name = None
self.printed_last_task = False
def set_options(self, task_keys=None, var_options=None, direct=None):
super(CallbackModule, self).set_options(task_keys=task_keys, var_options=var_options, direct=direct)
global DONT_COLORIZE
DONT_COLORIZE = self.get_option('nocolor')
def _print_task(self, task_name=None):
if task_name is None:
task_name = self.last_task_name
if not self.printed_last_task:
self.printed_last_task = True
line_length = 120
if self.last_skipped:
print()
msg = colorize("# {0} {1}".format(task_name,
'*' * (line_length - len(task_name))), 'bold')
print(msg)
def _indent_text(self, text, indent_level):
lines = text.splitlines()
result_lines = []
for l in lines:
result_lines.append("{0}{1}".format(' ' * indent_level, l))
return '\n'.join(result_lines)
def _print_diff(self, diff, indent_level):
if isinstance(diff, dict):
try:
diff = '\n'.join(difflib.unified_diff(diff['before'].splitlines(),
diff['after'].splitlines(),
fromfile=diff.get('before_header',
'new_file'),
tofile=diff['after_header']))
except AttributeError:
diff = dict_diff(diff['before'], diff['after'])
if diff:
diff = colorize(str(diff), 'changed')
print(self._indent_text(diff, indent_level + 4))
def _print_host_or_item(self, host_or_item, changed, msg, diff, is_host, error, stdout, stderr):
if is_host:
indent_level = 0
name = colorize(host_or_item.name, 'not_so_bold')
else:
indent_level = 4
if isinstance(host_or_item, dict):
if 'key' in host_or_item.keys():
host_or_item = host_or_item['key']
name = colorize(to_text(host_or_item), 'bold')
if error:
color = 'failed'
change_string = colorize('FAILED!!!', color)
else:
color = 'changed' if changed else 'ok'
change_string = colorize("changed={0}".format(changed), color)
msg = colorize(msg, color)
line_length = 120
spaces = ' ' * (40 - len(name) - indent_level)
line = "{0} * {1}{2}- {3}".format(' ' * indent_level, name, spaces, change_string)
if len(msg) < 50:
line += ' -- {0}'.format(msg)
print("{0} {1}---------".format(line, '-' * (line_length - len(line))))
else:
print("{0} {1}".format(line, '-' * (line_length - len(line))))
print(self._indent_text(msg, indent_level + 4))
if diff:
self._print_diff(diff, indent_level)
if stdout:
stdout = colorize(stdout, 'failed')
print(self._indent_text(stdout, indent_level + 4))
if stderr:
stderr = colorize(stderr, 'failed')
print(self._indent_text(stderr, indent_level + 4))
def v2_playbook_on_play_start(self, play):
"""Run on start of the play."""
pass
def v2_playbook_on_task_start(self, task, **kwargs):
"""Run when a task starts."""
self.last_task_name = task.get_name()
self.printed_last_task = False
def _print_task_result(self, result, error=False, **kwargs):
"""Run when a task finishes correctly."""
if 'print_action' in result._task.tags or error or self._display.verbosity > 1:
self._print_task()
self.last_skipped = False
msg = to_text(result._result.get('msg', '')) or\
to_text(result._result.get('reason', ''))
stderr = [result._result.get('exception', None),
result._result.get('module_stderr', None)]
stderr = "\n".join([e for e in stderr if e]).strip()
self._print_host_or_item(result._host,
result._result.get('changed', False),
msg,
result._result.get('diff', None),
is_host=True,
error=error,
stdout=result._result.get('module_stdout', None),
stderr=stderr.strip(),
)
if 'results' in result._result:
for r in result._result['results']:
failed = 'failed' in r
stderr = [r.get('exception', None), r.get('module_stderr', None)]
stderr = "\n".join([e for e in stderr if e]).strip()
self._print_host_or_item(r['item'],
r.get('changed', False),
to_text(r.get('msg', '')),
r.get('diff', None),
is_host=False,
error=failed,
stdout=r.get('module_stdout', None),
stderr=stderr.strip(),
|
google/flight-lab
|
controller/utils/display.py
|
Python
|
apache-2.0
| 3,167
| 0.003158
|
# Copyright 2018 Flight Lab authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility library for displaying arbitrary content on a machine."""
import jinja2
import tempfile
from common import pattern
from utils import app
class Display(pattern.Closable):
"""Class for displaying arbitrary content on a machine.
The implementation assumes Chrome browser is available on given machine and
|
use it to display generated html content in kiosk mode so it appears as an app
and works on any platform.
"""
def __init__(self, chrome_path, *args, **kwargs):
"""Crea
|
tes Display instance.
Args:
chrome_path: path to chrome executable.
"""
super(Display, self).__init__(*args, **kwargs)
self._chrome_path = chrome_path
self._temp_path = tempfile.gettempdir()
self._index_file = tempfile.mktemp(suffix='.html')
self._chrome_app = app.Application(
name='Browser',
bin_path=chrome_path,
arguments=[
'--kiosk', self._index_file, '--new-window', '--incognito',
'--noerrordialogs', '--user-data-dir={0}'.format(self._temp_path)
],
restart_on_crash=True)
def close(self):
"""Closes Chrome browser."""
self._chrome_app.stop()
def show_message(self, message, template_path='./data/display_message.html'):
"""Shows a text message in full screen.
Args:
message: text to show.
template_path: a html template to use. It should contain "{{ message }}".
"""
self._generate_page(
template_path=template_path, kwargs={
'message': message
})
self._relaunch()
def show_image(self,
image_path,
template_path='./data/display_image_default.html'):
"""Shows an image in full screen.
Current implementation only displays the image at (0,0) and at its original
size. If image is smaller than screen size, the rest area will be white. If
image is larger than screen size, it will be clipped and scrollbar will
appear.
Args:
image_path: a locally accessible path to image file.
template_path: a html template to use. It should contain
"{{ image_path }}".
"""
self._generate_page(
template_path=template_path, kwargs={
'image_path': image_path
})
self._relaunch()
def _generate_page(self, template_path, kwargs={}):
with open(template_path, 'r') as f:
template = jinja2.Template(f.read())
with open(self._index_file, 'w') as f:
f.write(template.render(**kwargs))
def _relaunch(self):
self._chrome_app.stop()
self._chrome_app.start()
|
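show_message/show_image above render a Jinja2 template to a temporary html file before relaunching Chrome in kiosk mode. A self-contained sketch of just that rendering step, with an inline template string standing in for ./data/display_message.html (an assumption for illustration):

import tempfile

import jinja2

# Inline stand-in for the on-disk template; the real file is expected to
# contain "{{ message }}" somewhere in its markup.
template = jinja2.Template('<html><body><h1>{{ message }}</h1></body></html>')

index_file = tempfile.mktemp(suffix='.html')
with open(index_file, 'w') as f:
    f.write(template.render(message='Preflight checks in progress...'))

print('rendered page at', index_file)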
imageio/imageio
|
tests/test_pillow.py
|
Python
|
bsd-2-clause
| 19,210
| 0.000729
|
""" Tests for imageio's pillow plugin
"""
from pathlib import Path
from imageio.core.request import Request
import os
import io
import pytest
import numpy as np
from PIL import Image, ImageSequence
import imageio as iio
from imageio.core.v3_plugin_api import PluginV3
from imageio.plugins.pillow import PillowPlugin
from imageio.core.request import InitializationError
@pytest.mark.parametrize(
"im_npy,im_out,im_comp",
[
("chelsea.npy", "iio.png", "pil.png"),
("chelsea.npy", "iio.jpg", "pil.jpg"),
("chelsea.npy", "iio.jpeg", "pil.jpg"),
("chelsea.npy", "iio.bmp", "pil.bmp"),
],
)
def test_write_single_frame(test_images, tmp_path, im_npy, im_out, im_comp):
# the base image as numpy array
im = np.load(test_images / im_npy)
# written with imageio
iio_file = tmp_path / im_out
iio.v3.imwrite(iio_file, im, plugin="pillow")
# written with pillow directly
pil_file = tmp_path / im_comp
Image.fromarray(im).save(pil_file)
# file exists
assert os.path.exists(iio_file)
# imageio content matches pillow content
assert iio_file.read_bytes() == pil_file.read_bytes()
@pytest.mark.parametrize(
"im_npy,im_out,im_comp",
[
# Note: There might be a problem with reading/writing frames
# Tracking Issue: https://github.com/python-pillow/Pillow/issues/5307
("newtonscradle_rgb.npy", "iio.gif", "pil.gif"),
# ("newtonscradle_rgba.npy", "iio.gif", "pil.gif"),
],
)
@pytest.mark.needs_internet
def test_write_multiframe(test_images, tmp_path, im_npy, im_out, im_comp):
# the base image as numpy array
im = np.load(test_images / im_npy)
# written with imageio
iio_file = tmp_path / im_out
iio.v3.imwrite(iio_file, im, plugin="pillow")
# written with pillow directly
pil_file = tmp_path / im_comp
pil_images = [Image.fromarray(frame) for frame in im]
pil_images[0].save(pil_file, save_all=True, append_images=pil_images[1:])
# file exists
assert os.path.exists(iio_file)
# imageio content matches pillow content
assert iio_file.read_bytes() == pil_file.read_bytes()
@pytest.mark.parametrize(
"im_in,mode",
[
("chelsea.png", "RGB"),
("chelsea.jpg", "RGB"),
("chelsea.bmp", "RGB"),
("newtonscradle.gif", "RGB"),
("newtonscradle.gif", "RGBA"),
],
)
def test_read(test_images, im_in, mode):
im_path = test_images / im_in
iio_im = iio.v3.imread(im_path, plugin="pillow", mode=mode, index=None)
pil_im = np.asarray(
[
np.array(frame.convert(mode))
for frame in ImageSequence.Iterator(Image.open(im_path))
]
)
assert np.allclose(iio_im, pil_im)
@pytest.mark.parametrize(
"im_in,mode",
[
("newtonscradle.gif", "RGB"),
("newtonscradle.gif", "RGBA"),
],
)
def test_gif_legacy_pillow(test_images, im_in, mode):
"""
This test tests backwards compatibility of using the new API
    with a legacy plugin. In particular reading ndimages
I'm not sure where this test should live, so it is here for now.
"""
|
im_path = test_images / im_in
with iio.imopen(im_path, "r", legacy_mode=True, plugin="GIF-PIL") as file:
iio_im = file.read(pilmode=mode, index=None)
pil_im = np.asarray(
[
np.array(frame.convert(mode))
for frame in ImageSequence.Iterator(Image.open(im_path))
]
)
if pil_im.shape[0] == 1:
pil_im = pil_im.squeeze(axis=0)
assert np.allclose(iio_im, pil_im)
def test_png_compression(test_images, tmp_path):
|
    # Note: Not sure if we should test this or pillow
im = np.load(test_images / "chelsea.npy")
iio.v3.imwrite(tmp_path / "1.png", im, plugin="pillow", compress_level=0)
iio.v3.imwrite(tmp_path / "2.png", im, plugin="pillow", compress_level=9)
size_1 = os.stat(tmp_path / "1.png").st_size
size_2 = os.stat(tmp_path / "2.png").st_size
assert size_2 < size_1
def test_png_quantization(test_images, tmp_path):
    # Note: Not sure if we should test this or pillow
im = np.load(test_images / "chelsea.npy")
iio.v3.imwrite(tmp_path / "1.png", im, plugin="pillow", bits=8)
iio.v3.imwrite(tmp_path / "2.png", im, plugin="pillow", bits=2)
size_1 = os.stat(tmp_path / "1.png").st_size
size_2 = os.stat(tmp_path / "2.png").st_size
assert size_2 < size_1
def test_png_16bit(test_images, tmp_path):
# 16b bit images
im = np.load(test_images / "chelsea.npy")[..., 0]
iio.v3.imwrite(
tmp_path / "1.png",
2 * im.astype(np.uint16),
plugin="pillow",
mode="I;16",
)
iio.v3.imwrite(tmp_path / "2.png", im, plugin="pillow", mode="L")
size_1 = os.stat(tmp_path / "1.png").st_size
size_2 = os.stat(tmp_path / "2.png").st_size
assert size_2 < size_1
im2 = iio.v3.imread(tmp_path / "2.png", plugin="pillow")
assert im2.dtype == np.uint8
im3 = iio.v3.imread(tmp_path / "1.png", plugin="pillow")
assert im3.dtype == np.int32
# Note: There was a test here referring to issue #352 and a `prefer_uint8`
# argument that was introduced as a consequence. This argument was default=true
# (for backwards compatibility) in the legacy plugin with the recommendation to
# set it to False. In the new API, we literally just wrap Pillow, so we match
# their behavior. Consequently this test was removed.
@pytest.mark.needs_internet
def test_png_remote():
# issue #202
url = "https://github.com/imageio/imageio-binaries/blob/master/test-images/chelsea.png?raw=true"
im = iio.v3.imread(url, plugin="pillow")
assert im.shape == (300, 451, 3)
def test_png_transparent_pixel(test_images):
# see issue #245
im = iio.v3.imread(
test_images / "imageio_issue246.png",
plugin="pillow",
mode="RGBA",
)
assert im.shape == (24, 30, 4)
def test_png_gamma_correction(test_images: Path):
# opens the file twice, but touches more parts of the API
im1 = iio.v3.imread(test_images / "kodim03.png", plugin="pillow")
im1_meta = iio.v3.immeta(
test_images / "kodim03.png", plugin="pillow", exclude_applied=False
)
im2 = iio.v3.imread(
test_images / "kodim03.png",
plugin="pillow",
apply_gamma=True,
)
# Test result depending of application of gamma
assert im1_meta["gamma"] < 1
assert im1.mean() < im2.mean()
assert im1.shape == (512, 768, 3)
assert im1.dtype == "uint8"
assert im2.shape == (512, 768, 3)
assert im2.dtype == "uint8"
def test_jpg_compression(test_images, tmp_path):
    # Note: Not sure if we should test this or pillow
im = np.load(test_images / "chelsea.npy")
iio.v3.imwrite(tmp_path / "1.jpg", im, plugin="pillow", quality=90)
iio.v3.imwrite(tmp_path / "2.jpg", im, plugin="pillow", quality=10)
size_1 = os.stat(tmp_path / "1.jpg").st_size
size_2 = os.stat(tmp_path / "2.jpg").st_size
assert size_2 < size_1
def test_exif_orientation(test_images, tmp_path):
from PIL.Image import Exif
im = np.load(test_images / "chelsea.npy")
    # original image has landscape format
assert im.shape[0] < im.shape[1]
im_flipped = np.rot90(im, -1)
exif_tag = Exif()
exif_tag[274] = 6 # Set Orientation to 6
iio.v3.imwrite(
tmp_path / "chelsea_tagged.png",
im_flipped,
plugin="pillow",
exif=exif_tag,
)
with iio.imopen(
tmp_path / "chelsea_tagged.png",
"r",
plugin="pillow",
) as f:
im_reloaded = f.read()
im_meta = f.get_meta()
# ensure raw image is now portrait
assert im_reloaded.shape[0] > im_reloaded.shape[1]
# ensure that the Exif tag is set in the file
assert "Orientation" in im_meta and im_meta["Orientation"] == 6
im_reloaded = iio.v3.imread(
tmp_path / "chelsea_tagged.png", plugin="pillow", rotate=True
)
assert np.array_equal(im, im_reloaded)
def test_gif_rgb_vs_rgba(test_images):
# Note: I don't understand the point of this test
im_rgb = iio.v3.imread(
test_images / "newtonscradle
|
deepmind/dm_control
|
dm_control/suite/reacher.py
|
Python
|
apache-2.0
| 4,233
| 0.004725
|
# Copyright 2017 The dm_control Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Reacher domain."""
import collections
from dm_control import mujoco
from dm_control.rl import control
from dm_control.suite import base
from dm_control.suite import common
from dm_control.suite.utils import randomizers
from dm_control.utils import containers
from dm_control.utils import rewards
import numpy as np
SUITE = containers.TaggedTasks()
_DEFAULT_TIME_LIMIT = 20
_BIG_TARGET = .05
_SMALL_TARGET = .015
def get_model_and_assets():
"""Returns a tuple containing the model XML string and a dict of assets."""
return common.read_model('reacher.xml'), common.ASSETS
@SUITE.add('benchmarking', 'easy')
def easy(time_limit=_DEFAULT_TIME_LIMIT, random=None, environment_kwargs=None):
"""Returns reacher with sparse reward with 5e-2 tol and randomized target."""
physics = Physics.from_xml_string(*get_model_and_assets())
task = Reacher(target_size=_BIG_TARGET, random=random)
environment_kwargs = environment_kwargs or {}
return control.Environment(
physics, task, time_limit=time_limit, **environment_kwargs)
@SUITE.add('benchmarking')
def hard(time_limit=_DEFAULT_TIME_LIMIT, random=None, environment_kwargs=None):
"""Returns reacher with sparse reward with 1e-2 tol and randomized target."""
physics = Physics.from_xml_string(*get_model_and_assets())
task = Reacher(target_size=_SMALL_TARGET, random=random)
environment_kwargs = environment_kwargs or {}
return control.Environment(
physics, task, time_limit=time_limit, **environment_kwargs)
class Physics(mujoco.Physics):
"""Physics simulation with additional features for the Reacher domain."""
def finger_to_target(self):
"""Returns the vector from target to finger in global coordinates."""
    return (self.named.data.geom_xpos['target', :2] -
            self.named.data.geom_xpos['finger', :2])
|
def finger_to_target_dist(self):
"""Returns the signed distance between the finger and target surface."""
return np.linalg.norm(self.finger_to_target())
class Reacher(base.Task):
"""A reacher `Task` to reach the target."""
def __init__(self, target_size, random=None):
"""Initialize an instance of `Reacher`.
Args:
      target_size: A `float`, tolerance to determine whether finger reached the
|
target.
random: Optional, either a `numpy.random.RandomState` instance, an
integer seed for creating a new `RandomState`, or None to select a seed
automatically (default).
"""
self._target_size = target_size
super().__init__(random=random)
def initialize_episode(self, physics):
"""Sets the state of the environment at the start of each episode."""
physics.named.model.geom_size['target', 0] = self._target_size
randomizers.randomize_limited_and_rotational_joints(physics, self.random)
# Randomize target position
angle = self.random.uniform(0, 2 * np.pi)
radius = self.random.uniform(.05, .20)
physics.named.model.geom_pos['target', 'x'] = radius * np.sin(angle)
physics.named.model.geom_pos['target', 'y'] = radius * np.cos(angle)
super().initialize_episode(physics)
def get_observation(self, physics):
"""Returns an observation of the state and the target position."""
obs = collections.OrderedDict()
obs['position'] = physics.position()
obs['to_target'] = physics.finger_to_target()
obs['velocity'] = physics.velocity()
return obs
def get_reward(self, physics):
radii = physics.named.model.geom_size[['target', 'finger'], 0].sum()
return rewards.tolerance(physics.finger_to_target_dist(), (0, radii))
|
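Both factory functions above return a control.Environment. A hedged usage sketch that loads the registered 'easy' reacher task through dm_control's suite loader and steps it with zero actions (loading via suite.load is an assumption about how this module is normally consumed):

import numpy as np
from dm_control import suite

# Load the 'easy' reacher task registered above and run a few zero-action steps.
env = suite.load(domain_name='reacher', task_name='easy')
action_spec = env.action_spec()
time_step = env.reset()
for _ in range(5):
    action = np.zeros(action_spec.shape, dtype=action_spec.dtype)
    time_step = env.step(action)
    print(time_step.reward, time_step.observation['to_target'])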
iotk/iochibity-java
|
site_scons/iotivityconfig/compiler/configuration.py
|
Python
|
epl-1.0
| 6,546
| 0.000458
|
# ------------------------------------------------------------------------
# Copyright 2015 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------
class Configuration:
"""Compiler-specific configuration abstract base class"""
def __init__(self, context):
"""
Initialize the Configuration object
|
Arguments:
context -- the scons configure context
"""
if type(self) is Configuration:
            raise TypeError('abstract class cannot be instantiated')
|
self._context = context # scons configure context
self._env = context.env # scons environment
def check_c99_flags(self):
"""
Check if command line flag is required to enable C99
support.
Returns 1 if no flag is required, 0 if no flag was
found, and the actual flag if one was found.
CFLAGS will be updated with appropriate C99 flag,
accordingly.
"""
return self._check_flags(self._c99_flags(),
self._c99_test_program(),
'.c',
'CFLAGS')
def check_cxx11_flags(self):
"""
Check if command line flag is required to enable C++11
support.
Returns 1 if no flag is required, 0 if no flag was
found, and the actual flag if one was found.
CXXFLAGS will be updated with appropriate C++11 flag,
accordingly.
"""
return self._check_flags(self._cxx11_flags(),
self._cxx11_test_program(),
'.cpp',
'CXXFLAGS')
def has_pthreads_support(self):
"""
Check if PThreads are supported by this system
Returns 1 if this system DOES support pthreads, 0
otherwise
"""
return self._context.TryCompile(self._pthreads_test_program(), '.c')
# --------------------------------------------------------------
# Check if flag is required to build the given test program.
#
# Arguments:
# test_flags -- list of flags that may be needed to build
# test_program
    #    test_program   -- program used to determine if one of the
    #                      given flags is required for a successful
# build
# test_extension -- file extension associated with the test
# program, e.g. '.cpp' for C++ and '.c' for C
# flags_key -- key used to retrieve compiler flags that may
# be updated by this check from the SCons
# environment
# --------------------------------------------------------------
def _check_flags(self,
test_flags,
test_program,
test_extension,
flags_key):
# Check if no additional flags are required.
ret = self._context.TryCompile(test_program,
test_extension)
        if ret == 0:
# Try flags known to enable compiler features needed by
# the test program.
last_flags = self._env[flags_key]
for flag in test_flags:
self._env.Append(**{flags_key : flag})
ret = self._context.TryCompile(test_program,
test_extension)
if ret:
# Found a flag!
return flag
else:
# Restore original compiler flags for next flag
# test.
self._env.Replace(**{flags_key : last_flags})
return ret
# ------------------------------------------------------------
# Return test program to be used when checking for basic C99
# support.
#
# Subclasses should implement this template method or use the
# default test program found in the DefaultConfiguration class
# through composition.
# ------------------------------------------------------------
def _c99_test_program(self):
raise NotImplementedError('unimplemented method')
# --------------------------------------------------------------
# Get list of flags that could potentially enable C99 support.
#
# Subclasses should implement this template method if flags are
# needed to enable C99 support.
# --------------------------------------------------------------
def _c99_flags(self):
raise NotImplementedError('unimplemented method')
# ------------------------------------------------------------
# Return test program to be used when checking for basic C++11
# support.
#
# Subclasses should implement this template method or use the
# default test program found in the DefaultConfiguration class
# through composition.
# ------------------------------------------------------------
def _cxx11_test_program(self):
raise NotImplementedError('unimplemented method')
# --------------------------------------------------------------
# Get list of flags that could potentially enable C++11 support.
#
# Subclasses should implement this template method if flags are
# needed to enable C++11 support.
# --------------------------------------------------------------
def _cxx11_flags(self):
raise NotImplementedError('unimplemented method')
# --------------------------------------------------------------
# Return a test program to be used when checking for PThreads
# support
#
# --------------------------------------------------------------
def _pthreads_test_program(self):
return """
#include <unistd.h>
#include <pthread.h>
int main()
{
#ifndef _POSIX_THREADS
# error POSIX Threads support not available
#endif
return 0;
}
"""
|
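Configuration above is an abstract base whose subclasses supply flag lists and tiny test programs through the template methods. An illustrative sketch of what a GCC-flavoured subclass might look like; the flag values and test programs are assumptions, not taken from the IoTivity sources:

class GccConfiguration(Configuration):
    """Illustrative subclass; actual flag sets vary per toolchain and version."""

    def _c99_test_program(self):
        # Smallest program exercising a C99-only construct (loop-scoped declaration).
        return """
        int main()
        {
            for (int i = 0; i < 1; ++i) { }
            return 0;
        }
        """

    def _c99_flags(self):
        return ['-std=c99', '-std=gnu99']

    def _cxx11_test_program(self):
        # 'auto' type deduction requires C++11.
        return """
        int main()
        {
            auto x = 0;
            return x;
        }
        """

    def _cxx11_flags(self):
        return ['-std=c++11', '-std=gnu++11']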
iw3hxn/LibrERP
|
dt_price_security/models/product.py
|
Python
|
agpl-3.0
| 3,879
| 0.005414
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import decimal_precision as dp
from openerp.osv import orm, fields
from tools.translate import _
class product_product(orm.Model):
_name = 'product.product'
_inherit = 'product.product'
_columns = {
'list_price_copy': fields.related('list_price',
|
type="float", readonly=True, store=False, string='Sale Price', digits_compute=dp.get_precision('Sale Price'),
help='Base price for computing the customer price. Sometimes called the catalog price.'),
'can_modify_prices': fields.boolean('Can modify prices',
help='If checked all users can modify the price of this product in a sale order or invoice.'),
}
_defaults = {
|
'can_modify_prices': False,
}
def onchange_list_price(self, cr, uid, ids, list_price, uos_coeff, context=None):
return {'value': {'list_price_copy': list_price}}
def fields_get(self, cr, uid, allfields=None, context=None):
if not context:
context = {}
group_obj = self.pool['res.groups']
if group_obj.user_in_group(cr, uid, uid, 'dt_price_security.can_modify_prices', context=context):
context['can_modify_prices'] = True
else:
context['can_modify_prices'] = False
ret = super(product_product, self).fields_get(cr, uid, allfields=allfields, context=context)
if group_obj.user_in_group(cr, uid, uid, 'dt_price_security.can_modify_prices', context=context):
if 'list_price_copy' in ret:
ret['list_price_copy']['invisible'] = True
else:
if 'list_price' in ret:
ret['list_price']['invisible'] = True
if group_obj.user_in_group(cr, uid, uid, 'dt_price_security.hide_purchase_prices', context=context):
if 'standard_price' in ret:
ret['standard_price']['invisible'] = True
if 'cost_method' in ret:
ret['cost_method']['invisible'] = True
if not group_obj.user_in_group(cr, uid, uid, 'dt_price_security.modify_warehouse_price', context=context):
if 'standard_price' in ret:
ret['standard_price']['readonly'] = True
if 'cost_method' in ret:
ret['cost_method']['readonly'] = True
return ret
def write(self, cr, uid, ids, vals, context=None):
if 'list_price' in vals:
group_obj = self.pool['res.groups']
if not group_obj.user_in_group(cr, uid, uid, 'dt_price_security.can_modify_prices', context=context):
title = _('Violation of permissions')
message = _('You do not have the necessary permissions to modify the price of the products')
raise orm.except_orm(title, message)
return super(product_product, self).write(cr, uid, ids, vals, context=context)
|
nataddrho/DigiCue-USB
|
Python3/src/venv/Lib/site-packages/pip/_internal/distributions/sdist.py
|
Python
|
mit
| 4,077
| 0
|
import logging
from pip._internal.build_env import BuildEnvironment
from pip._internal.distributions.base import AbstractDistribution
from pip._internal.exceptions import InstallationError
from pip._internal.utils.subprocess import runner_with_spinner_message
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import Set, Tuple
from pip._vendor.pkg_resources import Distribution
from pip._internal.index.package_finder import PackageFinder
logger = logging.getLogger(__name__)
class SourceDistribution(AbstractDistribution):
"""Represents a source distribution.
The preparation step for these needs metadata for the packages to be
generated, either using PEP 517 or using the legacy `setup.py egg_info`.
"""
def get_pkg_resources_distribution(self):
# type: () -> Distribution
return self.req.get_dist()
def prepare_distribution_metadata(self, finder, build_isolation):
# type: (PackageFinder, bool) -> None
# Load pyproject.toml, to determine whether PEP 517 is to be used
self.req.load_pyproject_toml()
# Set up the build isolation, if this requirement should be isolated
should_isolate = self.req.use_pep517 and build_isolation
if should_isolate:
self._setup_isolation(finder)
self.req.prepare_metadata()
def _setup_isolation(self, finder):
# type: (PackageFinder) -> None
        def _raise_conflicts(conflicting_with, conflicting_reqs):
|
# type: (str, Set[Tuple[str, str]]) -> None
format_string = (
"Some build dependencies for {requirement} "
"conflict with {conflicting_with}: {description}."
)
error_message = format_string.format(
requirement=self.req,
                conflicting_with=conflicting_with,
|
description=', '.join(
f'{installed} is incompatible with {wanted}'
                    for installed, wanted in sorted(conflicting_reqs)
)
)
raise InstallationError(error_message)
# Isolate in a BuildEnvironment and install the build-time
# requirements.
pyproject_requires = self.req.pyproject_requires
assert pyproject_requires is not None
self.req.build_env = BuildEnvironment()
self.req.build_env.install_requirements(
finder, pyproject_requires, 'overlay',
"Installing build dependencies"
)
conflicting, missing = self.req.build_env.check_requirements(
self.req.requirements_to_check
)
if conflicting:
_raise_conflicts("PEP 517/518 supported requirements",
conflicting)
if missing:
logger.warning(
"Missing build requirements in pyproject.toml for %s.",
self.req,
)
logger.warning(
"The project does not specify a build backend, and "
"pip cannot fall back to setuptools without %s.",
" and ".join(map(repr, sorted(missing)))
)
# Install any extra build dependencies that the backend requests.
# This must be done in a second pass, as the pyproject.toml
# dependencies must be installed before we can call the backend.
with self.req.build_env:
runner = runner_with_spinner_message(
"Getting requirements to build wheel"
)
backend = self.req.pep517_backend
assert backend is not None
with backend.subprocess_runner(runner):
reqs = backend.get_requires_for_build_wheel()
conflicting, missing = self.req.build_env.check_requirements(reqs)
if conflicting:
_raise_conflicts("the backend dependencies", conflicting)
self.req.build_env.install_requirements(
finder, missing, 'normal',
"Installing backend dependencies"
)
|
legendlee1314/ooni
|
hdfs2mongo_distributed.py
|
Python
|
mit
| 3,868
| 0.002844
|
# Author: legend
# Mail: legendlee1314@gmail.com
# File: hdfs2mongo_distributed.py
#!/usr/bin/python
# -*- coding: utf-8 -*-
from bs4 import BeautifulSoup as bs
from bson.json_util import loads
from pymongo import MongoClient as mc
import pymongo
import pydoop.hdfs as hdfs
import zmq
import hashlib
import os
import random
import re
import sys
import time
server_tcp = "tcp://*:20003"
client_tcp = "tcp://172.16.104.62:20003"
host_name = os.uname()[1]
mongo_host = [
'mongodb://172.16.104.9:20001',
'mongodb://172.16.104.1:20001',
'mongodb://172.16.104.13:20001',
'mongodb://172.16.104.18:20001'
]
db = 'hdb'
username = 'hdb_admin'
def xml_from_hdfs(url):
with hdfs.open(url, "r") as f:
lines = f.read().strip().split('\n')
docs, doc = [], None
for line in lines:
if line.startswith('<doc'):
doc = line
elif line.startswith('</doc>'):
docs.append(doc + line)
else:
#line = line.replace('&', '').replace('"', "'")
doc += line.replace('"', "'")
for doc in docs:
dom = bs(doc).find('doc')
doc = {}
try:
doc['id'] = dom.attrs['id']
doc['url'] = dom.attrs['url']
doc['title'] = dom.attrs['title']
except AttributeError, e:
continue
doc['content'] = dom.text
doc['md5'] = hashlib.md5(str(doc)).hexdigest()
yield doc
def write_to_mongo(docs, collection, dup=False):
assert docs and collection
client = mc(mongo_host[random.randint(0, 2)])
database = client[db]
#database.authenticate(username, password=username)
collection = database[collection]
count = 0
for doc in docs:
if dup:
try:
collection.insert_one(doc)
|
except pymongo.errors.DuplicateKeyError, e:
print e
elif collection.find_one({'md5': doc['md5']}) is None:
collection.insert_one(doc)
count += 1
time.sleep(1)
print host_name + ' write ' + str(count)
def client():
    print 'Client...'
|
#docs = xml_from_hdfs('/datasets/corpus/enwiki-11g')
#write_to_mongo(docs, 'enwiki', True)
context = zmq.Context()
socket = context.socket(zmq.REQ)
socket.connect(client_tcp)
socket.send("connect:" + host_name)
message = socket.recv()
if message != 'connected':
return
print 'Connected...'
for request in range(100):
socket.send("read:" + host_name)
message = socket.recv()
if message.startswith("done"):
return
else:
f = message.split('>')[1]
print f
docs = xml_from_hdfs(f)
write_to_mongo(docs, 'enwiki', True)
def server():
print 'Server...'
context = zmq.Context()
socket = context.socket(zmq.REP)
socket.bind(server_tcp)
client_list = []
hdfs_url = '/datasets/corpus/enwiki-11g'
file_list = hdfs.ls(hdfs_url)
print len(file_list)
while True:
message = socket.recv()
if message.startswith("connect"):
client_list.append(message.split(':')[1])
socket.send("connected")
elif message.startswith("read"):
client = message.split(':')[1]
print client
if len(file_list) == 0:
socket.send("done")
client_list.remove(client)
if len(client_list) == 0:
return
if client in client_list:
f = file_list.pop()
print len(file_list)
print f
socket.send_string("file>" + f)
if __name__ == "__main__":
if sys.argv[1] == 'server':
server()
else:
client()
|
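server() and client() above coordinate work over a ZeroMQ REQ/REP socket with a small text protocol ('connect:', 'read:', 'file>', 'done'). A Python 3 sketch of the client side of that handshake, assuming a compatible server is already listening on the assumed endpoint:

import zmq

context = zmq.Context()
socket = context.socket(zmq.REQ)
socket.connect('tcp://127.0.0.1:20003')  # assumed endpoint

# Announce this worker, then ask for one unit of work.
socket.send_string('connect:worker-1')
if socket.recv_string() == 'connected':
    socket.send_string('read:worker-1')
    reply = socket.recv_string()
    if reply.startswith('file>'):
        print('assigned file:', reply.split('>', 1)[1])
    elif reply == 'done':
        print('no work left')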