Dataset schema (one row per code sample):

| column | type | range |
|---|---|---|
| repo_name | string | length 5-100 |
| path | string | length 4-231 |
| language | string | 1 class |
| license | string | 15 classes |
| size | int64 | 6-947k |
| score | float64 | 0-0.34 |
| prefix | string | length 0-8.16k |
| middle | string | length 3-512 |
| suffix | string | length 0-8.17k |

Each sample below is shown as a metadata line (repo_name | path | language | license | size | score) followed by the file contents.
petrjasek/superdesk-core | superdesk/etree.py | Python | agpl-3.0 | 5,846 | 0.002053
# -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2017 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
from lxml import etree # noqa
from lxml.etree import ParseError # noqa
from lxml import html
from superdesk import config
# from https://developer.mozilla.org/en-US/docs/Web/HTML/Block-level_elements
BLOCK_ELEMENTS = (
"address",
"article",
"aside",
"blockquote",
"canvas",
"dd",
"div",
"dl",
"fieldset",
"figcaption",
"figure",
"footer",
"form",
"h1",
"h2",
"h3",
"h4",
"h5",
"h6",
"header",
"hgroup",
"hr",
"li",
"main",
"nav",
"noscript",
"ol",
"output",
"p",
"pre",
"section",
"table",
"tfoot",
"ul",
"video",
)
# from https://www.w3.org/TR/html/syntax.html#void-elements
VOID_ELEMENTS = (
"area",
"base",
"br",
"col",
"embed",
"hr",
"img",
"input",
"keygen",
"link",
"menuitem",
"meta",
"param",
"source",
"track",
"wbr",
)
def fix_html_void_elements(element):
"""Use self-closing elements for HTML void elements, and start/end pairs otherwise
:param element: Element to fix
:type element: lxml.etree.Element
:return: fixed Element
"""
# we want self-closing for HTML void elements and start/end tags otherwise
# so we set element.text to None for void ones, and empty string otherwise
for e in element.xpath("//*[not(node())]"):
e.text = None if e.tag in VOID_ELEMENTS else ""
return element
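# Note: lxml serializes an empty element whose text is None as <p/>, and one
# whose text is "" as <p></p>; the loop above relies on that to pick the
# right serialization for void vs. non-void elements.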
def parse_html(html, content="xml", lf_on_block=False, space_on_elements=False, space=" "):
"""Parse element and return etreeElement
<div> element is added around the HTML
recovery is used in case of bad markup
:param str html: HTML markup
:param str content: use 'xml' for XHTML or non html XML, and 'html' for HTML or if you are unsure
:param bool lf_on_block: if True, add a line feed on block elements' tail
:param bool space_on_elements: if True, add a space on each element's tail
mainly used to count words with non HTML markup
:param str space: space string which is used when `space_on_elements` is enabled
:return etree.Element: parsed element
"""
if not isinstance(html, str):
raise ValueError("a string is expected")
if not html:
return etree.Element("div")
if content == "xml":
# to preserve 'carriage return' otherwise it gets stripped.
html = html.replace("\r", " ")
parser = etree.XMLParser(recover=True, remove_blank_text=True)
root = etree.fromstring("<div>" + html + "</div>", parser)
elif content == "html":
parser = etree.HTMLParser(recover=True, remove_blank_text=True)
root = etree.fromstring(html, parser)
if root is None:
root = etree.Element("div")
else:
div = etree.Element("div")
# we unwrap elements in <head> and <body>
# <script> can be used in embed, and the parser will move them to <head>
# so we need both <head> and <body>
for elt in root:
div.extend(elt)
root = div
else:
raise ValueError("invalid content: {}".format(content))
if lf_on_block:
for elem in root.iterfind(".//"):
# append \n to the tail
if elem.tag in BLOCK_ELEMENTS:
elem.tail = (elem.tail or "") + "\n"
# prepend \n to the tail
elif elem.tag in ("br",):
elem.tail = "\n" + (elem.tail or "")
if space_on_elements:
for elem in root.iterfind(".//"):
elem.tail = (elem.tail or "") + space
return root
def to_string(elem, encoding="unicode", method="xml", remove_root_div=True, pretty_print=False):
"""Convert Element to string
:param etree.Element elem: element to convert
:param str encoding: encoding to use (same as for etree.tostring)
:param str method: method to use (same as for etree.tostring)
:param bool remove_root_div: if True remove surrounding <div> which is added by parse_html
:return str: converted element
"""
string = etree.tostring(elem, encoding=encoding, method=method, pretty_print=pretty_print)
if remove_root_div:
if encoding == "unicode":
div_start = "<div>"
div_end = "</div>"
else:
div_start = b"<div>"
div_end = b"</div>"
if string.startswith(div_start) and string.endswith(div_end):
return string[len(div_start) : -len(div_end)]
return string
def clean_html(elem):
"""Clean HTML element by removing unknown or unsafe elements/attributes
use config.HTML_TAGS_WHITELIST as list of known tags (i.e. tags understood by client)
:param etree._Element elem: element to clean (will be converted to HtmlElement if it is not already one)
:return html.HtmlElement: cleaned element
"""
if not isinstance(elem, html.HtmlElement):
elem = html.fromstring(etree.tostring(elem))
safe_attrs = set(html.defs.safe_attrs)
safe_attrs.remove("class")
cleaner = html.clean.Cleaner(
allow_tags=config.HTML_TAGS_WHITELIST, remove_unknown_tags=False, safe_attrs=safe_attrs
)
return cleaner.clean_html(elem)
def clean_html_str(html_str):
"""Clean HTML like clean_html but using a string as input
:param str html_str: raw HTML to clean
:return str: cleaned HTML
"""
html_elt = parse_html(html_str, "html")
html_elt = clean_html(html_elt)
return to_string(html_elt, method="html")
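# Usage sketch (illustrative fragment, not from the original file):
#
#     root = parse_html("<p>hello<br/>world</p>", content="xml")
#     to_string(fix_html_void_elements(root))  # -> '<p>hello<br/>world</p>'
#
# parse_html wraps the markup in a <div>, and to_string strips that wrapper
# again via remove_root_div, so fragments round-trip cleanly.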
SasView/sasview | src/sas/qtgui/Perspectives/Fitting/UnitTesting/FittingUtilitiesTest.py | Python | bsd-3-clause | 11,524 | 0.003818
import sys
import unittest
from unittest.mock import MagicMock
from PyQt5 import QtGui, QtCore
from sas.qtgui.Plotting.PlotterData import Data1D
from sas.qtgui.Plotting.PlotterData import Data2D
from UnitTesting.TestUtils import WarningTestNotImplemented
from sasmodels import generate
from sasmodels import modelinfo
from sasmodels.sasview_model import load_standard_models
# Tested module
from sas.qtgui.Perspectives.Fitting import FittingUtilities
from sas.qtgui.Perspectives.Fitting.FittingUtilities import checkConstraints
class FittingUtilitiesTest(unittest.TestCase):
'''Test the Fitting Utilities functions'''
def setUp(self):
'''Empty'''
pass
def tearDown(self):
'''Empty'''
pass
def testReplaceShellName(self):
"""
Test the utility function for string manipulation
"""
param_name = "test [123]"
value = "replaced"
result = FittingUtilities.replaceShellName(param_name, value)
self.assertEqual(result, "test replaced")
# Assert!
param_name = "no brackets"
with self.assertRaises(AssertionError):
result = FittingUtilities.replaceShellName(param_name, value)
def testGetIterParams(self):
"""
Assure the right multishell parameters are returned
"""
# Use a single-shell parameter
model_name = "barbell"
kernel_module = generate.load_kernel_module(model_name)
barbell_parameters = modelinfo.make_parameter_table(getattr(kernel_module, 'parameters', []))
params = FittingUtilities.getIterParams(barbell_parameters)
# returns empty list
self.assertEqual(params, [])
# Use a multi-shell parameter
model_name = "core_multi_shell"
kernel_module = generate.load_kernel_module(model_name)
multishell_parameters = modelinfo.make_parameter_table(getattr(kernel_module, 'parameters', []))
params = FittingUtilities.getIterParams(multishell_parameters)
# returns a non-empty list
self.assertNotEqual(params, [])
self.assertIn('sld', str(params))
self.assertIn('thickness', str(params))
def testGetMultiplicity(self):
"""
Assure more multishell parameters are evaluated correctly
"""
# Use a single-shell parameter
model_name = "barbell"
kernel_module = generate.load_kernel_module(model_name)
barbell_parameters = modelinfo.make_parameter_table(getattr(kernel_module, 'parameters', []))
param_name, param_length = FittingUtilities.getMultiplicity(barbell_parameters)
# returns nothing
self.assertEqual(param_name, "")
self.assertEqual(param_length, 0)
# Use a multi-shell parameter
model_name = "core_multi_shell"
kernel_module = generate.load_kernel_module(model_name)
multishell_parameters = modelinfo.make_parameter_table(getattr(kernel_module, 'parameters', []))
param_name, param_length = FittingUtilities.getMultiplicity(multishell_parameters)
self.assertEqual(param_name, "n")
self.assertEqual(param_length, 10)
def testAddParametersToModel(self):
"""
Checks the QModel update from Sasmodel parameters
"""
# Use a single-shell parameter
model_name = "barbell"
models = load_standard_models()
kernel_module = generate.load_kernel_module(model_name)
kernel_module_o = None
for model in models:
if model.name == model_name:
kernel_module_o = model()
self.assertIsNotNone(kernel_module_o)
barbell_parameters = modelinfo.make_parameter_table(getattr(kernel_module, 'parameters', []))
params = FittingUtilities.addParametersToModel(barbell_parameters, kernel_module_o, True)
# Test the resulting model
self.assertEqual(len(params), 7)
self.assertEqual(len(params[0]), 5)
self.assertTrue(params[0][0].isCheckable())
self.assertEqual(params[0][0].text(), "sld")
self.assertEqual(params[1][0].text(), "sld_solvent")
# Use a multi-shell parameter to see that the method includes shell params
model_name = "core_multi_shell"
kernel_module = generate.load_kernel_module(model_name)
kernel_module_o = None
for model in models:
if model.name == model_name:
kernel_module_o = model()
self.assertIsNotNone(kernel_module_o)
multi_parameters = modelinfo.make_parameter_table(getattr(kernel_module, 'parameters', []))
params = FittingUtilities.addParametersToModel(multi_parameters, kernel_module_o, False)
# Test the resulting model
self.assertEqual(len(params), 3)
self.assertEqual(len(params[0]), 5)
self.assertTrue(params[0][0].isCheckable())
self.assertEqual(params[0][0].text(), "sld_core")
self.assertEqual(params[1][0].text(), "radius")
def testAddSimpleParametersToModel(self):
"""
Checks the QModel update from Sasmodel parameters - no polydisp
"""
# Use a multi-shell parameter to see that the method doesn't include shells
model_name = "core_multi_shell"
kernel_module = generate.load_kernel_module(model_name)
models = load_standard_models()
kernel_module_o = None
for model in models:
if model.name == model_name:
kernel_module_o = model()
self.assertIsNotNone(kernel_module_o)
multi_parameters = modelinfo.make_parameter_table(getattr(kernel_module, 'parameters', []))
params = FittingUtilities.addParametersToModel(multi_parameters, kernel_module_o, True)
# Test the resulting model
self.assertEqual(len(params), 3)
self.assertEqual(len(params[0]), 5)
self.assertTrue(params[0][0].isCheckable())
self.assertEqual(params[0][0].text(), "sld_core")
self.assertEqual(params[1][0].text(), "radius")
def testAddCheckedListToModel(self):
"""
Test for inserting a checkboxed item into a QModel
"""
model = QtGui.QStandardItemModel()
params = ["row1", "row2", "row3"]
FittingUtilities.addCheckedListToModel(model, params)
# Check the model
self.assertEqual(model.rowCount(), 1)
self.assertTrue(model.item(0).isCheckable())
self.assertEqual(model.item(0, 0).text(), params[0])
self.assertEqual(model.item(0, 1).text(), params[1])
self.assertEqual(model.item(0, 2).text(), params[2])
def testAddShellsToModel(self):
"""
Test for inserting a list of QItems into a model
"""
# Use a multi-shell parameter to see that the method doesn't include shells
model_name = "core_multi_shell"
kernel_module = generate.load_kernel_module(model_name)
multi_parameters = modelinfo.make_parameter_table(getattr(kernel_module, 'parameters', []))
model = QtGui.QStandardItemModel()
index = 2
FittingUtilities.addShellsToModel(multi_parameters, model, index)
# There should be index*len(multi_parameters) new rows
self.assertEqual(model.rowCount(), 4)
model = QtGui.QStandardItemModel()
index = 5
FittingUtilities.addShellsToModel(multi_parameters, model, index)
self.assertEqual(model.rowCount(), 10)
self.assertEqual(model.item(1).child(0).text(), "Polydispersity")
self.assertEqual(model.item(1).child(0).child(0).text(), "Distribution")
self.assertEqual(model.item(1).child(0).child(0,1).text(), "40.0")
def testCalculate1DChi2(self):
"""
Test the chi2 calculator for Data1D
"""
reference_data = Data1D(x=[0.1, 0.2], y=[0.0, 0.0])
# 1. identical data
current_data = Data1D(x=[0.1, 0.2], y=[0.0, 0.0])
weights = None
chi = FittingUtilities.calculateChi2(reference_data, current_data, weights)
# Should be zero
self.assertAlmostEqual(chi, 0.0, 8)
# 2. far dat
szepeviktor/debian-server-tools | mail/mx-check/hubspot-free-email-domains.py | Python | mit | 772 | 0.002591
#!/usr/bin/env python3
#
# List HubSpot's free email domains.
from urllib.request import urlopen
# pip3 install --user beautifulsoup4
# https://www.crummy.com/software/BeautifulSoup/bs4/doc/#strings-and-stripped-strings
from bs4 import BeautifulSoup
URL = "https://knowledge.hubspot.com/articles/kcs_article/forms/what-domains-are-blocked-when-using-the-forms-email-domains-to-block-feature"
page = urlopen(URL)
html = page.read()
soup = BeautifulSoup(html, "html.parser")
# Open an HTML file
#with open("hubspot.html") as fp:
# soup = BeautifulSoup(fp, "html.parser")
# Original CSS selector: "#post-body span > p:not(1)"
ps = soup.select("#post-body span > p")
# Skip first paragraph
for p in ps[1:]:
for domain in p.stripped_strings:
print(domain)
larrybradley/astropy-helpers | astropy_helpers/version_helpers.py | Python | bsd-3-clause | 9,639 | 0
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Utilities for generating the version string for Astropy (or an affiliated
package) and the version.py module, which contains version info for the
package.
Within the generated astropy.version module, the `major`, `minor`, and `bugfix`
variables hold the respective parts of the version number (bugfix is '0' if
absent). The `release` variable is True if this is a release, and False if this
is a development version of astropy. For the actual version string, use::
from astropy.version import version
or::
from astropy import __version__
"""
from __future__ import division
import datetime
import imp
import os
import pkgutil
import sys
from distutils import log
import pkg_resources
from . import git_helpers
from .distutils_helpers import is_distutils_display_option
from .utils import invalidate_caches
PY3 = sys.version_info[0] == 3
def _version_split(version):
"""
Split a version string into major, minor, and bugfix numbers. If any of
those numbers are missing the default is zero. Any pre/post release
modifiers are ignored.
Examples
========
>>> _version_split('1.2.3')
(1, 2, 3)
>>> _version_split('1.2')
(1, 2, 0)
>>> _version_split('1.2rc1')
(1, 2, 0)
>>> _version_split('1')
(1, 0, 0)
>>> _version_split('')
(0, 0, 0)
"""
parsed_version = pkg_resources.parse_version(version)
if hasattr(parsed_version, 'base_version'):
# New version parsing for setuptools >= 8.0
if parsed_version.base_version:
parts = [int(part)
for part in parsed_version.base_version.split('.')]
else:
parts = []
else:
parts = []
for part in parsed_version:
if part.startswith('*'):
# Ignore any .dev, a, b, rc, etc.
break
parts.append(int(part))
if len(parts) < 3:
parts += [0] * (3 - len(parts))
# In principle a version could have more parts (like 1.2.3.4) but we only
# support <major>.<minor>.<micro>
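# (e.g. _version_split('1.2.3.4') would return (1, 2, 3))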
return tuple(parts[:3])
# This is used by setup.py to create a new version.py - see that file for
# details. Note that the imports have to be absolute, since this is also used
# by affiliated packages.
_FROZEN_VERSION_PY_TEMPLATE = """
# Autogenerated by {packagetitle}'s setup.py on {timestamp!s}
from __future__ import unicode_literals
import datetime
{header}
major = {major}
minor = {minor}
bugfix = {bugfix}
release = {rel}
timestamp = {timestamp!r}
debug = {debug}
try:
from ._compiler import compiler
except ImportError:
compiler = "unknown"
try:
from .cython_version import cython_version
except ImportError:
cython_version = "unknown"
"""[1:]
_FROZEN_VERSION_PY_WITH_GIT_HEADER = """
{git_helpers}
_packagename = "{packagename}"
_last_generated_version = "{verstr}"
_last_githash = "{githash}"
# Determine where the source code for this module
# lives. If __file__ is not a filesystem path then
# it is assumed not to live in a git repo at all.
if _get_repo_path(__file__, levels=len(_packagename.split('.'))):
version = update_git_devstr(_last_generated_version, path=__file__)
githash = get_git_devstr(sha=True, show_warning=False,
path=__file__) or _last_githash
else:
# The file does not appear to live in a git repo so don't bother
# invoking git
version = _last_generated_version
githash = _last_githash
"""[1:]
_FROZEN_VERSION_PY_STATIC_HEADER = """
version = "{verstr}"
githash = "{githash}"
"""[1:]
def _get_version_py_str(packagename, version, githash, release, debug,
uses_git=True):
timestamp = datetime.datetime.now()
major, minor, bugfix = _version_split(version)
if packagename.lower() == 'astropy':
packagetitle = 'Astropy'
else:
packagetitle = 'Astropy-affiliated package ' + packagename
header = ''
if uses_git:
header = _generate_git_header(packagename, version, githash)
elif not githash:
# _generate_git_header will already generate a new git hash for us, but
# for creating a new version.py for a release (even if uses_git=False)
# we still need to get the githash to include in the version.py
# See https://github.com/astropy/astropy-helpers/issues/141
githash = git_helpers.get_git_devstr(sha=True, show_warning=True)
if not header: # If _generate_git_header fails it returns an empty string
header = _FROZEN_VERSION_PY_STATIC_HEADER.format(verstr=version,
githash=githash)
return _FROZEN_VERSION_PY_TEMPLATE.format(packagetitle=packagetitle,
timestamp=timestamp,
header=header,
major=major,
minor=minor,
bugfix=bugfix,
rel=release, debug=debug)
def _generate_git_header(packagename, version, githash):
"""
Generates a header to the version.py module that includes utilities for
probing the git repository for updates (to the current git hash, etc.)
These utilities should only be available in development versions, and not
in release builds.
If this fails for any reason an empty string is returned.
"""
loader = pkgutil.get_loader(git_helpers)
source = loader.get_source(git_helpers.__name__) or ''
source_lines = source.splitlines()
if not source_lines:
log.warn('Cannot get source code for astropy_helpers.git_helpers; '
'git support disabled.')
return ''
idx = 0
for idx, line in enumerate(source_lines):
if line.startswith('# BEGIN'):
break
git_helpers_py = '\n'.join(source_lines[idx + 1:])
if PY3:
verstr = version
else:
# In Python 2 don't pass in a unicode string; otherwise verstr will
# be represented with u'' syntax which breaks on Python 3.x with x
# < 3. This is only an issue when developing on multiple Python
# versions at once
verstr = version.encode('utf8')
new_githash = git_helpers.get_git_devstr(sha=True, show_warning=False)
if new_githash:
githash = new_githash
return _FROZEN_VERSION_PY_WITH_GIT_HEADER.format(
git_helpers=git_helpers_py, packagename=packagename,
verstr=verstr, githash=githash)
def generate_version_py(packagename, version, release=None, debug=None,
uses_git=True):
"""Regenerate the version.py module if necessary."""
try:
version_module = get_pkg_version_module(packagename)
try:
last_generated_version = version_module._last_generated_version
except AttributeError:
last_generated_version = version_module.version
try:
last_githash = version_module._last_githash
except AttributeError:
last_githash = version_module.githash
current_release = version_module.release
current_debug = version_module.debug
except ImportError:
version_module = None
last_generated_version = None
last_githash = None
current_release = None
current_debug = None
if release is None:
# Keep whatever the current value is, if it exists
release = bool(current_release)
if debug is None:
# Likewise, keep whatever the current value is, if it exists
debug = bool(current_debug)
version_py = os.path.join(packagename, 'version.py')
if (last_generated_version != version or current_release != release or
current_debug != debug):
if '-q' not in sys.argv and '--quiet' not in sys.argv:
log.set_threshold(log.INFO)
if is_distutils_display_option():
# Always silence unnecessary log messages when display options are
#
tapomayukh/projects_in_python | classification/Classification_with_CRF/old_crfsuite_package/old code/feature_conversion.py | Python | mit | 5,099 | 0.016278
#!/usr/bin/env python
import numpy as np
# Create features for tagging a sequence.
def create_features(f, g, history):
tempf = []
tempa = []
tempm = []
for line in f:
if line != '\n':
idstr = line.split()[3]
if line == '\n':
templen = np.size(tempf,0)
if templen == 0:
print "do nothing"
else:
j = 0
while (j < (templen-2*history)): # No. of items in a sequence
g.write(idstr +' ') # Label
# All single features
for i in range(2*history):
if (i+j) < templen:
g.write('f' +str([i-history]) + '=f')
if (i-history) < 0:
g.write('n' + str(abs(i-history)) + ':' + tempf[i+j] + ' ')
else:
g.write(str(i-history) + ':' + tempf[i+j] + ' ')
for i in range(2*history):
if (i+j) < templen:
g.write('a' +str([i-history]) + '=a')
if (i-history) < 0:
g.write('n' + str(abs(i-history)) + ':' + tempa[i+j] + ' ')
else:
g.write(str(i-history) + ':' + tempa[i+j] + ' ')
for i in range(2*history):
if (i+j) < templen:
g.write('m' +str([i-history]) + '=m')
if (i-history) < 0:
g.write('n' + str(abs(i-history)) + ':' + tempm[i+j] + ' ')
else:
g.write(str(i-history) + ':' + tempm[i+j] + ' ')
#All combined features (double)
for i in range(2*history):
if (i+j+1) < templen:
g.write('f' +str([i-history]) + '|f' +str([i-history+1]) + '=f')
if (i-history) < 0 and (i-history+1) < 0:
g.write('n' + str(abs(i-history)) + '|fn' +str(abs(i-history+1)) + ':' + str(float(tempf[i+j])/float(tempf[i+j+1])) + ' ')
elif (i-history) < 0 and (i-history+1) == 0:
g.write('n' + str(abs(i-history)) + '|f' +str(abs(i-history+1)) + ':' + str(float(tempf[i+j])/float(tempf[i+j+1])) + ' ')
else:
g.write(str(i-history) + '|f' +str(abs(i-history+1)) + ':' + str(float(tempf[i+j])/float(tempf[i+j+1])) + ' ')
for i in range(2*history):
if (i+j+1) < templen:
g.write('a' +str([i-history]) + '|a' +str([i-history+1]) + '=a')
if (i-history) < 0 and (i-history+1) < 0:
g.write('n' + str(abs(i-history)) + '|an' +str(abs(i-history+1)) + ':' + str(float(tempa[i+j])/float(tempa[i+j+1])) + ' ')
elif (i-history) < 0 and (i-history+1) == 0:
g.write('n' + str(abs(i-history)) + '|a' +str(abs(i-history+1)) + ':' + str(float(tempa[i+j])/float(tempa[i+j+1])) + ' ')
else:
g.write(str(i-history) + '|a' +str(abs(i-history+1)) + ':' + str(float(tempa[i+j])/float(tempa[i+j+1])) + ' ')
for i in range(2*history):
if (i+j+1) < templen:
g.write('m' +str([i-history]) + '|m' +str([i-history+1]) + '=m')
if (i-history) < 0 and (i-history+1) < 0:
g.write('n' + str(abs(i-history)) + '|mn' +str(abs(i-history+1)) + ':' + str(float(tempm[i+j])/float(tempm[i+j+1])) + ' ')
elif (i-history) < 0 and (i-history+1) == 0:
g.write('n' + str(abs(i-history)) + '|m' +str(abs(i-history+1)) + ':' + str(float(tempm[i+j])/float(tempm[i+j+1])) + ' ')
else:
g.write(str(i-history) + '|m' +str(abs(i-history+1)) + ':' + str(float(tempm[i+j])/float(tempm[i+j+1])) + ' ')
g.write('\n') # go to next item in the sequence (Each line is one item)
j=j+1
g.write('\n') # go to next sequence
tempf = []
tempa = []
tempm = []
else:
tempf.append(line.split()[0])
tempa.append(line.split()[1])
tempm.append(line.split()[2])
if __name__ == '__main__':
# For Training
f = open('train.txt', 'r')
g = open('train.crfsuite.txt', 'w')
create_features(f,g,5)
# For Testing
ftest = open('test.txt', 'r')
gtest = open('test.crfsuite.txt', 'w')
create_features(ftest,gtest,5)
mohierf/mod-webui | module/plugins/eltdetail/eltdetail.py | Python | agpl-3.0 | 2,612 | 0.001149
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (C) 2009-2012:
# Gabes Jean, naparuba@gmail.com
# Gerhard Lausser, Gerhard.Lausser@consol.de
# Gregory Starck, g.starck@gmail.com
# Hartmut Goebel, h.goebel@goebel-consult.de
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
import time
# Will be populated by the UI with its own value
app = None
# Host element view
def show_host(host_name):
# OK, we can look it up
user = app.bottle.request.environ['USER']
h = app.datamgr.get_host(host_name, user) or app.redirect404()
# Set hostgroups level ...
app.datamgr.set_hostgroups_level(user)
# Get graph data. By default, show last 4 hours
now = int(time.time())
graphstart = int(app.request.GET.get('graphstart', str(now - 4 * 3600)))
graphend = int(app.request.GET.get('graphend', str(now)))
return {
'elt': h,
'graphstart': graphstart, 'graphend': graphend,
'configintervallength': app.datamgr.get_configuration_parameter('interval_length')
}
# Service element view
def show_service(host_name, service):
user = app.bottle.request.environ['USER']
s = app.datamgr.get_service(host_name, service, user) or app.redirect404()
# Set servicegroups level ...
app.datamgr.set_servicegroups_level(user)
# Get graph data. By default, show last 4 hours
now = int(time.time())
graphstart = int(app.request.GET.get('graphstart', str(now - 4 * 3600)))
graphend = int(app.request.GET.get('graphend', str(now)))
return {
'elt': s,
'graphstart': graphstart, 'graphend': graphend,
'configintervallength': app.datamgr.get_configuration_parameter('interval_length')
}
pages = {
show_host: {
'name': 'Host', 'route': '/host/:host_name', 'view': 'eltdetail',
'static': True
},
show_service: {
'name': 'Service', 'route': '/service/:host_name/:service#.+#', 'view': 'eltdetail',
'static': True
}
}
e-koch/VLA_Lband | 14B-088/HI/turbulence/M33_turbstats.py | Python | mit | 5,008 | 0.001198
'''
Run a variety of turbulent statistics on the full M33 cube.
This will use a TON of memory. Recommend running on a cluster.
File structure setup to work on cedar in scratch space.
Saves the outputs for later use
'''
from spectral_cube import SpectralCube
from astropy.io import fits
from os.path import join as osjoin
import astropy.units as u
from astropy import log
import sys
from turbustat.statistics import (PowerSpectrum, VCA, VCS, PCA, SCF,
StatMoments, DeltaVariance)
ncore = int(sys.argv[-1])
run_pspec = False
run_delvar = False
run_moments = False
run_vca = False
run_vcs = True
run_pca = True
run_scf = True
scratch_path = "/home/ekoch/scratch/M33_turbulence/"
cube_name = osjoin(scratch_path, "M33_14B-088_HI.clean.image.GBT_feathered.pbcov_gt_0.5_masked.com_beam.fits")
mom0_name = osjoin(scratch_path, "M33_14B-088_HI.clean.image.GBT_feathered.pbcov_gt_0.5_masked.mom0.fits")
out_path = lambda x: osjoin(scratch_path, x)
if run_pspec:
log.info("Running PowerSpectrum")
mom0 = fits.open(mom0_name)[0]
pspec = PowerSpectrum(mom0).run(use_pyfftw=True, threads=ncore - 1)
pspec.save_results(out_path("pspec_m33_14B088.pkl"), keep_data=False)
del pspec
if run_delvar:
log.info("Running Delta-Variance")
mom0 = fits.open(mom0_name)[0]
delvar = DeltaVariance(mom0).run(use_pyfftw=True, threads=ncore - 1)
delvar.save_results(out_path("delvar_m33_14B088.pkl"), keep_data=False)
del delvar
if run_moments:
log.info("Running Stat Moments")
mom0 = fits.open(mom0_name)[0]
# Run with a couple of different radii
# Beam size (close to it)
radii = [7, 14, 21] * u.pix
for radius in radii:
log.info("StatMoments radius: {}".format(radius))
moments = StatMoments(mom0, radius=radius).run()
moments.save_results(out_path("moments_m33_14B088_radius_{}pix.pkl".format(radius.value)),
keep_data=False)
del moments
if run_vca:
log.info("Running VCA")
# Avoid loading in multiple times
try:
cube.shape
except NameError:
cube = SpectralCube.read(cube_name, memmap=False)
# Run VCA over a number of channel sizes.
# First one keeps the original channel width.
chan_widths = [None, 0.4 * u.km / u.s, 0.6 * u.km / u.s,
0.8 * u.km / u.s, 1.0 * u.km / u.s,
1.6 * u.km / u.s, 2.0 * u.km / u.s,
3.0 * u.km / u.s, 4.0 * u.km / u.s,
5.0 * u.km / u.s, 6.0 * u.km / u.s,
8.0 * u.km / u.s, 10.0 * u.km / u.s,
20.0 * u.km / u.s, 40.0 * u.km / u.s]
for chan in chan_widths:
log.info("On VCA channel width {}".format(chan))
vca = VCA(cube, channel_width=chan).run(use_pyfftw=True,
threads=ncore - 1)
if chan is None:
chan = 0.2 * u.km / u.s
vca.save_results(out_path("vca_m33_14B088_chanwidth_{}_kms.pkl".format(chan.value)),
keep_data=False)
del vca
if run_vcs:
log.info("Running VCS")
# Avoid loading in multiple times
try:
cube.shape
except NameError:
cube = SpectralCube.read(cube_name, memmap=False)
# Run VCS when varying the spatial resolution.
# First one keeps the original beam size.
from radio_beam import Beam
majors = [None, 38 * u.arcsec, 57 * u.arcsec,
76 * u.arcsec, 95 * u.arcsec]
for major in majors:
log.info("On VCS resolution {}".format(major))
if major is None:
conv_cube = cube
else:
new_beam = Beam(major)
conv_cube = cube.convolve_to(new_beam)
vcs = VCS(conv_cube)
vcs.run(use_pyfftw=True, threads=ncore - 1)
if major is None:
major = 19 * u.arcsec
vcs.save_results(out_path("vcs_m33_14B088_chanwidth_{}_arcsec.pkl".format(major.value)),
keep_data=False)
del vcs
del conv_cube
if run_pca:
log.info("Running PCA")
# Avoid loading in multiple times
try:
cube.shape
except NameError:
cube = SpectralCube.read(cube_name, memmap=False)
pca = PCA(cube, distance=840 * u.kpc).run(min_eigval=0.001,
beam_fwhm=19 * u.arcsec,
spatial_output_unit=u.pc,
spectral_output_unit=u.km / u.s)
pca.save_results(out_path("pca_m33_14B088.pkl"),
keep_data=False)
del pca
if run_scf:
log.info("Running SCF")
try:
cube.shape
except NameError:
cube = SpectralCube.read(cube_name, memmap=False)
# Note that the rolls are going to take a LONG time!!
scf = SCF(cube, size=36).run()
scf.save_results(out_path("scf_m33_14B088.pkl"), keep_data=False)
del scf
akulakov/mangotrac | proj_issues/issues/migrations/0008_auto__del_field_report_url__add_field_report_columns__add_field_report.py | Python | mit | 11,960 | 0.007023
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'Report.url'
db.delete_column(u'issues_report', 'url')
# Adding field 'Report.columns'
db.add_column(u'issues_report', 'columns',
self.gf('django.db.models.fields.TextField')(default='', max_length=5000, blank=True),
keep_default=False)
# Adding field 'Report.filters'
db.add_column(u'issues_report', 'filters',
self.gf('django.db.models.fields.TextField')(default='', max_length=5000, blank=True),
keep_default=False)
# Adding field 'Report.group_by'
db.add_column(u'issues_report', 'group_by',
self.gf('django.db.models.fields.TextField')(default='', max_length=5000, blank=True),
keep_default=False)
def backwards(self, orm):
# Adding field 'Report.url'
db.add_column(u'issues_report', 'url',
self.gf('django.db.models.fields.URLField')(default='', max_length=200),
keep_default=False)
# Deleting field 'Report.columns'
db.delete_column(u'issues_report', 'columns')
# Deleting field 'Report.filters'
db.delete_column(u'issues_report', 'filters')
# Deleting field 'Report.group_by'
db.delete_column(u'issues_report', 'group_by')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'issues.attachment': {
'Meta': {'object_name': 'Attachment'},
'creator': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'files'", 'null': 'True', 'to': u"orm['auth.User']"}),
'file': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'issue': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'files'", 'null': 'True', 'to': u"orm['issues.Issue']"})
},
u'issues.comment': {
'Meta': {'object_name': 'Comment'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'comments'", 'null': 'True', 'to': u"orm['auth.User']"}),
'description': ('django.db.models.fields.TextField', [], {'max_length': '3000'}),
'description_html': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'issue': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'comments'", 'null': 'True', 'to': u"orm['issues.Issue']"})
},
u'issues.component': {
'Meta': {'object_name': 'Component'},
'component': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'issues.issue': {
'Meta': {'object_name': 'Issue'},
'cc': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'closed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'component': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'issues'", 'null': 'True', 'to': u"orm['issues.Component']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'created_issues'", 'null': 'True', 'to': u"orm['auth.User']"}),
'description': ('django.db.models.fields.TextField', [], {'default': "''", 'max_length': '3000', 'blank': 'True'}),
'description_html': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'difficulty': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'milestone': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'issues'", 'null': 'True', 'to': u"orm['issues.Milestone']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '120'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'issues'", 'null': 'True', 'to': u"orm['auth.User']"}),
'priority': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'blan
storyandstructure/django-nomnom | setup.py | Python | mit | 1,453 | 0.002065
# -*- coding: utf-8 -*-
import os
from setuptools import setup, find_packages
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
VERSION = __import__('nomnom').get_version()
CLASSIFIERS = [
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Application Frameworks',
]
# get install_requires from requirements.txt
text = open('requirements.txt', 'r')
REQUIREMENTS = text.readlines()
i = 0
while i < len(REQUIREMENTS):
REQUIREMENTS[i] = REQUIREMENTS[i].replace('\n', '')
i += 1
setup(
author="Kevin Harvey",
author_email="kevin@storyandstructure.com",
name='django-nomnom',
version=VERSION,
description='Generic importing tool for the Django admin site.',
long_description='',
license='MIT',
keywords='django, import, admin',
url='https://github.com/storyandstructure/django-nomnom/',
platforms=['OS Independent'],
classifiers=CLASSIFIERS,
install_requires=REQUIREMENTS,
packages=find_packages(),
include_package_data=True,
zip_safe = False
)
guaix-ucm/numina | numina/array/tests/test_bpm.py | Python | gpl-3.0 | 1,012 | 0.007905
import numpy
from numina.array.bpm import process_bpm_median
def test_process_bpm():
data = numpy.zeros((10, 10), dtype='float32') + 3.0
mask = numpy.zeros((10, 10), dtype='int32')
mask[3,3] = 1
result1 = process_bpm_median(data, mask)
assert result1[3,3] == 3.0
result2, subs2 = process_bpm_median(data, mask, subs=True)
assert result2[3,3] == 3.0
assert subs2.min() == 1
def test_process_bpm_large_hole():
data = numpy.zeros((100, 100), dtype='float32') + 3.0
mask = numpy.zeros((100, 100), dtype='int32')
mask[30:40,30:40] = 1
fill = 0.1
result, subs = process_bpm_median(data, mask, fill=fill, subs=True)
assert result[35,35] == fill
assert subs.sum() == 100 * 100 - 36
result1, subs1 = process_bpm_median(data, mask, reuse_values=True, subs=True)
assert result1[35,35] == 3.0
assert subs1.sum() == 100 * 100
result2 = process_bpm_median(data, mask, reuse_values=True, subs=False)
assert result2[35,35] == 3.0
Reagankm/KnockKnock | venv/lib/python3.4/site-packages/nltk/sem/evaluate.py | Python | gpl-2.0 | 25,345 | 0.004222
# Natural Language Toolkit: Models for first-order languages with lambda
#
# Copyright (C) 2001-2015 NLTK Project
# Author: Ewan Klein <ewan@inf.ed.ac.uk>,
# URL: <http://nltk.sourceforge.net>
# For license information, see LICENSE.TXT
#TODO:
#- fix tracing
#- fix iterator-based approach to existentials
"""
This module provides data structures for representing first-order
models.
"""
from __future__ import print_function, unicode_literals
from pprint import pformat
import inspect
import textwrap
import re
from nltk.decorators import decorator # this is used in code that is commented out
from nltk.compat import string_types, python_2_unicode_compatible
from nltk.sem.logic import (AbstractVariableExpression, AllExpression, Expression,
AndExpression, ApplicationExpression, EqualityExpression,
ExistsExpression, IffExpression, ImpExpression,
IndividualVariableExpression, LambdaExpression,
NegatedExpression, OrExpression,
Variable, is_indvar)
class Error(Exception): pass
class Undefined(Error): pass
def trace(f, *args, **kw):
argspec = inspect.getargspec(f)
d = dict(zip(argspec[0], args))
if d.pop('trace', None):
print()
for item in d.items():
print("%s => %s" % item)
return f(*args, **kw)
def is_rel(s):
"""
Check whether a set represents a relation (of any arity).
:param s: a set containing tuples of str elements
:type s: set
:rtype: bool
"""
# we have the empty relation, i.e. set()
if len(s) == 0:
return True
# all the elements are tuples of the same length
elif all(isinstance(el, tuple) for el in s) and len(set(len(el) for el in s)) == 1:
return True
else:
raise ValueError("Set %r contains sequences of different lengths" % s)
def set2rel(s):
"""
Convert a set containing individuals (strings or numbers) into a set of
unary tuples. Any tuples of strings already in the set are passed through
unchanged.
For example:
- set(['a', 'b']) => set([('a',), ('b',)])
- set([3, 27]) => set([('3',), ('27',)])
:type s: set
:rtype: set of tuple of str
"""
new = set()
for elem in s:
if isinstance(elem, string_types):
new.add((elem,))
elif isinstance(elem, int):
new.add((str(elem),))
else:
new.add(elem)
return new
def arity(rel):
"""
Check the arity of a relation.
:type rel: set of tuples
:rtype: int
"""
if len(rel) == 0:
return 0
return len(list(rel)[0])
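# Illustration: arity(set()) == 0, and for a binary relation such as
# set([('b1', 'g1'), ('b2', 'g1')]) the arity is 2.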
@python_2_unicode_compatible
class Valuation(dict):
"""
A dictionary which represents a model-theoretic Valuation of non-logical constants.
Keys are strings representing the constants to be interpreted, and values correspond
to individuals (represented as strings) and n-ary relations (represented as sets of tuples
of strings).
An instance of ``Valuation`` will raise an ``Undefined`` exception if
indexed with an expression that is not in its list of symbols.
"""
def __init__(self, xs):
"""
:param xs: a list of (symbol, value) pairs.
"""
super(Valuation, self).__init__()
for (sym, val) in xs:
if isinstance(val, string_types) or isinstance(val, bool):
self[sym] = val
elif isinstance(val, set):
self[sym] = set2rel(val)
else:
msg = textwrap.fill("Error in initializing Valuation. "
"Unrecognized value for symbol '%s':\n%s" % (sym, val), width=66)
raise ValueError(msg)
def __getitem__(self, key):
if key in self:
return dict.__getitem__(self, key)
else:
raise Undefined("Unknown expression: '%s'" % key)
def __str__(self):
return pformat(self)
@property
def domain(self):
"""Set-theoretic domain of the value-space of a Valuation."""
dom = []
for val in self.values():
if isinstance(val, string_types):
dom.append(val)
elif not isinstance(val, bool):
dom.extend([elem for tuple_ in val for elem in tuple_ if elem is not None])
return set(dom)
@property
def symbols(self):
"""The non-logical constants which the Valuation recognizes."""
return sorted(self.keys())
@classmethod
def fromstring(cls, s):
return read_valuation(s)
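# An example in the style of this module's doctests (hypothetical values):
#
#     >>> val = Valuation([('adam', 'b1'), ('girl', set(['g1', 'g2']))])
#     >>> val['adam']
#     'b1'
#     >>> sorted(val.domain)
#     ['b1', 'g1', 'g2']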
##########################################
# REs used by the _read_valuation function
##########################################
_VAL_SPLIT_RE = re.compile(r'\s*=+>\s*')
_ELEMENT_SPLIT_RE = re.compile(r'\s*,\s*')
_TUPLES_RE = re.compile(r"""\s*
(\([^)]+\)) # tuple-expression
\s*""", re.VERBOSE)
def _read_valuation_line(s):
"""
Read a line in a valuation file.
Lines are expected to be of the form::
noosa => n
girl => {g1, g2}
chase => {(b1, g1), (b2, g1), (g1, d1), (g2, d2)}
:param s: input line
:type s: str
:return: a pair (symbol, value)
:rtype: tuple
"""
pieces = _VAL_SPLIT_RE.split(s)
symbol = pieces[0]
value = pieces[1]
# check whether the value is meant to be a set
if value.startswith('{'):
value = value[1:-1]
tuple_strings = _TUPLES_RE.findall(value)
# are the set elements tuples?
if tuple_strings:
set_elements = []
for ts in tuple_strings:
ts = ts[1:-1]
element = tuple(_ELEMENT_SPLIT_RE.split(ts))
set_elements.append(element)
else:
set_elements = _ELEMENT_SPLIT_RE.split(value)
value = set(set_elements)
return symbol, value
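# Illustration (hypothetical input lines): _read_valuation_line("adam => b1")
# returns ('adam', 'b1'), while _read_valuation_line("girl => {g1, g2}")
# returns ('girl', set(['g1', 'g2'])).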
def read_valuation(s, encoding=None):
"""
Convert a valuation string into a valuation.
:param s: a valuation string
:type s: str
:param encoding: the encoding of the input string, if it is binary
:type encoding: str
:return: a ``nltk.sem`` valuation
:rtype: Valuation
"""
if encoding is not None:
s = s.decode(encoding)
statements = []
for linenum, line in enumerate(s.splitlines()):
line = line.strip()
if line.startswith('#') or line=='': continue
try:
statements.append(_read_valuation_line(line))
except ValueError:
raise ValueError('Unable to parse line %s: %s' % (linenum, line))
return Valuation(statements)
@python_2_unicode_compatible
class Assignment(dict):
"""
A dictionary which represents an assignment of values to variables.
An assignment can only assign values from its domain.
If an unknown expression *a* is passed to a model *M*\ 's
interpretation function *i*, *i* will first check whether *M*\ 's
valuation assigns an interpretation to *a* as a constant, and if
this fails, *i* will delegate the interpretation of *a* to
*g*. *g* only assigns values to individual variables (i.e.,
members of the class ``IndividualVariableExpression`` in the ``logic``
module). If a variable is not assigned a value by *g*, it will raise
an ``Undefined`` exception.
A variable *Assignment* is a mapping from individual variables to
entities in the domain. Individual variables are usually indicated
with the letters ``'x'``, ``'y'``, ``'w'`` and ``'z'``, optionally
followed by an integer (e.g., ``'x0'``, ``'y332'``). Assignments are
created using the ``Assignment`` constructor, which also takes the
domain as a parameter.
>>> from nltk.sem.evaluate import Assignment
>>> dom = set(['u1', 'u2', 'u3', 'u4'])
>>> g3 = Assignment(dom, [('x', 'u1'), ('y', 'u2')])
>>> g3 == {'x': 'u1', 'y': 'u2'}
True
There is also a ``print`` format for assignments which uses a notation
closer to that in logic textbooks:
dkamotsky/program-y | src/test/config/file/test_factory.py | Python | mit | 1,628 | 0.0043
import unittest
from programy.config.file.factory import ConfigurationFactory
from programy.config.client.client import ClientConfiguration
class ConfigurationFactoryTests(unittest.TestCase):
def test_guess_format_from_filename(self):
config_format = ConfigurationFactory.guess_format_from_filename("file.yaml")
self.assertEqual(config_format, "yaml")
config_format = ConfigurationFactory.guess_format_from_filename("file.json")
self.assertEqual(config_format, "json")
config_format = ConfigurationFactory.guess_format_from_filename("file.xml")
self.assertEqual(config_format, "xml")
def test_guess_format_no_extension(self):
with self.assertRaises(Exception):
ConfigurationFactory.guess_format_from_filename("file_yaml")
def test_get_config_by_name(self):
client_config = ClientConfiguration()
config_type = ConfigurationFactory.get_config_by_name(client_config, "yaml")
self.assertIsNotNone(config_type)
config_type = ConfigurationFactory.get_config_by_name(client_config, "json")
self.assertIsNotNone(config_type)
config_type = ConfigurationFactory.get_config_by_name(client_config, "xml")
self.assertIsNotNone(config_type)
def test_get_config_by_name_wrong_extension(self):
with self.assertRaises(Exception):
ConfigurationFactory.get_config_by_name("other")
with self.assertRaises(Exception):
ConfigurationFactory.get_config_by_name("")
with self.assertRaises(Exception):
ConfigurationFactory.get_config_by_name(None)
micjerry/groupservice | handlers/acceptmember.py | Python | apache-2.0 | 2,851 | 0.004911
import tornado.web
import tornado.gen
import json
import io
import logging
import motor
from bson.objectid import ObjectId
import mickey.userfetcher
from mickey.basehandler import BaseHandler
class AcceptMemberHandler(BaseHandler):
@tornado.web.asynchronous
@tornado.gen.coroutine
def post(self):
coll = self.application.db.groups
publish = self.application.publish
token = self.request.headers.get("Authorization", "")
data = json.loads(self.request.body.decode("utf-8"))
groupid = data.get("groupid", "")
inviteid = data.get("invite_id", self.p_userid)
members = data.get("members", [])
logging.info("begin to add members to group %s" % groupid)
if not groupid or not members:
logging.error("invalid request")
self.set_status(403)
self.finish()
return
result = yield coll.find_one({"_id":ObjectId(groupid)})
if not result:
logging.error("group %s does not exist" % groupid)
self.set_status(404)
self.finish()
return
if result.get("owner", "") != self.p_userid:
logging.error("%s are not the owner" % self.p_userid)
self.set_status(403)
self.finish()
return
#get exist members
exist_ids = [x.get("id", "") for x in result.get("members", [])]
# get members and the receivers
add_members = list(filter(lambda x: x not in exist_ids, [x.get("id", "") for x in members]))
notify = {}
notify["name"] = "mx.group.authgroup_invited"
notify["pub_type"] = "any"
notify["nty_type"] = "device"
notify["msg_type"] = "other"
notify["groupid"] = groupid
notify["groupname"] = result.get("name", "")
notify["userid"] = inviteid
opter_info = yield mickey.userfetcher.getcontact(inviteid, token)
if opter_info:
notify["username"] = opter_info.get("name", "")
else:
logging.error("get user info failed %s" % inviteid)
adddb_members = list(filter(lambda x: x.get("id", "") in add_members, members))
append_result = yield coll.find_and_modify({"_id":ObjectId(groupid)},
{
"$addToSet":{"appendings":{"$each": adddb_members}},
"$unset": {"garbage": 1}
})
if append_result:
self.set_status(200)
publish.publish_multi(add_members, notify)
else:
self.set_status(500)
logging.error("add user failed %s" % groupid)
return
self.finish()
erik/sketches | projects/700c/web/views/api.py | Python | agpl-3.0 | 54 | 0
import flask
mod = flask.Blueprint('api', __name__)
cornell-cup/cs-minibot | minibot/hardware/communication/UDP.py | Python | apache-2.0 | 1,145 | 0.012227
# UDP code taken from < https://pymotw.com/2/socket/udp.html >
import socket, time, fcntl, struct
def udpBeacon():
# Create a UDP socket
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
my_ip = getIP('wlan0')
spliced_subnet = my_ip[:my_ip.rfind('.')] + ".255"
# Define broadcasting address and message
server_address = (spliced_subnet, 5001)
message = 'Hello, I am a minibot!'
# Send message and resend every 9 seconds
while True:
try:
# Send data
print('sending broadcast: "%s"' % message)
sent = sock.sendto(bytes(message, 'utf8'), server_address)
except Exception as err:
print(err)
time.sleep(9)
def getIP(ifname):
"""
Returns the IP of the device
"""
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
return socket.inet_ntoa(fcntl.ioctl(
s.fileno(),
0x8915, # SIOCGIFADDR
struct.pack('256s', bytes(ifname[:15],'utf8'))
)[20:24])
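# A matching receiver sketch (illustrative, not part of this file): the
# beacon above can be observed by binding a UDP socket to the same port:
#
#     recv = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
#     recv.bind(('', 5001))
#     data, addr = recv.recvfrom(1024)  # -> b'Hello, I am a minibot!'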
KoffeinFlummi/AGM | .devfiles/stringtablediag.py | Python | gpl-2.0 | 3,247 | 0.018479
#!/usr/bin/env python3
import os
import sys
from xml.dom import minidom
# STRINGTABLE DIAG TOOL
# Author: KoffeinFlummi
# ---------------------
# Checks for missing translations and all that jazz.
def get_all_languages(projectpath):
""" Checks what languages exist in the repo. """
languages = []
for module in os.listdir(projectpath):
if module[0] == ".":
continue
stringtablepath = os.path.join(projectpath, module, "stringtable.xml")
try:
xmldoc = minidom.parse(stringtablepath)
except:
continue
keys = xmldoc.getElementsByTagName("Key")
for key in keys:
for child in key.childNodes:
try:
if not child.tagName in languages:
languages.append(child.tagName)
except:
continue
return languages
def check_module(projectpath, module, languages):
""" Checks the given module for all the different languages. """
localized = []
stringtablepath = os.path.join(projectpath, module, "stringtable.xml")
try:
xmldoc = minidom.parse(stringtablepath)
except:
return 0, localized
keynumber = len(xmldoc.getElementsByTagName("Key"))
for language in languages:
localized.append(len(xmldoc.getElementsByTagName(language)))
return keynumber, localized
def main():
scriptpath = os.path.realpath(__file__)
projectpath = os.path.dirname(os.path.dirname(scriptpath))
print("#########################")
print("# Stringtable Diag Tool #")
print("#########################")
languages = get_all_languages(projectpath)
print("\nLanguages present in the repo:")
print(", ".join(languages))
keysum = 0
localizedsum = list(map(lambda x: 0, languages))
missing = list(map(lambda x: [], languages))
for module in os.listdir(projectpath):
keynumber, localized = check_module(projectpath, module, languages)
if keynumber == 0:
continue
print("\n# " + module)
keysum += keynumber
for i in range(len(localized)):
print(" %s %s / %i" % ((languages[i]+":").ljust(10), str(localized[i]).ljust(3), keynumber))
localizedsum[i] += localized[i]
if localized[i] < keynumber:
missing[i].append(module[4:])
print("\n###########")
print("# RESULTS #")
print("###########")
print("\nTotal number of keys: %i\n" % (keysum))
for i in range(len(languages)):
if localizedsum[i] == keysum:
print("%s No missing stringtable entries." % ((languages[i] + ":").ljust(12)))
else:
print("%s %s missing stringtable entry/entries." % ((languages[i] + ":").ljust(12), str(keysum - localizedsum[i]).rjust(4)), end="")
print(" ("+", ".join(missing[i])+")")
print("\n\n### MARKDOWN ###")
print("\nTotal number of keys: %i\n" % (keysum))
print("| Language | Missing Entries | Relevant Modules | % done |")
print("|----------|----------------:|------------------|--------|")
for i, language in enumerate(languages):
if localizedsum[i] == keysum:
print("| {} | 0 | - | 100% |".format(language))
else:
print("| {} | {} | {} | {}% |".format
|
(
language,
keysum - localizedsum[i],
", ".join(missing[i]),
round(100 * localizedsum[i] / keysum)))
if __name__ == "__main__":
main()
licongyu95/learning_python | core_python_programming/cap2/two.py | Python | unlicense | 180 | 0.05
#!/usr/bin/env python
#encoding=utf-8
from onefile import *
def two():
print "at two\n",
def second():
print "at second\n",
if __name__ == '__main__':
two()
#one()
#first()
YaguangZhang/EarsMeasurementCampaignCode | Trials/lib/Trial6_pySerial_Mod.py | Python | mit | 857 | 0.012835
import serial
port = "COM5"
baud = 19200
try:
ser = serial.Serial(port, baud, timeout=1)
ser.isOpen() # try to open port, if possible print message and proceed with 'while True:'
print ("port is opened!")
except IOError: # if port is already opened, close it and open it again and print message
ser.close()
ser.open()
print ("port was already open, was closed and opened again!")
def main():
while True:
cmd = raw_input("Enter command or 'exit':")
# for Python 2
# cmd = input("Enter command or 'exit':")
# for Python 3
if cmd == 'exit':
ser.close()
exit()
else:
ser.write(cmd.encode('ascii'))
# out = ser.read()
# print('Receiving...'+out)
if __name__ == "__main__":
main()
moto-timo/robotframework | src/robot/utils/__init__.py | Python | apache-2.0 | 3,555 | 0.000281
# Copyright 2008-2015 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Various generic utility functions and classes.
Utilities are mainly for internal usage, but external libraries and tools
may find some of them useful. Utilities are generally stable, but absolute
backwards compatibility between major versions is not guaranteed.
All utilities are exposed via the :mod:`robot.utils` package, and should be
used either like::
from robot import utils
assert utils.Matcher('H?llo').match('Hillo')
or::
from robot.utils import Matcher
assert Matcher('H?llo').match('Hillo')
"""
from .argumentparser import ArgumentParser, cmdline2list
from .application import Application
from .compress import compress_text
from .connectioncache import ConnectionCache
from .dotdict import DotDict, OrderedDict
from .encoding import (decode_output, encode_output,
decode_from_system, encode_to_system)
from .error import (get_error_message, get_error_details, ErrorDetails)
from .escaping import escape, unescape, split_from_equals
from .etreewrapper import ET, ETSource
from .frange import frange
from .markuputils import html_format, html_escape, xml_escape, attribute_escape
from .markupwriters import HtmlWriter, XmlWriter, NullMarkupWriter
from .importer import Importer
from .match import eq, Matcher, MultiMatcher
from .misc import (getdoc, isatty, plural_or_not, printable_name, py2to3,
roundup, seq2str, seq2str2)
from .normalizing import lower, normalize, NormalizedDict
from .platform import (IRONPYTHON, JYTHON, PY2, PY3, PYTHON, UNIXY, WINDOWS,
RERAISED_EXCEPTIONS)
from .recommendations import RecommendationFinder
from .robotenv import get_env_var, set_env_var, del_env_var, get_env_vars
from .robotinspect import is_java_init, is_java_method
from .robotio import binary_file_writer, file_writer
from .robotpath import abspath, find_file, get_link_path, normpath
from .robottime import (elapsed_time_to_string, format_time, get_elapsed_time,
get_time, get_timestamp, secs_to_timestamp,
secs_to_timestr, timestamp_to_secs, timestr_to_secs,
parse_time)
from .robottypes import (is_bytes, is_dict_like, is_falsy, is_integer,
                         is_list_like, is_number, is_string, is_truthy,
is_unicode, long, type_name, unicode, StringIO)
from .setter import setter, SetterAwareType
from .sortable import Sortable
from .text import (cut_long_message, format_assign_message,
pad_console_length, get_console_length, split_tags_from_doc,
split_args_from_name_or_path)
from .unic import prepr, unic
from .utf8reader import Utf8Reader
# Used by the old SeleniumLibrary until version 2.9.2.
# https://github.com/robotframework/SeleniumLibrary/issues/261
# TODO: Remove in RF 3.0.
def html_attr_escape(attr):
"""Deprecated!! Use attribute_escape instead."""
return attribute_escape(attr)
|
hydroshare/hydroshare
|
hs_tracking/management/commands/tracking_popular.py
|
Python
|
bsd-3-clause
| 1,309
| 0.001528
|
"""
Check tracking functions for proper output.
"""
from django.core.management.base import BaseCommand
from hs_tracking.models import Variable
class Command(BaseCommand):
help = "check on tracking"
def add_arguments(self, parser):
parser.add_argument('--days', type=int, dest='days', default=31,
help='number of days to list')
parser.add_argument('--resources', type=int, dest='n_resources', default=5,
                            help='number of resources to return')
def handle(self, *args, **options):
days = options['days']
n_resources = options['n_resources']
popular = Variable.popular_resources(days=days, n_resources=n_resources)
for v in popular:
print("users={} short_id={}"
.format(v.users, v.short_id))
print(" title={}".format(v.title))
print(" created={} updated={}"
.format(v.created.strftime("%Y-%m-%d %H:%M:%S"),
v.last_updated.strftime("%Y-%m-%d %H:%M:%S")))
print(" published={} public={} discoverable={} first author={}"
.format(v.published,
v.public,
v.discoverable,
v.first_creator))
|
aberdah/Stockvider
|
stockvider/stockviderApp/dbManager.py
|
Python
|
mit
| 33,602
| 0.01157
|
# -*- coding: utf-8 -*-
import threading
import logging
import unittest
import gc
from stockviderApp.utils import retryLogger
from stockviderApp.sourceDA.symbols.referenceSymbolsDA import ReferenceSymbolsDA
from stockviderApp.localDA.symbols.dbReferenceSymbolsDA import DbReferenceSymbolsDA
from stockviderApp.sourceDA.symbols.googleSymbolsDA import GoogleSymbolsDA
from stockviderApp.sourceDA.symbols.yahooSymbolsDA import YahooSymbolsDA
from stockviderApp.sourceDA.symbols.wikiSymbolsDA import WikiSymbolsDA
from stockviderApp.localDA.symbols.dbGoogleSymbolsDA import DbGoogleSymbolsDA
from stockviderApp.localDA.symbols.dbYahooSymbolsDA import DbYahooSymbolsDA
from stockviderApp.localDA.symbols.dbWikiSymbolsDA import DbWikiSymbolsDA
from stockviderApp.localDA.rawData.dbGoogleRawDataDA import DbGoogleRawDataDA
from stockviderApp.localDA.rawData.dbYahooRawDataDA import DbYahooRawDataDA
from stockviderApp.localDA.rawData.dbWikiRawDataDA import DbWikiRawDataDA
from stockviderApp.sourceDA.rawData.googleRawDataDA import GoogleRawDataDA
from stockviderApp.sourceDA.rawData.yahooRawDataDA import YahooRawDataDA
from stockviderApp.sourceDA.rawData.wikiRawDataDA import WikiRawDataDA
from stockviderApp.sourceDA.rawData.referenceRawDataDA import ReferenceRawDataDA
from stockviderApp.localDA.rawData.dbReferenceRawDataDA import DbReferenceRawDataDA
class ThreadUpdateRawData(threading.Thread):
    '''
    Threading class handling data retrieval from Quandl sources.
    '''
    threadLimiter = threading.BoundedSemaphore(3)
    # There are only 3 of them at most anyway
def __init__(self, _updateFunc, _symbolList, _exchange):
threading.Thread.__init__(self)
self.updateFunc = _updateFunc
self.symbolList = _symbolList
self.exchange = _exchange
        # Grab the logger
        self.logger = logging.getLogger(__name__)
return
def run(self):
        # Acquire a thread slot
        ThreadUpdateRawData.threadLimiter.acquire()
        # Split the symbol list into several lists (memory issue)
        listOfSymbsList = ThreadUpdateRawData._chunks(self.symbolList, 100)
        for (curIndex, currentSymbolsList) in enumerate(listOfSymbsList):
            self.logger.info("Processing list " + str(curIndex + 1) +
                             "/" + str(len(listOfSymbsList)))
            # Run the update function
            self.updateFunc(currentSymbolsList, self.exchange)
            # Cleanly release memory
            gc.collect()
        # Release the thread slot
        ThreadUpdateRawData.threadLimiter.release()
return
@classmethod
def _chunks(cls, l, n):
'''
        Chunks the list into multiple lists of size n and returns them.
        :param l: list to chunk
        :type l: list
        :param n: chunk size
:type n: int
:returns: list of chunked lists
:rtype: list
'''
listOfLists = []
for i in range(0, len(l), n):
listOfLists.append(l[i:i+n])
return listOfLists
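    # e.g. (illustrative): ThreadUpdateRawData._chunks([1, 2, 3, 4, 5], 2)
    # returns [[1, 2], [3, 4], [5]]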
class DbManager(object):
'''
Class wrapping all procedures to update the local database from
outside sources.
'''
    # Remove in production - useful in debug to limit data retrieval
    limitNbr = None  # When None it is ignored
exchangeTuple = ('NYSE', 'NASDAQ')
instrumentType = 'EQUITIES'
def __init__(self):
        # Grab the logger
        self.logger = logging.getLogger(__name__)
return
def runTest(self):
'''
Runs tests.
'''
        self.logger.info("Starting unit tests")
        self._runUnitTest()
        self.logger.info("Finished unit tests")
return
def runSymbols(self):
'''
Handles the symbol tables update in the database.
'''
        self.logger.info("Starting the Quandl symbols update")
        # Update the symbols of the Quandl tables
        self._updateQuandlSymbols(self.exchangeTuple)
        self.logger.info("Finished the Quandl symbols update")
        self.logger.info("Starting the reference symbols update")
        # Update the reference symbols
        for exchange in self.exchangeTuple:
            self.logger.info("Reference - adding symbols for " + str(exchange))
            self._updateReferenceSymbols(exchange, self.instrumentType)
        self.logger.info("Finished the reference symbols update")
        self.logger.info("Starting the reference symbols cleanup")
        # Clean up duplicates in the reference symbols between Nyse and Nasdaq
        self._cleanDuplicatedReferenceSymbols()
        self.logger.info("Finished the reference symbols cleanup")
        self.logger.info("Starting the reference symbols mapping")
        # Map the reference symbols
        for exchange in self.exchangeTuple:
            self.logger.info("Reference - mapping symbols for " + str(exchange))
            self._mapReferenceSymbols(exchange)
        self.logger.info("Finished the reference symbols mapping")
return
def runQuandlRawData(self, exchange):
'''
Handle the Quandl data update in the database.
'''
        self.logger.info("Starting to process Quandl raw data for " + str(exchange))
self._updateQuandlRawData(exchange)
return
def runReferenceRawData(self, exchange):
'''
Handle the aggregated data update in the database.
'''
        self.logger.info("Starting to process Reference raw data for " + str(exchange))
self._updateReferenceRawData(exchange)
return
# -------------------------------------------------------------
def _runUnitTest(self):
'''
Runs unit tests.
'''
        # Run the tests
        testsuite = unittest.TestLoader().discover('./test/')
        testResults = unittest.TextTestRunner(verbosity=1).run(testsuite)
        # Log errors and failures
        self.logger.warning("Test errors: ")
for errorString in testResults.errors:
self.logger.error(errorString)
for failureString in testResults.failures:
self.logger.error(failureString)
return
    # ------------------ Symbol update methods -------------------
@retryLogger(maxRetryNbr=3, sleepTimeIncr=30)
def _updateReferenceSymbols(self, exchange, instrumentType):
'''
Updates the reference symbol table in the database.
:param exchange: name of the exchange
:type exchange: str
:param instrumentType: type of instrument (EQUITIES, BOND, ...)
:type instrumentType: str
.. warning::
This procedure must be called after the local update of the Quandl
symbols.
'''
        # Set up the reference symbols DAOs
        referenceSymbolsDAO = ReferenceSymbolsDA(exchange)
        dbReferenceSymbolDAO = DbReferenceSymbolsDA()
        # Fetch the reference symbols
        referenceSymbolsList = referenceSymbolsDAO.returnSymbolsList(instrumentType)[0:DbManager.limitNbr]
        # Add them to the DB
        dbReferenceSymbolDAO.updateOrAddSymbolsList(referenceSymbolsList, exchange)
        self.logger.info("Reference - symbols added to the table")
return
def _cleanDuplicatedReferenceSymbols(self):
'''
Cleans the reference symbol table from duplicated entries.
.. note::
It may happen that the same symbol is listed on both NYSE and
NASDAQ. This procedure ensures that these symbols are only recorded
once in the database.
'''
        # Set up the local DAO for the sym
|
finklabs/aws-deploy
|
botodeploy/tool.py
|
Python
|
mit
| 3,485
| 0.000287
|
#!/usr/bin/env python
import os
import sys
import inspect
from functools import update_wrapper
import getpass
import copy
import click
import signals
from .config_reader import read_config
from .utils import version, dict_merge
from .defaults import DEFAULT_CONFIG
# add fixture feature to pocoo-click
def _make_command(f, name, attrs, cls):
if isinstance(f, click.Command):
raise TypeError('Attempted to convert a callback into a '
'command twice.')
new_func = f
if 'fixture' in attrs:
fixture = attrs['fixture']
if not inspect.isgeneratorfunction(fixture):
raise TypeError('fixture does not yield anything.')
attrs.pop('fixture')
def new_func(*args, **kwargs):
it = fixture()
val = next(it)
res = f(val, *args[1:], **kwargs)
try:
next(it)
except StopIteration:
pass
else:
raise RuntimeError('fixture has more than one yield.')
return res
try:
new_func.__click_params__ = f.__click_params__
except AttributeError:
pass
update_wrapper(new_func, f)
try:
params = new_func.__click_params__
params.reverse()
del new_func.__click_params__
except AttributeError:
params = []
help = attrs.get('help')
if help is None:
help = inspect.getdoc(f)
if isinstance(help, bytes):
help = help.decode('utf-8')
else:
help = inspect.cleandoc(help)
attrs['help'] = help
click.decorators._check_for_unicode_literals()
    return cls(name=name or new_func.__name__.lower(),
callback=new_func, params=params, **attrs)
# patch in the custom command maker
click.decorators._make_command = _make_command
def _get_env():
"""Read ENV environment variable.
"""
env = os.getenv('ENV', '')
if env:
env = env.lower()
return env
def _get_context():
"""assemble the tool context. Private members are preceded by a '_'.
:return: dictionary containing the tool context
"""
# TODO: elapsed, artifact
click_xtc = click.get_current_context()
context = {
'tool': click_xtc.parent.info_name,
'command': click_xtc.info_name,
'version': version(),
'user': getpass.getuser()
}
env = _get_env()
if env:
context['env'] = env
return context
def lifecycle():
"""Tool lifecycle which provides hooks into the different stages of the
command execution. See signals for hook details.
"""
context = _get_context()
signals.initialized.send(context)
config, ok = read_config(context, DEFAULT_CONFIG)
if not ok:
signals.exit_on_error.send(context)
sys.exit()
# TODO credentials_retr
# TODO check credentials are valid
signals.config_validation_init.send((context, config))
# TODO config validation
signals.config_validation_finalized.send((context, config))
# merge DEFAULT_CONFIG with config
tool_config = copy.deepcopy(DEFAULT_CONFIG[context['tool']])
dict_merge(tool_config, config[context['tool']])
# TODO lookups
# run the command and provide context and config
signals.command_init.send((context, tool_config))
yield context, tool_config
signals.command_finalized.send((context, tool_config))
signals.finalized.send(context)
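# Illustrative sketch (not part of the original module; the command name and
# callback signature are assumptions): with the fixture-enabled command
# decorator patched in above, a command can consume lifecycle() and receive
# its yielded (context, config) pair as the first callback argument:
#
#   @click.command('deploy', fixture=lifecycle)
#   def deploy(lifecycle_args):
#       context, config = lifecycle_args
#       ...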
|
YangTe1/site_at_will
|
site_at_will/zhihu/urls.py
|
Python
|
gpl-3.0
| 470
| 0.010638
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'psycholagny/', views.psycholagny_zhihu_user, name="psycholagny_zhihu_user"),
    url(r'followed_zhihu_user/add/', views.add_followed_zhihu_user, name="add_followed_zhihu_user"),
url(r'ajax_hidden_zhihu_user/', views.ajax_hidden_zhihu_user, name="ajax_hidden_zhihu_user"),
url(r'ajax_select_many_zhihu_user/', views.ajax_select_many_zhihu_user, name="ajax_select_many_zhihu_user"),
]
|
dramatis/dramatis
|
lib/dramatis/actor/interface.py
|
Python
|
mit
| 4,979
| 0.011448
|
from __future__ import absolute_import
from __future__ import with_statement
import time
from logging import warning
import dramatis
class Interface(object):
"""provides actors with control over their runtime dynamics
A dramatis.Actor.Interface object provides actors that have mixed
in dramatis.Actor access to their actor name and other actor
operations. An instance of dramatis.Actor.Interface is typically
    accessed via self.actor.
    Many of the interface methods affect the <em>gate behavior</em> of
the actor, that is, whether tasks queued for the actor are allowed
to execute. With functions refuse, accept, default, and always, an
actor can control task scheduling.
Most of these methods accept an array of arguments that are matched
against each method by the runtime when determining whether a task
can be scheduled.
Each element in the array is tested, as "equal or subclass",
against the method and arguments of the task
    under consideration. If all the arguments match, the pattern
matches. Extra task parameters are ignored and the match
succeeds. If there are more arguments in the pattern than there
are associated with the task, the match fails.
Note that the interaction of multiple calls is a bit complex and currently
not documented. See the examples and tutorials.
This object should only be accessed from the actor it represents."""
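    # Illustrative sketch (not from the original source; the method names in
    # these patterns are assumptions): inside an actor that mixes in
    # dramatis.Actor, gating might look like
    #
    #   self.actor.refuse("handle_request")      # queue matching tasks
    #   self.actor.accept("handle_request")      # allow them again
    #   self.actor.always(("shutdown",), True)   # always let shutdown run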
def __init__(self,actor):
self._actor = actor
def refuse( self, *args ):
"""Blocks the actor from running any tasks that match pattern_args.
Note that subsequent gate calls may override this behavior."""
self._actor._gate.refuse( "object", *args )
def accept( self, *args ):
"""Enables the actor to run tasks that match pattern_args.
Note that subsequent gate calls may override this behavior."""
self._actor._gate.accept( "object", *args )
def default( self, *args ):
"""Reverts the behavior of the actor to tasks matching pattern_args.
        Behavior is reverted to the default. It undoes the effect of
a call to refuse or accept with the same arguments."""
self._actor._gate.default( ( "object", ) + args )
def always( self, args, value ):
"""Causes tasks matching pattern_args to always be accepted if +value+
        is +true+ or rejected if +value+ is +false+.
        Always takes precedence over refuse/accept so a task that
        matches both a refuse pattern and an always( ..., true )
        pattern will be allowed. always also overrides the implicit
gating in rpc method calls."""
if not isinstance( args, tuple ):
args = ( args, )
self._actor._gate.always( ( ( "object", ) + args ), value )
def enable_call_threading( self ):
"""Enables call threading for actor method calls made by this actor.
When call threading is enabled, method gating is modified such
that recursive and co-recursive calls are allowed. Normally
blocking calls made by an actor on itself, e.g.,
actor.name.some_method
would cause a deadlock. When call threading is enabled,
recursion, both self-recursion and co-recursion (actor A does
an rpc on actor B which does an rpc on actor A), is allowed."""
self._actor._set_call_threading_enabled(True)
@property
def name( self ):
"Returns the actor name for the object."
return self._actor and self._actor.name
def actor_yield(self, t = 0):
"""Yields the actor to allow other tasks to be executed.
Currently, messages are handled FIFO so the yield will
return when all the messages received up to the point of the
yield are executed. This could be modified if non-FIFO queue
processing is added."""
class Sleeper ( dramatis.Actor ):
def nap(self, t):
time.sleep( t )
if t > 0:
sleeper = Sleeper()
            ( dramatis.interface( sleeper ).
              continuation( { "continuation": "rpc",
                              "nonblocking": True } ) ).nap( t )
self._actor.actor_send( [ "actor_yield" ], { "continuation": "rpc",
"nonblocking": True } )
return None
def become(self, behavior):
"""The actor behavior is changed to the provided behavior.
All future tasks will be sent to that behavior.
If either the new or old behaviors mix in
dramatis.Actor.Behavior, their actor methods will be changed
as appropriate (to return/not return nil)
Become has the side effect of making the actor schedulable immediately
since the new behavior is not by definition executing any tasks."""
self._actor.become( behavior )
def _gate( self ):
return self._actor._gate
|
FabriceSalvaire/PyOpenGLng
|
examples/high-level-api-demo/ShaderProgramesV3.py
|
Python
|
gpl-3.0
| 3,221
| 0.009935
|
####################################################################################################
#
# PyOpenGLng - An OpenGL Python Wrapper with a High Level API.
# Copyright (C) 2014 Fabrice Salvaire
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
####################################################################################################
####################################################################################################
import os
####################################################################################################
from PyOpenGLng.HighLevelApi.Shader import GlShaderManager, GlShaderProgramInterface
####################################################################################################
class ConfigPath(object):
module_path = os.path.dirname(__file__)
##############################################
@staticmethod
def glsl(file_name):
return os.path.join(ConfigPath.module_path, 'glslv3', file_name)
####################################################################################################
shader_manager = GlShaderManager()
position_shader_program_interface = GlShaderProgramInterface(uniform_blocks=('viewport',),
attributes=('position',))
texture_shader_program_interface = GlShaderProgramInterface(uniform_blocks=('viewport',),
attributes=('position',
'position_uv'))
if shader_manager.has_visual():
for shader_name in (
'fixed_colour_vertex_shader',
'simple_fragment_shader',
'texture_fragment_shader',
'texture_vertex_shader',
):
shader_manager.load_from_file(shader_name, ConfigPath.glsl(shader_name + '.glsl'))
for args in (
{'program_name':'fixed_shader_program',
'shader_list':('fixed_colour_vertex_shader',
'simple_fragment_shader'),
'program_interface':texture_shader_program_interface,
},
{'program_name':'texture_shader_program',
'shader_list':('texture_vertex_shader',
'texture_fragment_shader'),
'program_interface':texture_shader_program_interface,
},
):
shader_manager.link_program(**args)
####################################################################################################
#
# End
#
####################################################################################################
|
plotly/plotly.py
|
packages/python/plotly/plotly/validators/bar/_uirevision.py
|
Python
|
mit
| 398
| 0
|
import _plotly_utils.basevalidators
class UirevisionValidator(_plotly_utils.basevalidators.AnyValidator):
def __init__(self, plotly_name="uirevision", parent_name="bar", **kwargs):
        super(UirevisionValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "none"),
**kwargs
)
|
diblaze/TDP002
|
Old Exams/201510/Uppgift3.py
|
Python
|
mit
| 1,026
| 0
|
#! /usr/env/bin python3
def scrape_rates(filename):
dict_rate = {}
with open(filename) as f:
for line in f.readlines():
line = line.split(
|
"\t")
line[1] = line[1].replace("\n", "")
line[1] = line[1].replace(",", ".")
dict_rate[line[0]] = line[1]
return dict_rate
def convert_to_sek(string,
|
dict_rate):
string = string.split(" ")
if len(string) == 2:
currency_multiply = dict_rate[string[0]]
value = string[1]
return float(currency_multiply) * float(value)
else:
return float(string[0])
if __name__ == "__main__":
dict_rate = scrape_rates("exchange_rates.txt")
list_of_inputs = []
print("Mata in belopp (avsluta med 0):")
userInput = ""
while userInput != "0":
userInput = input()
list_of_inputs.append(userInput)
sumOfInputs = float(0)
for i in list_of_inputs:
sumOfInputs += convert_to_sek(i, dict_rate)
print("Totalsumma: %.2f" % sumOfInputs)
|
Zarthus/Reconcile
|
modules/topic.py
|
Python
|
mit
| 2,286
| 0.001312
|
"""
The MIT License (MIT)
Copyright (c) 2014 - 2015 Jos "Zarthus" Ahrens and contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
Topic by Zarthus
Licensed under MIT
Fetches the channel topic and returns it to the channel.
"""
from core import moduletemplate
class Topic(moduletemplate.BotModule):
def on_module_load(self):
self.register_command("topic", None, "Get the topic from the channel.", self.PRIV_NONE)
self.checking_topic = False
def on_command(self, target, nick, command, commandtext, mod, admin):
if command == "topic" and target.startswith("#"):
self.checking_topic = target.lower()
self.send_raw("TOPIC {}".format(target))
return True
return False
def on_numeric(self, numeric, data):
        # 332 is the RPL_TOPIC numeric sent in reply to a TOPIC query.
if numeric == 332:
if self.checking_topic:
splitdata = data.split()
if self.checking_topic == splitdata[3].lower():
self.message(self.checking_topic, None, "Topic for {}: {}"
.format(self.checking_topic,
" ".join(splitdata[4:])[1:]))
self.checking_topic = False
|
xczh/ccoin
|
modules/__init__.py
|
Python
|
apache-2.0
| 185
| 0.021622
|
#!/usr/bin/env python
#coding:utf-8
"""
Purpose: ccoin Modules Package
Author: xczh <christopher.winnie2012@gmail.com>
Copyright (c) 2015 xczh. All rights reserved.
"""
|
dymkowsk/mantid
|
Framework/PythonInterface/test/python/plugins/algorithms/VesuvioThicknessTest.py
|
Python
|
gpl-3.0
| 3,285
| 0.005479
|
from __future__ import (absolute_import, division, print_function)
import unittest
import platform
import numpy as np
from mantid.simpleapi import VesuvioThickness
from mantid.api import ITableWorkspace
class VesuvioThicknessTest(unittest.TestCase):
#----------------------------------Algorithm tests----------------------------------------
def test_basic_input(self):
        # Original test values from fortran routines
masses = [1.0079, 27.0, 91.0]
amplitudes = [0.9301589, 2.9496644e-02, 4.0345035e-02]
trans_guess = 0.831
thickness = 5.0
number_density = 1.0
dens_table, trans_table = VesuvioThickness(Masses=masses,
Amplitudes=amplitudes,
TransmissionGuess=trans_guess,
Thickness=thickness,
NumberDensity=number_density)
# Validate shape
self._validate_shape(dens_table)
self._validate_shape(trans_table)
self.assertAlmostEqual(dens_table.cell(0,1), 22.4062053)
self.assertAlmostEqual(dens_table.cell(9,1), 24.4514601)
self.assertAlmostEqual(trans_table.cell(0,1), 0.99245745)
self.assertAlmostEqual(trans_table.cell(9,1), 0.83100000)
#----------------------------------Failure cases------------------------------------------------
def test_bad_input(self):
masses = ['test', 'bad', 'input']
amplitudes = ['test', 'bad', 'input']
self.assertRaises(TypeError, VesuvioThickness, Masses=masses,
Amplitudes=amplitudes,
DensityWorkspace='dens_tbl',
TransmissionWorkspace='trans_tbl')
def test_mismatch_mass_amplitude_inputs(self):
masses = [1.0, 2.0, 3.0, 4.0]
amplitudes = [1.0, 2.0]
self.assertRaises(RuntimeError, VesuvioThickness, Masses=masses,
Amplitudes=amplitudes,
DensityWorkspace='dens_tbl',
TransmissionWorkspace='trans_tbl')
def test_no_masses_input(self):
masses = []
        amplitudes = [1.0, 2.0]
self.assertRaises(RuntimeError, VesuvioThickness, Masses=masses,
Amplitudes=amplitudes,
DensityWorkspace='dens_tbl',
TransmissionWorkspace='trans_tbl')
def test_no_amplitudes_input(self):
masses = [1.0, 2.0]
        amplitudes = []
self.assertRaises(RuntimeError, VesuvioThickness, Masses=masses,
Amplitudes=amplitudes,
DensityWorkspace='dens_tbl',
TransmissionWorkspace='trans_tbl')
#--------------------------------Validate results------------------------------------------------
def _validate_shape(self, table_ws):
self.assertTrue(isinstance(table_ws, ITableWorkspace))
self.assertEqual(table_ws.columnCount(), 2)
self.assertEqual(table_ws.rowCount(), 10)
self.assertEqual(table_ws.cell(0,0), str(1))
self.assertEqual(table_ws.cell(9,0), str(10))
if __name__=="__main__":
unittest.main()
|
jelly/calibre
|
src/calibre/ebooks/lrf/html/table.py
|
Python
|
gpl-3.0
| 13,980
| 0.001788
|
__license__ = 'GPL v3'
__copyright__ = '2008, Kovid Goyal <kovid at kovidgoyal.net>'
import math, sys, re
from calibre.ebooks.lrf.fonts import get_font
from calibre.ebooks.lrf.pylrs.pylrs import TextBlock, Text, CR, Span, \
CharButton, Plot, Paragraph, \
LrsTextTag
def ceil(num):
return int(math.ceil(num))
def print_xml(elem):
from calibre.ebooks.lrf.pylrs.pylrs import ElementWriter
elem = elem.toElement('utf8')
ew = ElementWriter(elem, sourceEncoding='utf8')
ew.write(sys.stdout)
print
def cattrs(base, extra):
new = base.copy()
new.update(extra)
return new
def tokens(tb):
    '''
    Yield the next token. A token is one of:
      1. a string: a run of text that shares the same style
      2. an integer: 1 for a paragraph break, 2 for a line break
      3. a Plot object
    '''
def process_element(x, attrs):
if isinstance(x, CR):
yield 2, None
elif isinstance(x, Text):
yield x.text, cattrs(attrs, {})
elif isinstance(x, basestring):
yield x, cattrs(attrs, {})
elif isinstance(x, (CharButton, LrsTextTag)):
if x.contents:
if hasattr(x.contents[0], 'text'):
yield x.contents[0].text, cattrs(attrs, {})
elif hasattr(x.contents[0], 'attrs'):
for z in process_element(x.contents[0], x.contents[0].attrs):
yield z
elif isinstance(x, Plot):
yield x, None
elif isinstance(x, Span):
attrs = cattrs(attrs, x.attrs)
for y in x.contents:
for z in process_element(y, attrs):
yield z
for i in tb.contents:
if isinstance(i, CR):
yield 1, None
elif isinstance(i, Paragraph):
for j in i.contents:
attrs = {}
if hasattr(j, 'attrs'):
attrs = j.attrs
for k in process_element(j, attrs):
yield k
class Cell(object):
def __init__(self, conv, tag, css):
self.conv = conv
self.tag = tag
self.css = css
self.text_blocks = []
self.pwidth = -1.
if tag.has_key('width') and '%' in tag['width']: # noqa
try:
self.pwidth = float(tag['width'].replace('%', ''))
except ValueError:
pass
if css.has_key('width') and '%' in css['width']: # noqa
try:
self.pwidth = float(css['width'].replace('%', ''))
except ValueError:
pass
if self.pwidth > 100:
self.pwidth = -1
self.rowspan = self.colspan = 1
try:
self.colspan = int(tag['colspan']) if tag.has_key('colspan') else 1 # noqa
self.rowspan = int(tag['rowspan']) if tag.has_key('rowspan') else 1 # noqa
except:
pass
pp = conv.current_page
conv.book.allow_new_page = False
conv.current_page = conv.book.create_page()
conv.parse_tag(tag, css)
conv.end_current_block()
for item in conv.current_page.contents:
if isinstance(item, TextBlock):
self.text_blocks.append(item)
conv.current_page = pp
conv.book.allow_new_page = True
if not self.text_blocks:
tb = conv.book.create_text_block()
tb.Paragraph(' ')
self.text_blocks.append(tb)
for tb in self.text_blocks:
tb.parent = None
tb.objId = 0
# Needed as we have to eventually change this BlockStyle's width and
# height attributes. This blockstyle may be shared with other
# elements, so doing that causes havoc.
tb.blockStyle = conv.book.create_block_style()
ts = conv.book.create_text_style(**tb.textStyle.attrs)
ts.attrs['parindent'] = 0
tb.textStyle = ts
if ts.attrs['align'] == 'foot':
if isinstance(tb.contents[-1], Paragraph):
tb.contents[-1].append(' ')
def pts_to_pixels(self, pts):
pts = int(pts)
        return ceil((float(self.conv.profile.dpi)/72.)*(pts/10.))
def minimum_width(self):
return max([self.minimum_tb_width(tb) for tb in self.text_blocks])
    def minimum_tb_width(self, tb):
ts = tb.textStyle.attrs
default_font = get_font(ts['fontfacename'], self.pts_to_pixels(ts['fontsize']))
parindent = self.pts_to_pixels(ts['parindent'])
mwidth = 0
for token, attrs in tokens(tb):
font = default_font
if isinstance(token, int): # Handle para and line breaks
continue
if isinstance(token, Plot):
return self.pts_to_pixels(token.xsize)
ff = attrs.get('fontfacename', ts['fontfacename'])
fs = attrs.get('fontsize', ts['fontsize'])
if (ff, fs) != (ts['fontfacename'], ts['fontsize']):
font = get_font(ff, self.pts_to_pixels(fs))
if not token.strip():
continue
word = token.split()
word = word[0] if word else ""
width = font.getsize(word)[0]
if width > mwidth:
mwidth = width
return parindent + mwidth + 2
def text_block_size(self, tb, maxwidth=sys.maxint, debug=False):
ts = tb.textStyle.attrs
default_font = get_font(ts['fontfacename'], self.pts_to_pixels(ts['fontsize']))
parindent = self.pts_to_pixels(ts['parindent'])
top, bottom, left, right = 0, 0, parindent, parindent
def add_word(width, height, left, right, top, bottom, ls, ws):
if left + width > maxwidth:
left = width + ws
top += ls
bottom = top+ls if top+ls > bottom else bottom
else:
left += (width + ws)
right = left if left > right else right
bottom = top+ls if top+ls > bottom else bottom
return left, right, top, bottom
for token, attrs in tokens(tb):
if attrs is None:
attrs = {}
font = default_font
ls = self.pts_to_pixels(attrs.get('baselineskip', ts['baselineskip']))+\
self.pts_to_pixels(attrs.get('linespace', ts['linespace']))
ws = self.pts_to_pixels(attrs.get('wordspace', ts['wordspace']))
if isinstance(token, int): # Handle para and line breaks
if top != bottom: # Previous element not a line break
top = bottom
else:
top += ls
bottom += ls
                left = parindent if token == 1 else 0
continue
if isinstance(token, Plot):
width, height = self.pts_to_pixels(token.xsize), self.pts_to_pixels(token.ysize)
left, right, top, bottom = add_word(width, height, left, right, top, bottom, height, ws)
continue
ff = attrs.get('fontfacename', ts['fontfacename'])
fs = attrs.get('fontsize', ts['fontsize'])
if (ff, fs) != (ts['fontfacename'], ts['fontsize']):
font = get_font(ff, self.pts_to_pixels(fs))
for word in token.split():
width, height = font.getsize(word)
left, right, top, bottom = add_word(width, height, left, right, top, bottom, ls, ws)
return right+3+max(parindent, 10), bottom
def text_block_preferred_width(self, tb, debug=False):
return self.text_block_size(tb, sys.maxint, debug=debug)[0]
def preferred_width(self, debug=False):
return ceil(max([self.text_block_preferred_width(i, debug=debug) for i in self.text_blocks]))
def height(self, width):
return sum([self.text_block_size(i, width)[1] for i in self.text_blocks])
class Row(object):
def __init__(self, conv, row, css, colpad):
self.cells = []
self.colpad = colpad
cells = row.findAll(re.compile('td|th', re.IGNORECASE))
|
sigmapi-gammaiota/sigmapi-web
|
sigmapiweb/apps/PubSite/views.py
|
Python
|
mit
| 4,610
| 0.000651
|
"""
Views for PubSite app.
"""
from django.conf import settings
from django.contrib.auth.views import (
PasswordResetView,
PasswordResetDoneView,
PasswordResetConfirmView,
PasswordResetCompleteView,
)
from django.shortcuts import render
import requests
import logging
logger = logging.getLogger(__name__)
def _get_context(page_name):
return {
"pages": settings.PUBLIC_PAGES,
"current_page_name": page_name,
}
def index(request):
"""
View for the static index page
"""
return render(request, "public/home.html", _get_context("Home"))
def about(request):
"""
View for the static chapter history page.
"""
return render(request, "public/about.html", _get_context("About"))
def activities(request):
"""
View for the static chapter service page.
"""
return render(
request,
"public/activities.html",
_get_context("Service & Activities"),
)
def rush(request):
"""
    View for the static rush page.
"""
return render(
request,
"public/rush.html",
_get_context("Rush"),
)
def campaign(request):
"""
View for the campaign service page.
"""
# Overrride requests Session authentication handling
class NoRebuildAuthSession(requests.Session):
def rebuild_auth(self, prepared_request, response):
"""
No code here means requests will always preserve the Authorization
header when redirected.
Be careful not to leak your credentials to untrusted hosts!
"""
url = "https://api.givebutter.com/v1/transactions/"
headers = {"Authorization": f"Bearer {settings.GIVEBUTTER_API_KEY}"}
response = None
# Create custom requests session
session = NoRebuildAuthSession()
# Make GET request to server, timeout in seconds
try:
r = session.get(url, headers=headers, timeout=0.75)
if r.status_code == 200:
response = r.json()
else:
logger.error(f"ERROR in request: {r.status_code}")
except requests.exceptions.Timeout:
logger.warning("Connection to GiveButter API Timed out")
except requests.ConnectionError:
logger.warning("Connection to GiveButter API could not be resolved")
except requests.exceptions.RequestException:
logger.error(
"An unknown issue occurred while trying to retrieve GiveButter Donor List"
)
# Grab context object to use later
ctx = _get_context("Campaign")
# Check for successful response, if so - filter, sort, and format data
if response and "data" in response:
response = response["data"] # Pull data from GET response object
logger.debug(f"GiveButter API Response: {response}")
# Filter by only successful transactions, then sort by amount descending
successful_txs = [tx for tx in response if tx["status"] == "succeeded"]
sorted_txs = sorted(successful_txs, key=lambda tx: tx["amount"], reverse=True)
# Clean data to a list of dictionaries & remove unnecessary data
transactions = [
{
"name": tx["giving_space"]["name"],
"amount": tx["giving_space"]["amount"],
"message": tx["giving_space"]["message"],
}
for tx in sorted_txs[:20]
]
# Attach transaction dictionary & length to context object
ctx["transactions"] = transactions
ctx["num_txs"] = len(successful_txs)
return render(
request,
"public/campaign.html",
ctx,
)
def permission_denied(request):
"""
View for 403 (Permission Denied) error.
"""
return render(
request,
"common/403.html",
_get_context("Permission Denied"),
)
def handler404(request, exception):
    """View for 404 (Page Not Found) error."""
    return render(request, "common/404.html", _get_context("Page Not Found"))
class ResetPassword(PasswordResetView):
template_name = "password_reset/password_reset_form.html"
class ResetPasswordDone(PasswordResetDoneView):
template_name = "password_reset/password_reset_done.html"
class ResetPasswordConfirm(PasswordResetConfirmView):
template_name = "password_reset/password_reset_confirm.html"
class ResetPasswordComplete(PasswordResetCompleteView):
    template_name = "password_reset/password_reset_complete.html"
|
jzcxer/0Math
|
python/dictionary/Merriam_Webster_api.py
|
Python
|
gpl-3.0
| 987
| 0.020263
|
from lxml import etree
import requests
import re
# -*- coding: utf-8 -*-
def getResource(word):
r = requests.get("http://www.dictionaryapi.com/api/v1/references/learners/xml/"+word+"?key=508b6e11-3920-41fe-a57a-d379deacf188")
return r.text[39:]
def isWord(entry,word):
    # the entry id may carry a bracketed index suffix, e.g. "apple[1]"
    return re.fullmatch(word + r"\[*\d*\]*", entry) is not None
def parse_entry(entry):
meanings=[]
for children in entry:
if children.tag=="fl":
meanings.append(children.text)
        if children.tag=="def":
for x in children:
if x.tag=="dt":
if x.text is not None:
meanings.append(x.text[1:])
return meanings
# main loop
def getDefintion(word):
    root = etree.XML(getResource(word), etree.XMLParser(remove_blank_text=True))
meaning_list=[]
for entry in root:
if isWord(entry.attrib["id"],word):
meaning_list.append(parse_entry(entry))
return meaning_list
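# Illustrative usage sketch (assumes network access and a valid API key;
# output shape follows parse_entry above, one list per matching entry):
#   for senses in getDefintion("apple"):
#       print(senses)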
|
MSeifert04/astropy
|
astropy/units/format/generic.py
|
Python
|
bsd-3-clause
| 18,514
| 0.000162
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# This module includes files automatically generated from ply (these end in
# _lextab.py and _parsetab.py). To generate these files, remove them from this
# folder, then build astropy and run the tests in-place:
#
# python setup.py build_ext --inplace
# pytest astropy/units
#
# You can then commit the changes to the re-generated _lextab.py and
# _parsetab.py files.
"""
Handles a "generic" string format for units
"""
import os
import re
import warnings
import sys
from fractions import Fraction
import unicodedata
from . import core, utils
from .base import Base
from astropy.utils import classproperty
from astropy.utils.misc import did_you_mean
def _is_ascii(s):
if sys.version_info >= (3, 7, 0):
return s.isascii()
else:
try:
s.encode('ascii')
return True
except UnicodeEncodeError:
return False
def _to_string(cls, unit):
if isinstance(unit, core.CompositeUnit):
parts = []
if cls._show_scale and unit.scale != 1:
parts.append(f'{unit.scale:g}')
if len(unit.bases):
positives, negatives = utils.get_grouped_by_powers(
unit.bases, unit.powers)
if len(positives):
parts.append(cls._format_unit_list(positives))
elif len(parts) == 0:
parts.append('1')
if len(negatives):
parts.append('/')
unit_list = cls._format_unit_list(negatives)
if len(negatives) == 1:
parts.append(f'{unit_list}')
else:
parts.append(f'({unit_list})')
return ' '.join(parts)
elif isinstance(unit, core.NamedUnit):
return cls._get_unit_name(unit)
class Generic(Base):
"""
A "generic" format.
The syntax of the format is based directly on the FITS standard,
but instead of only supporting the units that FITS knows about, it
supports any unit available in the `astropy.units` namespace.
"""
_show_scale = True
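    # e.g. (illustrative): generic-format strings such as "10 km / s" or
    # "m**2" are handled by the lexer/parser built below, typically reached
    # via astropy.units.Unit("km / s", format="generic").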
_tokens = (
'DOUBLE_STAR',
'STAR',
'PERIOD',
'SOLIDUS',
'CARET',
'OPEN_PAREN',
'CLOSE_PAREN',
'FUNCNAME',
'UNIT',
'SIGN',
'UINT',
'UFLOAT'
)
@classproperty(lazy=True)
def _all_units(cls):
return cls._generate_unit_names()
@classproperty(lazy=True)
def _units(cls):
return cls._all_units[0]
@classproperty(lazy=True)
def _deprecated_units(cls):
return cls._all_units[1]
@classproperty(lazy=True)
def _functions(cls):
return cls._all_units[2]
@classproperty(lazy=True)
def _parser(cls):
return cls._make_parser()
@classproperty(lazy=True)
def _lexer(cls):
return cls._make_lexer()
@classmethod
def _make_lexer(cls):
from astropy.extern.ply import lex
tokens = cls._tokens
t_STAR = r'\*'
t_PERIOD = r'\.'
t_SOLIDUS = r'/'
t_DOUBLE_STAR = r'\*\*'
t_CARET = r'\^'
t_OPEN_PAREN = r'\('
t_CLOSE_PAREN = r'\)'
# NOTE THE ORDERING OF THESE RULES IS IMPORTANT!!
# Regular expression rules for simple tokens
def t_UFLOAT(t):
r'((\d+\.?\d*)|(\.\d+))([eE][+-]?\d+)?'
if not re.search(r'[eE\.]', t.value):
t.type = 'UINT'
t.value = int(t.value)
elif t.value.endswith('.'):
t.type = 'UINT'
t.value = int(t.value[:-1])
else:
t.value = float(t.value)
return t
def t_UINT(t):
r'\d+'
t.value = int(t.value)
return t
def t_SIGN(t):
r'[+-](?=\d)'
t.value = int(t.value + '1')
return t
# This needs to be a function so we can force it to happen
# before t_UNIT
def t_FUNCNAME(t):
r'((sqrt)|(ln)|(exp)|(log)|(mag)|(dB)|(dex))(?=\ *\()'
return t
def t_UNIT(t):
"%|([YZEPTGMkhdcmu\N{MICRO SIGN}npfazy]?'((?!\\d)\\w)+')|((?!\\d)\\w)+"
t.value = cls._get_unit(t)
return t
t_ignore = ' '
# Error handling rule
def t_error(t):
raise ValueError(
f"Invalid character at col {t.lexpos}")
lexer_exists = os.path.exists(os.path.join(os.path.dirname(__file__),
'generic_lextab.py'))
lexer = lex.lex(optimize=True, lextab='generic_lextab',
outputdir=os.path.dirname(__file__),
reflags=int(re.UNICODE))
if not lexer_exists:
cls._add_tab_header('generic_lextab')
return lexer
@classmethod
def _make_parser(cls):
"""
The grammar here is based on the description in the `FITS
standard
<http://fits.gsfc.nasa.gov/standard30/fits_standard30aa.pdf>`_,
Section 4.3, which is not terribly precise. The exact grammar
is here is based on the YACC grammar in the `unity library
<https://bitbucket.org/nxg/unity/>`_.
This same grammar is used by the `"fits"` and `"vounit"`
formats, the only difference being the set of available unit
strings.
"""
from astropy.extern.ply import yacc
tokens = cls._tokens
def p_main(p):
'''
main : product_of_units
| factor product_of_units
| factor product product_of_units
| division_product_of_units
| factor division_product_of_units
| factor product division_product_of_units
| inverse_unit
| factor inverse_unit
| factor product inverse_unit
| factor
'''
from astropy.units.core import Unit
if len(p) == 2:
p[0] = Unit(p[1])
elif len(p) == 3:
p[0] = Unit(p[1] * p[2])
elif len(p) == 4:
p[0] = Unit(p[1] * p[3])
def p_division_product_of_units(p):
'''
division_product_of_units : division_product_of_units division product_of_units
| product_of_units
'''
from astropy.units.core import Unit
if len(p) == 4:
p[0] = Unit(p[1] / p[3])
else:
p[0] = p[1]
def p_inverse_unit(p):
'''
inverse_unit : division unit_expression
'''
p[0] = p[2] ** -1
def p_factor(p):
'''
factor : factor_fits
| factor_float
| factor_int
'''
p[0] = p[1]
def p_factor_float(p):
'''
factor_float : signed_float
| signed_float UINT signed_int
| signed_float UINT power numeric_power
'''
if cls.name == 'fits':
raise ValueError("Numeric factor not supported by FITS")
if len(p) == 4:
p[0] = p[1] * p[2] ** float(p[3])
elif len(p) == 5:
p[0] = p[1] * p[2] ** float(p[4])
elif len(p) == 2:
p[0] = p[1]
def p_factor_int(p):
'''
factor_int : UINT
| UINT signed_int
| UINT power numeric_power
| UINT UINT signed_int
| UINT UINT power numeric_power
'''
if cls.name == 'fits':
raise ValueError("Numeric factor not supported by FITS")
if len(p) == 2:
p[0] = p[1]
elif len(p) == 3:
p[0] = p[1] ** float(p[2])
elif len(p) == 4:
if isinstance(p[2], int):
p[0] = p[1] * p[2] ** float(p[3])
|
nettitude/PoshC2
|
poshc2/client/command_handlers/SharpHandler.py
|
Python
|
bsd-3-clause
| 21,566
| 0.002782
|
import base64, re, traceback, os, string, subprocess
from prompt_toolkit import PromptSession
from prompt_toolkit.history import FileHistory
from prompt_toolkit.auto_suggest import AutoSuggestFromHistory
from prompt_toolkit.styles import Style
from poshc2.client.Alias import cs_alias, cs_replace
from poshc2.Colours import Colours
from poshc2.server.AutoLoads import check_module_loaded, run_autoloads_sharp
from poshc2.client.Help import sharp_help, allhelp
from poshc2.server.Config import PoshInstallDirectory, PoshProjectDirectory, SocksHost, PayloadsDirectory, ModulesDirectory
from poshc2.server.Config import PayloadCommsHost, DomainFrontHeader, UserAgent, PBindPipeName, PBindSecret, FCommFileName
from poshc2.Utils import argp, load_file, gen_key, get_first_url, get_first_dfheader
from poshc2.server.Core import print_bad, print_good
from poshc2.client.cli.CommandPromptCompleter import FilePathCompleter
from poshc2.server.payloads.Payloads import Payloads
from poshc2.server.PowerStatus import getpowerstatus
from poshc2.server.database.DB import new_task, kill_implant, get_implantdetails, get_sharpurls, get_baseenckey, get_powerstatusbyrandomuri
from poshc2.server.database.DB import select_item, update_label, get_allurls, get_c2server_all, get_newimplanturl, new_urldetails
def handle_sharp_command(command, user, randomuri, implant_id):
# alias mapping
for alias in cs_alias:
if alias[0] == command[:len(command.rstrip())]:
command = alias[1]
# alias replace
for alias in cs_replace:
if command.startswith(alias[0]):
command = command.replace(alias[0], alias[1])
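    # Illustrative shape of the alias tables used above (the values are
    # invented, not actual PoshC2 aliases): cs_alias holds
    # ("shorthand", "full command") pairs, and cs_replace holds
    # ("prefix", "replacement") pairs.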
original_command = command
command = command.strip()
run_autoloads_sharp(command, randomuri, user)
if command.startswith("searchhelp"):
do_searchhelp(user, command, randomuri)
return
elif command.startswith("searchallhelp"):
do_searchallhelp(user, command, randomuri)
return
elif command.startswith("searchhistory"):
do_searchhistory(user, command, randomuri)
return
elif command.startswith("upload-file"):
do_upload_file(user, command, randomuri)
return
elif command.startswith("inject-shellcode"):
        do_inject_shellcode(user, command, randomuri)
return
    elif command.startswith("migrate"):
do_migrate(user, command, randomuri)
return
elif command == "kill-implant" or command == "exit":
do_kill_implant(user, command, randomuri)
return
elif command == "sharpsocks":
do_sharpsocks(user, command, randomuri)
return
elif (command.startswith("stop-keystrokes")):
do_stop_keystrokes(user, command, randomuri)
return
elif (command.startswith("start-keystrokes")):
do_start_keystrokes(user, command, randomuri)
return
elif (command.startswith("get-keystrokes")):
do_get_keystrokes(user, command, randomuri)
return
elif (command.startswith("get-screenshotmulti")):
do_get_screenshotmulti(user, command, randomuri)
return
elif command.startswith("get-screenshot"):
do_get_screenshot(user, command, randomuri)
return
elif command == "getpowerstatus":
do_get_powerstatus(user, command, randomuri)
return
elif command == "stoppowerstatus":
do_stoppowerstatus(user, command, randomuri)
return
elif command.startswith("run-exe SharpWMI.Program") and "execute" in command and "payload" not in command:
do_sharpwmi_execute(user, command, randomuri)
return
elif (command.startswith("get-hash")):
do_get_hash(user, command, randomuri)
return
elif (command.startswith("enable-rotation")):
do_rotation(user, command, randomuri)
return
elif (command.startswith("safetykatz")):
do_safetykatz(user, command, randomuri)
return
elif command.startswith("loadmoduleforce"):
do_loadmoduleforce(user, command, randomuri)
return
elif command.startswith("loadmodule"):
do_loadmodule(user, command, randomuri)
return
elif command.startswith("listmodules"):
do_listmodules(user, command, randomuri)
return
elif command.startswith("modulesloaded"):
do_modulesloaded(user, command, randomuri)
return
elif command.startswith("pbind-connect"):
do_pbind_start(user, command, randomuri)
return
elif command.startswith("fcomm-connect"):
do_fcomm_start(user, command, randomuri)
return
elif command.startswith("dynamic-code"):
do_dynamic_code(user, command, randomuri)
return
elif command.startswith("startdaisy"):
do_startdaisy(user, command, randomuri)
return
elif command == "help":
do_help(user, command, randomuri)
return
else:
if command:
do_shell(user, original_command, randomuri)
return
def do_searchhelp(user, command, randomuri):
searchterm = (command).replace("searchhelp ", "")
helpful = sharp_help.split('\n')
for line in helpful:
if searchterm in line.lower():
print(Colours.GREEN + line)
def do_searchallhelp(user, command, randomuri):
searchterm = (command).replace("searchallhelp ", "")
for line in allhelp:
if searchterm in line.lower():
print(Colours.GREEN + line)
def do_searchhistory(user, command, randomuri):
searchterm = (command).replace("searchhistory ", "")
with open('%s/.implant-history' % PoshProjectDirectory) as hisfile:
for line in hisfile:
if searchterm in line.lower():
print(Colours.GREEN + line.replace("+", ""))
def do_upload_file(user, command, randomuri):
# TODO lots of common code
source = ""
destination = ""
if command == "upload-file":
style = Style.from_dict({
'': '#80d130',
})
session = PromptSession(history=FileHistory('%s/.upload-history' % PoshProjectDirectory), auto_suggest=AutoSuggestFromHistory(), style=style)
try:
source = session.prompt("Location file to upload: ", completer=FilePathCompleter(PayloadsDirectory, glob="*"))
source = PayloadsDirectory + source
except KeyboardInterrupt:
return
while not os.path.isfile(source):
print("File does not exist: %s" % source)
source = session.prompt("Location file to upload: ", completer=FilePathCompleter(PayloadsDirectory, glob="*"))
source = PayloadsDirectory + source
destination = session.prompt("Location to upload to: ")
else:
args = argp(command)
source = args.source
destination = args.destination
try:
destination = destination.replace("\\", "\\\\")
print("")
print("Uploading %s to %s" % (source, destination))
uploadcommand = f"upload-file {source} {destination}"
new_task(uploadcommand, user, randomuri)
except Exception as e:
print("Error with source file: %s" % e)
traceback.print_exc()
def do_inject_shellcode(user, command, randomuri):
params = re.compile("inject-shellcode", re.IGNORECASE)
params = params.sub("", command)
style = Style.from_dict({
'': '#80d130',
})
session = PromptSession(history=FileHistory('%s/.shellcode-history' % PoshProjectDirectory), auto_suggest=AutoSuggestFromHistory(), style=style)
try:
path = session.prompt("Location of shellcode file: ", completer=FilePathCompleter(PayloadsDirectory, glob="*.bin"))
path = PayloadsDirectory + path
except KeyboardInterrupt:
return
try:
shellcodefile = load_file(path)
if shellcodefile is not None:
new_task("run-exe Core.Program Core Inject-Shellcode %s%s #%s" % (base64.b64encode(shellcodefile).decode("utf-8"), params, os.path.basename(path)), user, randomuri)
except Exception as e:
print("Error loading file: %s" % e)
def do_migrate(user, command, randomuri):
params = re.compile("migrate", re.IGNORECASE)
|
anpe9592/projectEuler
|
11-20/problem12.py
|
Python
|
mit
| 273
| 0.007326
|
# problem12.py
import math
# Project Euler 12: find the first triangle number with over 500 divisors.
x = 0
z = 0
n = 1
i = True
while i != False:
    z = n * (n + 1) // 2
    # count divisors in pairs (k, z // k) for k up to sqrt(z);
    # start at 2 to account for the pair (1, z)
    x = 2
    y = int(math.sqrt(z))
    for k in range(2, y + 1):
        if z % k == 0:
            x += 2
    if y * y == z:
        x -= 1  # a perfect-square root was counted twice
    if x > 500:  # "over 500" divisors; the count need not hit 500 exactly
        i = False
    n += 1
print(z)
|
caspervg/pylex
|
src/pylex/user.py
|
Python
|
mit
| 1,830
| 0
|
import requests
from .route import Route
class UserRoute(Route):
def me(self):
"""
Return the currently authenticated user
:rtype: dict
"""
return self._get_json('user')
def user(self, id):
"""
Return the user with given id
:rtype: dict
"""
return self._get_json('user/{0}', id)
def all(self, start=0, amount=10):
"""
Return a list of all users
:rtype: list
"""
return self._get_json('user/all', start=start, amount=amount)
def dl_history(self):
"""
Return the download history of the authenticated user
:rtype: dict
"""
return self._get_json('user/download-history')
def dl_list(self):
"""
Return the download (later) list of the authenticated user
:rtype: dict
"""
return self._get_json('user/download-list')
def register(self, username, password, email, full_name):
"""
Registers a new user on the LEX
:rtype: None
"""
if all([username, password, email, full_name]):
r = requests.post(self._base + 'user/register', params={
'username': username,
'password_1': password,
'password_2': password,
'email': email,
'fullname': full_name
})
r.raise_for_status()
else:
            raise Exception('None of the arguments may be "None"')
def activate(self, key):
"""
Activates a new registree on the LEX with given activation key
:rtype: None
"""
url = self._base + 'user/activate'
r = requests.get(url, params={
'activation_key': key
})
r.raise_for_status()
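# Illustrative usage sketch (the Route base class constructor is not shown in
# this file, so its arguments here are assumptions):
#   route = UserRoute(base_url)
#   me = route.me()
#   users = route.all(start=0, amount=25)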
|
zach-morris/plugin.program.iarl
|
resources/lib/historydat_parser.py
|
Python
|
gpl-2.0
| 6,720
| 0.001637
|
from __future__ import print_function
import sys
import re
import string
#TODO:
# * fail if an unexpected state occurs
_verbose = False
_echo_file = False
class Game:
regex = re.compile(r'(.+)\s+\(c\)\s+([0-9]+)\s+(.+)')
def __init__(self, systems, romnames):
self.systems = systems
self.romnames = romnames
self.bio = []
self.name = None
self.publisher = None
self.year = None
    def _filter_line(self, line):
        # keep only printable characters and return a str (a bare filter()
        # would be a lazy iterator on Python 3 and break ''.join later)
        return ''.join(x for x in line if x in string.printable)
def _add_to_bio(self, line):
line = self._filter_line(line)
self.bio.append(line)
# name information is on the second line of the bio
if self.name is None and len(self.bio) == 2:
parsed = self._parse_name_info(line)
if parsed is not None:
self.name = parsed[0]
self.year = parsed[1]
self.publisher = parsed[2]
def _parse_name_info(self, line):
match = self.regex.match(line.strip())
if match is not None:
groups = match.groups()
if len(groups) == 3:
return groups
if _verbose:
print('Failed to parse info line:')
print(line)
return None
def get_bio(self):
return ''.join(self.bio)
class StateInfo:
def __init__(self, state):
self.state = state
STATE_END, STATE_GAME, STATE_BIO = range(3)
class HistDatParser:
_known_systems = {
'snes': 'Super Nintendo',
'nes': 'Nintendo Entertainment System',
'info': 'Unknown game system',
'gba': 'Gameboy Advance',
'n64': 'Nintendo 64',
'gbcolor': 'Gameboy Color',
'sg1000': 'Sega Game 1000',
'cpc_cass': 'Amstrad CPC (Cassette)',
'cpc_flop': 'Amstrad CPC (Floppy)',
'bbca_cas': 'BBC Micro A (Cassette)',
'megadriv': 'Sega Megadrive',
'channelf': 'Fairchild Channel F',
'a7800': 'Atari 7800',
'a2600': 'Atari 2600',
'crvision': '',
'cdi': '',
'coleco': '',
'neogeo': '',
'scv': '',
'pcecd': '',
'msx2_cart': '',
'sms': '',
'neocd': '',
'vc4000': '',
'studio2': '',
'pce': '',
        'saturn': '',
'sat_cart': '',
'aquarius': '',
'gamegear': '',
'coco_cart': '',
'xegs': '',
'x68k_flop': '',
'gameboy': '',
'alice32': '',
'a5200': '',
'a800': '',
'advision': '',
'c64_cart': '',
'c64_flop': '',
'mac_flop': '',
'mac_hdd': '',
'arcadia': '',
'apfm1000': '',
'apple2gs': '',
'famicom_flop': '',
'intv': '',
'alice90': '',
'lynx': '',
'msx1_cart': '',
'megacd': '',
'megacdj': ''
}
_unknown_systems = set()
def __init__(self, filename):
self.datfile = open(filename)
self._games_by_gamekey = {}
self._parse()
TOKEN_GAMEID, TOKEN_BIO, TOKEN_END = range(3)
def _parse_token(self, line):
parsed = None
        if line[0] == '$':
line = line[1:]
if line.strip() == 'end':
parsed = [self.TOKEN_END]
elif line.strip() == 'bio':
parsed = [self.TOKEN_BIO]
else:
eqIdx = line.find('=')
            if eqIdx != -1:
systemsline = line[0:eqIdx]
parsed = []
parsed.append(self.TOKEN_GAMEID)
systems = systemsline.strip().split(',')
for system in systems:
                    # record any system we do not have a friendly name for
                    if system not in self._known_systems:
                        self._unknown_systems.add(system)
parsed.append(systems)
line = line[eqIdx + 1:]
romnames = line.strip().split(',')
romnames = [rom.strip()
for rom in romnames if len(rom) > 0]
parsed.append(romnames)
return parsed
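    # Illustrative MAME-style history.dat snippet handled by this parser
    # (the rom names are examples only):
    #   $info=dkong,dkongj
    #   $bio
    #   Donkey Kong (c) 1981 Nintendo.
    #   ...
    #   $end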
def _parse(self):
state_info = StateInfo(StateInfo.STATE_END)
for line in self.datfile:
if _echo_file:
print(line, end='')
parsed = self._parse_token(line)
if state_info.state is StateInfo.STATE_END:
if parsed is not None:
if parsed[0] is self.TOKEN_GAMEID:
game = self._add_game(parsed)
state_info = StateInfo(StateInfo.STATE_GAME)
state_info.game = game
elif parsed[0] is self.TOKEN_END:
continue
else:
raise Exception('Expected a new system after $end')
elif state_info.state is StateInfo.STATE_GAME:
if parsed is not None:
if parsed[0] is self.TOKEN_BIO:
game = state_info.game
                        state_info = StateInfo(StateInfo.STATE_BIO)
state_info.game = game
elif state_info.state is StateInfo.STATE_BIO:
if parsed is not None:
if parsed[0] is self.TOKEN_END:
state_info = StateInfo(StateInfo.STATE_END)
else:
state_info.game._add_to_bio(line)
else:
raise Exception('Unexpected parse state')
if _verbose:
if len(self._unknown_systems) > 0:
print("Found unknown game systems:")
for system in self._unknown_systems:
print(system)
def _get_gamekey(self, system, romname):
return '{0}_{1}'.format(system, romname)
def _add_game(self, parsed):
        assert parsed[0] == HistDatParser.TOKEN_GAMEID
systems = parsed[1]
romnames = parsed[2]
game = Game(systems, romnames)
for system in systems:
for romname in romnames:
key = self._get_gamekey(system, romname)
self._games_by_gamekey[key] = game
return game
def get_game(self, system, romname):
key = self._get_gamekey(system, romname)
        if key in self._games_by_gamekey:
return self._games_by_gamekey[key]
return None
if __name__ == '__main__':
filename = sys.argv[1]
parser = HistDatParser(filename)
g = parser.get_game('info', 'dkong')
print(g.get_bio())
|
chipaca/snapcraft
|
tests/unit/store/test_store_client.py
|
Python
|
gpl-3.0
| 63,982
| 0.000797
|
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright 2016-2021 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import json
import logging
import os
import pathlib
import tempfile
from textwrap import dedent
from unittest import mock
import fixtures
from testtools.matchers import Contains, Equals, FileExists, Is, IsInstance, Not
import tests
from snapcraft import storeapi
from snapcraft.storeapi import errors, http_clients
from snapcraft.storeapi.v2 import channel_map, releases, whoami
from tests import fixture_setup, unit
class StoreTestCase(unit.TestCase):
def setUp(self):
super().setUp()
self.fake_store = self.useFixture(fixture_setup.FakeStore())
self.client = storeapi.StoreClient()
class LoginTestCase(StoreTestCase):
def test_login_successful(self):
self.client.login(email="dummy email", password="test correct password")
self.assertIsNotNone(self.client.auth_client.auth)
def test_login_successful_with_one_time_password(self):
self.client.login(
email="dummy email",
password="test correct password",
otp="test correct one-time password",
)
self.assertIsNotNone(self.client.auth_client.auth)
def test_login_successful_with_package_attenuation(self):
self.client.login(
email="dummy email",
password="test correct password",
packages=[{"name": "foo", "series": "16"}],
)
self.assertIsNotNone(self.client.auth_client.auth)
def test_login_successful_with_channel_attenuation(self):
self.client.login(
email="dummy email", password="test correct password", channels=["edge"]
)
self.assertIsNotNone(self.client.auth_client.auth)
def test_login_successful_fully_attenuated(self):
self.client.login(
email="dummy email",
password="test correct password",
packages=[{"name": "foo", "series": "16"}],
channels=["edge"],
save=False,
)
# Client configuration is filled, but it's not saved on disk.
self.assertThat(
self.client.auth_client._conf._get_config_path(), Not(FileExists())
)
self.assertIsNotNone(self.client.auth_client.auth)
def test_login_successful_with_expiration(self):
self.client.login(
email="dummy email",
password="test correct password",
packages=[{"name": "foo", "series": "16"}],
channels=["edge"],
expires="2017-12-22",
)
self.assertIsNotNone(self.client.auth_client.auth)
def test_login_with_exported_login(self):
with pathlib.Path("test-exported-login").open("w") as config_fd:
print(
"[{}]".format(self.client.auth_client._conf._get_section_name()),
file=config_fd,
)
print(
"macaroon=MDAwZWxvY2F0aW9uIAowMDEwaWRlbnRpZmllciAKMDAxNGNpZCB0ZXN0IGNhd
|
mVhdAowMDE5dmlkIHRlc3QgdmVyaWZpYWNpb24KMDAxN2NsIGxvY2FsaG9zdDozNTM1MQowMDBmc2lnbmF0dXJlIAo",
file=config_fd,
)
print(
"unbound_discharge=MDAwZWxvY2F0aW9uIAowMDEwaWRlbnRpZmllciAKMDAwZnNpZ25hdHVyZSAK",
file=config_fd,
)
config_fd.flush()
with pathlib.Path("test-exported-login").open() as config_fd:
self.client.login(config_fd=config_fd)
self.assertThat(
self.client.auth_client._conf.get("macaroon"),
Equals(
"MDAwZWxvY2F0aW9uIAowMDEwaWRlbnRpZmllciAKMDAxNGNpZCB0ZXN0IGNhdmVhdAowMDE5dmlkIHRlc3QgdmVyaWZpYWNpb24KMDAxN2NsIGxvY2FsaG9zdDozNTM1MQowMDBmc2lnbmF0dXJlIAo"
),
)
self.assertThat(
self.client.auth_client._conf.get("unbound_discharge"),
Equals("MDAwZWxvY2F0aW9uIAowMDEwaWRlbnRpZmllciAKMDAwZnNpZ25hdHVyZSAK"),
)
self.assertThat(
self.client.auth_client.auth,
Equals(
"Macaroon root=MDAwZWxvY2F0aW9uIAowMDEwaWRlbnRpZmllciAKMDAxNGNpZCB0ZXN0IGNhdmVhdAowMDE5dmlkIHRlc3QgdmVyaWZpYWNpb24KMDAxN2NsIGxvY2FsaG9zdDozNTM1MQowMDBmc2lnbmF0dXJlIAo, discharge=MDAwZWxvY2F0aW9uIAowMDEwaWRlbnRpZmllciAKMDAyZnNpZ25hdHVyZSDmRizXTOkAmfmy5hGCm7F0H4LBea16YbJYVhDkAJZ-Ago"
),
)
def test_failed_login_with_wrong_password(self):
self.assertRaises(
http_clients.errors.StoreAuthenticationError,
self.client.login,
email="dummy email",
password="wrong password",
)
def test_failed_login_requires_one_time_password(self):
self.assertRaises(
http_clients.errors.StoreTwoFactorAuthenticationRequired,
self.client.login,
email="dummy email",
password="test requires 2fa",
)
def test_failed_login_with_wrong_one_time_password(self):
self.assertRaises(
http_clients.errors.StoreAuthenticationError,
self.client.login,
email="dummy email",
password="test correct password",
otp="wrong one-time password",
)
def test_failed_login_with_unregistered_snap(self):
raised = self.assertRaises(
errors.GeneralStoreError,
self.client.login,
email="dummy email",
password="test correct password",
packages=[{"name": "unregistered-snap-name", "series": "16"}],
)
self.assertThat(str(raised), Contains("not found"))
class DownloadTestCase(StoreTestCase):
# sha3-384 of tests/data/test-snap.snap
EXPECTED_SHA3_384 = ""
def test_download_nonexistent_snap_raises_exception(self):
self.client.login(email="dummy", password="test correct password")
raised = self.assertRaises(
errors.SnapNotFoundError,
self.client.download,
"nonexistent-snap",
risk="stable",
download_path="dummy.snap",
arch="test-arch",
)
self.expectThat(raised._snap_name, Equals("nonexistent-snap"))
self.expectThat(raised._channel, Is(None))
self.expectThat(raised._arch, Is(None))
def test_download_snap(self):
fake_logger = fixtures.FakeLogger(level=logging.INFO)
self.useFixture(fake_logger)
self.client.login(email="dummy", password="test correct password")
download_path = os.path.join(self.path, "test-snap.snap")
self.client.download("test-snap", risk="stable", download_path=download_path)
self.assertThat(download_path, FileExists())
def test_download_snap_missing_risk(self):
fake_logger = fixtures.FakeLogger(level=logging.INFO)
self.useFixture(fake_logger)
self.client.login(email="dummy", password="test correct password")
raised = self.assertRaises(
errors.SnapNotFoundError,
self.client.download,
"test-snap",
risk="beta",
download_path="dummy.snap",
)
self.expectThat(raised._snap_name, Equals("test-snap"))
self.expectThat(raised._channel, Equals("beta"))
self.expectThat(raised._arch, Is(None))
def test_download_from_brand_store_requires_store(self):
self.client.login(email="dummy", password="test correct password")
raised = self.assertRaises(
errors.SnapNotFoundError,
self.client.download,
"test-snap-brand-store",
risk="stable",
download_path="dummy.sna
|
marvinpinto/charlesbot
|
tests/slack/test_slack_base_object_children.py
|
Python
|
mit
| 1,314
| 0
|
import unittest
from charlesbot.slack.slack_channel_joined import SlackChannelJoined
from charlesbot.slack.slack_channel_left import SlackChannelLeft
from charlesbot.slack.slack_group_joined import SlackGroupJoined
from charlesbot.slack.slack_group_left import SlackGroupLeft
from charlesbot.slack.slack_message import SlackMessage
class TestSlackBaseObjectChildren(unittest.TestCase):
def test_slack_channel_joined_compatibility(self):
sc = SlackChannelJoined()
object_dict = {"type": "channel_joined"}
        self.assertTrue(sc.is_compatible(object_dict))
def test_slack_channel_left_compatibility(self):
sc = SlackChannelLeft()
object_dict = {"type": "channel_left"}
self.assertTrue(sc.is_compatible(object_dict))
    def test_slack_group_joined_compatibility(self):
sc = SlackGroupJoined()
object_dict = {"type": "group_joined"}
self.assertTrue(sc.is_compatible(object_dict))
def test_slack_group_left_compatibility(self):
sc = SlackGroupLeft()
object_dict = {"type": "group_left"}
self.assertTrue(sc.is_compatible(object_dict))
def test_slack_message_compatibility(self):
sc = SlackMessage()
object_dict = {"type": "message"}
self.assertTrue(sc.is_compatible(object_dict))
|
obi-two/Rebelion
|
data/scripts/templates/object/draft_schematic/space/armor/shared_mass_reduction_kit_mk4.py
|
Python
|
mit
| 463
| 0.047516
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Intangible()
result.template = "object/draft_schematic/space/armor/shared_mass_reduction_kit_mk4.iff"
result.attribute_template_id = -1
result.stfName("string_id_table","")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
pombredanne/django-avocado
|
avocado/store/tests/models.py
|
Python
|
bsd-3-clause
| 4,138
| 0.000483
|
from django.test import TestCase
from django.core.cache import cache as mcache
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from avocado.store.models import Scope, Perspective, Report
__all__ = ('ScopeTestCase', 'PerspectiveTestCase', 'ReportTestCase')
class ScopeTestCase(TestCase):
fixtures = ['test_data.yaml']
def setUp(self):
self.user = User.objects.get(pk=1)
self.context = Scope()
def test_is_valid(self):
self.assertTrue(self.context.is_valid({}))
self.assertFalse(self.context.is_valid([]))
class dict2(dict):
pass
self.assertTrue(self.context.is_valid(dict2()))
def test_has_permission(self):
self.assertTrue(self.context.has_permission())
self.assertTrue(self.context.has_permission(user=self.user))
node = {
'type': 'AND',
'children': [{
'id': 5,
'operator': 'exact',
'value': 10
}, {
'id': 3,
'operator': 'exact',
'value': True
}]
}
self.assertTrue(self.context.has_permission(node))
self.assertTrue(self.context.has_permission(node, self.user))
node['children'][0]['id'] = 1
self.assertFalse(self.context.has_permission(node))
self.assertTrue(self.context.has_permission(node, self.user))
node = {}
self.assertTrue(self.context.has_permission(node))
self.assertTrue(self.context.has_permission(node, self.user))
node = {
'id': 3,
'operator': 'exact',
'value': True
}
self.assertTrue(self.context.has_permission(node))
self.assertTrue(self.context.has_permission(node, self.user))
class PerspectiveTestCase(TestCase):
fixtures = ['test_data.yaml']
def setUp(self):
self.user = User.objects.get(pk=1)
self.context = Perspective()
def test_is_valid(self):
self.assertTrue(self.context.is_valid({}))
self.assertFalse(self.context.is_valid([]))
class dict2(dict):
pass
self.assertTrue(self.context.is_valid(dict2()))
def test_has_permission(self):
self.assertTrue(self.context.has_permission())
self.assertTrue(self.context.has_permission(user=self.user))
node = {}
self.assertTrue(self.context.has_permission(node))
self.assertTrue(self.context.has_permission(node, self.user))
node = {'columns': [1]}
self.assertFalse(self.context.has_permission(node))
self.assertFalse(self.context.has_permission(node, self.user))
node = {'ordering': [(1, 'desc')]}
self.assertFalse(self.context.has_permission(node))
self.assertFalse(self.context.has_permission(node, self.user))
class Object(object):
pass
class ReportTestCase(TestCase):
fixtures = ['test_data.yaml']
    def setUp(self):
mcache.clear()
self.user = User.objects.get(id=1)
self.client.login(username='foo', password='foo')
self.request = Object()
self.request.user = self.user
self.request.session = self.client.session
self.report = Report()
self.report.scope = Scope()
        self.report.perspective = Perspective()
def test_resolve_caching(self):
session = self.request.session
self.report.resolve(self.request, 'html')
cache = session[Report.REPORT_CACHE_KEY]
ts1 = cache['timestamp']
self.report.resolve(self.request, 'html')
ts2 = cache['timestamp']
self.report.resolve(self.request, 'html', page_num=100)
ts3 = cache['timestamp']
self.report.resolve(self.request, 'html', per_page=1)
ts4 = cache['timestamp']
self.report.resolve(self.request, 'html', per_page=1, page_num=1)
ts5 = cache['timestamp']
class SessionFlowTestCase(TestCase):
def test_new_session(self):
resp = self.client.get(reverse('workspace'))
print resp.request.session
|
primecloud-controller-org/pcc-cli
|
src/pcc/api/instance/stop_all_instance.py
|
Python
|
apache-2.0
| 327
| 0.006116
|
# -*- coding: utf-8 -*-
def command():
return "stop-all-instance"
def init_argument(parser):
parser.add_argument("--farm-no", required=True)
def execute(requester, args):
farm_no = args.farm_no
parameters = {}
parameters["FarmNo"] = farm_no
    return requester.execute("/StopAllInstance", parameters)
|
nsalomonis/AltAnalyze
|
import_scripts/filterFASTA.py
|
Python
|
apache-2.0
| 619
| 0.017771
|
from Bio import SeqIO
import sys, string
fasta_file = "/Users/saljh8/GitHub
|
/altanalyze/AltDatabase/EnsMart72/Hs/SequenceData/Homo_sapiens.GRCh37.72.cdna.all.fa" # Input fasta file
result_file = "/Users/saljh8/GitHub/altanalyze/AltDatabase/EnsMart72/Hs/SequenceData/Homo_sapiens.GRCh37.72.cdna.all.filtered.fa" # Output fasta file
fasta_sequences = SeqIO.parse(open(fasta_file),'fasta')
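# A note on the filter below (an assumption about the Ensembl cDNA header layout,
# not verified here): the 4th ':'-separated field of each description line holds
# the chromosome name, so float() succeeds only for numbered chromosomes and
# records on scaffolds/patches are silently skipped.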
with open(result_file, "w") as f:
for seq in fasta_sequences:
chr = string.split(seq.description,':')[3]
try:
float(chr)
SeqIO.write([seq], f, "fasta")
except: continue
|
shagabutdinov/sublime-method
|
method.py
|
Python
|
mit
| 6,962
| 0.020684
|
import sublime
import sublime_plugin
import re
from xml.dom import minidom
try:
from Expression import expression
except ImportError as error:
sublime.error_message("Dependency import failed; please read readme for " +
"Method plugin for installation instructions; to disable this " +
"message remove this plugin; message: " + str(error))
raise error
indented_languages = ['python', 'coffee', 'saas']
def extract_method(view, point = None):
if point == None:
point = view.sel()[0].begin()
result = None
for index, method in enumerate(extract_methods(view)):
if point > method['start']:
result = method
return result
def extract_methods(view):
methods = []
text = view.substr(sublime.Region(0, view.size()))
lines = text.split("\n")
for index, word in enumerate(view.find_by_selector('entity.name.function')):
start, body_start, privacy = _get_method_start(view, lines, word)
end, body_end = _get_method_end(view, lines, start, body_start)
methods.append({
'index': index,
'name': view.substr(word),
'start': start,
'end': end,
'body_start': body_start,
'body_end': body_end,
'privacy': privacy,
})
return methods
def get_regions(view, method):
start_line = view.line(method['start'])
comment_point = start_line.a - 2
if 'comment' in view.scope_name(comment_point):
comment_start_point = view.extract_scope(comment_point).a
start_line.a = view.line(comment_start_point).a
region = sublime.Region(start_line.a, method['end'])
delete = sublime.Region(region.a, region.b + 1)
while True:
if delete.a <= 0:
break
previous = view.line(delete.a - 1)
if view.substr(previous).strip() == '':
delete.a = previous.a
else:
break
view_size = view.size()
while True:
if delete.b >= view_size:
delete.b = view_size
break
next = view.line(delete.b)
if view.substr(next).strip() == '':
delete.b = next.b + 1
else:
break
# delete.b += 1
lines = ''
# if index != 0 and index != len(methods) - 1:
lines += "\n"
return region, delete, lines
def _get_method_start(view, lines, name_range):
start_line, _ = view.rowcol(name_range.a)
start_col = re.search(r'^(\s*)', lines[start_line]).end(1)
start = view.text_point(start_line, start_col)
body_start = name_range.b
if '(' in lines[start_line]:
body_start = _skip_parenthesis_or_return_args(view, body_start)
# check { or : right after body_start
body_start_match = re.search(r'^\s*[{:]', view.substr(
sublime.Region(body_start, body_start + 64)))
if body_start_match != None:
body_start += body_start_match.end(0)
# strip whitespaces
body_start += re.search(r'^\s*', view.substr(sublime.Region(body_start,
body_start + 64))).end(0)
return start, body_start, _get_method_privacy(lines, start_line)
def _skip_parenthesis_or_return_args(view, point):
new_point = point
while True:
new_point = _skip_parenthesis(view, point)
if new_point == point:
break
point = new_point
if 'source.go' in view.scope_name(point):
match = expression.find_match(view, point, r'{',
{'range': [point, point + 128]})
if match != None:
new_point += match.end(0)
return new_point
def _skip_parenthesis(view, point):
if '(' not in view.substr(sublime.Region(point, view.line(point).b)):
return point
match = expression.find_match(view, point, r'\)',
{'range': [point, point + 512], 'nesting': 'end'})
if match == None:
return point
return match.end(0) + point
def _get_method_end(view, lines, start, body_start):
empty = re.search(r'^\s*(\}|end)', view.substr(sublime.Region(body_start,
body_start + 64)))
if empty:
return body_start + empty.end(0), body_start + empty.start(0)
start_line, _ = view.rowcol(body_start)
end_line = _method_method_end_line(view, lines, start_line)
end = view.text_point(end_line, len(lines[end_line]))
if _is_indented(view, start):
body_end = end
else:
point = view.text_point(end_line, 0)
end_line_range = view.line(point)
body_end = end_line_range.a + re.search(r'^\s*', lines[end_line]).end(0)
  # strip body end
body_end -= len(re.search(r'(\s*)$', view.substr(sublime.Region(body_end - 64,
body_end))).group(0))
return end, body_end
def _method_method_end_line(view, lines, start_line):
previous_line_index, end_line = None, None
source_indentation = len(re.match(r'^(\s*)', lines[start_line]).group(0))
for line_index in range(start_line, len(lines)):
line = lines[line_index]
if line.strip() == '':
continue
current_indentation = len(re.match(r'^(\s*)', line).group(0))
scope_name = view.scope_name(view.text_point(line_index, 0))
if 'string' in scope_name or 'comment' in scope_name:
continue
end_found = (current_indentation < source_indentation and
not line.strip().startswith('rescue')) # ruby rescue hack
if end_found:
end_line_token = line.strip()
if end_line_token == 'end' or end_line_token[0] == '}':
end_line = line_index
else:
end_line = previous_line_index
break
previous_line_index = line_index
if end_line == None:
end_line = len(lines) - 1
return end_line
def _is_indented(view, point):
scope = view.scope_name(point)
for language in indented_languages:
if language in scope:
return True
return False
def _get_method_privacy(lines, line):
if 'public' in lines[line]:
return 'public'
if 'protected' in lines[line]:
return 'protected'
if 'private' in lines[line]:
return 'private'
privacies = ['public', 'protected', 'private']
for index in reversed(range(0, line)):
stripped = lines[index].strip()
if stripped in privacies:
return stripped
return 'public'
def get_method_insert_snippet(language, null = False, filename = None):
if filename == None:
if null:
filename = language + '-method-call-null.sublime-snippet'
else:
filename = language + '-method-call.sublime-snippet'
return _get_snippet_body(filename)
def get_method_snippet(language, filename = None):
if filename == None:
filename = language + '-method.sublime-snippet'
return _get_snippet_body(filename)
def _get_snippet_body(filename):
snippets = sublime.find_resources(filename)
if len(snippets) == 0:
raise Exception('Snippet "' + filename + '" not found')
snippet = sublime.load_resource(snippets[0])
if snippet.strip() == '':
return None
xml = minidom.parseString(snippet).firstChild
if xml == None:
return None
snippet = {}
for node in xml.childNodes:
if node.nodeType == node.ELEMENT_NODE:
value_node = node.firstChild
if value_node == None:
continue
if node.tagName == 'content':
return value_node.data.strip()
  raise Exception('Snippet "' + filename + '" is empty')
|
eiriniar/CellCnn
|
cellCnn/utils.py
|
Python
|
gpl-3.0
| 12,947
| 0.00363
|
""" Copyright 2016-2017 ETH Zurich, Eirini Arvaniti and Manfred Claassen.
This module contains utility functions.
"""
import os
import errno
from collections import Counter
import numpy as np
import pandas as pd
import copy
from cellCnn.downsample import random_subsample, kmeans_subsample, outlier_subsample
from cellCnn.downsample import weighted_subsample
import sklearn.utils as sku
from sklearn.metrics.pairwise import pairwise_kernels, pairwise_distances
from scipy.cluster.hierarchy import linkage
from scipy.cluster.hierarchy import fcluster
from scipy import stats
from scipy.sparse import coo_matrix
import flowio
try:
import igraph
except ImportError:
pass
# extra arguments accepted for backwards-compatibility (with the fcm-0.9.1 package)
def loadFCS(filename, *args, **kwargs):
f = flowio.FlowData(filename)
events = np.reshape(f.events, (-1, f.channel_count))
channels = []
for i in range(1, f.channel_count+1):
key = str(i)
if 'PnS' in f.channels[key] and f.channels[key]['PnS'] != u' ':
channels.append(f.channels[key]['PnS'])
elif 'PnN' in f.channels[key] and f.channels[key]['PnN'] != u' ':
channels.append(f.channels[key]['PnN'])
else:
channels.append('None')
return FcmData(events, channels)
class FcmData(object):
def __init__(self, events, channels):
self.channels = channels
self.events = events
self.shape = events.shape
def __array__(self):
return self.events
def mkdir_p(path):
try:
os.makedirs(path)
except OSError as exc:
if exc.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
raise
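# Note: on Python >= 3.2 the same behaviour is available directly as
# os.makedirs(path, exist_ok=True); the EEXIST check above keeps Python 2 support.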
def get_data(indir, info, marker_names, do_arcsinh, cofactor):
fnames, phenotypes = info[:, 0], info[:, 1]
sample_list = []
for fname in fnames:
full_path = os.path.join(indir, fname)
fcs = loadFCS(full_path, transform=None, auto_comp=False)
marker_idx = [fcs.channels.index(name) for name in marker_names]
x = np.asarray(fcs)[:, marker_idx]
if do_arcsinh:
x = ftrans(x, cofactor)
sample_list.append(x)
return sample_list, list(phenotypes)
def save_results(results, outdir, labels):
csv_dir = os.path.join(outdir, 'exported_filter_weights')
mkdir_p(csv_dir)
nmark = len(labels)
nc = results['w_best_net'].shape[1] - (nmark+1)
labels_ = labels + ['constant'] + ['out %d' % i for i in range(nc)]
w = pd.DataFrame(results['w_best_net'], columns=labels_)
w.to_csv(os.path.join(csv_dir, 'filters_best_net.csv'), index=False)
w = pd.DataFrame(results['selected_filters'], columns=labels_)
w.to_csv(os.path.join(csv_dir, 'filters_consensus.csv'), index=False)
w = pd.DataFrame(results['clustering_result']['w'], columns=labels_)
w.to_csv(os.path.join(csv_dir, 'filters_all.csv'), index=False)
def get_items(l, idx):
return [l[i] for i in idx]
def get_immediate_subdirectories(a_dir):
return [name for name in os.listdir(a_dir)
if os.path.isdir(os.path.join(a_dir, name))]
def ftrans(x, c):
return np.arcsinh(1./c * x)
def rectify(X):
return np.max(np.hstack([X.reshape(-1, 1), np.zeros((X.shape[0], 1))]), axis=1)
def relu(x):
return x * (x > 0)
def combine_samples(data_list, sample_id):
accum_x, accum_y = [], []
for x, y in zip(data_list, sample_id):
accum_x.append(x)
accum_y.append(y * np.ones(x.shape[0], dtype=int))
return np.vstack(accum_x), np.hstack(accum_y)
def keras_param_vector(params):
W = np.squeeze(params[0])
b = params[1]
W_out = params[2]
# store the (convolutional weights + biases + output weights) per filter
W_tot = np.hstack([W, b.reshape(-1, 1), W_out])
return W_tot
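# Illustrative shape check for the helper above (hypothetical sizes): with 3 filters
# over 10 markers and a 2-class output head, W is (3, 10), b is (3,) and W_out is
# (3, 2), so W_tot is (3, 13): one row of (weights + bias + output weights) per filter.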
def representative(data, metric='cosine', stop=None):
if stop is None:
i = np.argmax(np.sum(pairwise_kernels(data, metric=metric), axis=1))
else:
i = np.argmax(np.sum(pairwise_kernels(data[:, :stop], metric=metric), axis=1))
return data[i]
def cluster_tightness(data, metric='cosine'):
centroid = np.mean(data, axis=0).reshape(1, -1)
return np.mean(pairwise_kernels(data, centroid, metric=metric))
def cluster_profiles(param_dict, nmark, accuracies, accur_thres=.99,
dendrogram_cutoff=.5):
accum = []
    # if not at least 3 models reach the accuracy threshold, select the filters from the 3 best
if np.sort(accuracies)[-3] < accur_thres:
accur_thres = np.sort(accuracies)[-3]
# combine filters from multiple models
for i, params in param_dict.items():
if accuracies[i] >= accur_thres:
W_tot = keras_param_vector(params)
accum.append(W_tot)
    w_strong = np.vstack(accum)
# perform hierarchical clustering on cosine distances
Z = linkage(w_strong[:, :nmark+1], 'average', metric='cosine')
clusters = fcluster(Z, dendrogram_cutoff, criterion='distance') - 1
c = Counter(clusters)
cons = []
for key, val in c.items():
if val > 1:
members = w_strong[clusters == key]
cons.append(representative(members, stop=nmark+1))
if cons != []:
cons_profile = np.vstack(cons)
else:
cons_profile = None
cl_res = {'w': w_strong, 'cluster_linkage': Z, 'cluster_assignments': clusters}
return cons_profile, cl_res
def normalize_outliers(X, lq=.5, hq=99.5, stop=None):
if stop is None:
stop = X.shape[1]
for jj in range(stop):
marker_t = X[:, jj]
low, high = np.percentile(marker_t, lq), np.percentile(marker_t, hq)
X[marker_t < low, jj] = low
X[marker_t > high, jj] = high
return X
def normalize_outliers_to_control(ctrl_list, list2, lq=.5, hq=99.5, stop=None):
X = np.vstack(ctrl_list)
accum = []
if stop is None:
stop = X.shape[1]
for xx in ctrl_list + list2:
for jj in range(stop):
marker_ctrl = X[:, jj]
low, high = np.percentile(marker_ctrl, lq), np.percentile(marker_ctrl, hq)
marker_t = xx[:, jj]
xx[marker_t < low, jj] = low
xx[marker_t > high, jj] = high
accum.append(xx)
return accum
## Utilities for generating random subsets ##
def filter_per_class(X, y, ylabel):
return X[np.where(y == ylabel)]
def per_sample_subsets(X, nsubsets, ncell_per_subset, k_init=False):
nmark = X.shape[1]
shape = (nsubsets, nmark, ncell_per_subset)
Xres = np.zeros(shape)
if not k_init:
for i in range(nsubsets):
X_i = random_subsample(X, ncell_per_subset)
Xres[i] = X_i.T
else:
for i in range(nsubsets):
X_i = random_subsample(X, 2000)
X_i = kmeans_subsample(X_i, ncell_per_subset, random_state=i)
Xres[i] = X_i.T
return Xres
def generate_subsets(X, pheno_map, sample_id, nsubsets, ncell,
per_sample=False, k_init=False):
S = dict()
n_out = len(np.unique(sample_id))
for ylabel in range(n_out):
X_i = filter_per_class(X, sample_id, ylabel)
if per_sample:
S[ylabel] = per_sample_subsets(X_i, nsubsets, ncell, k_init)
else:
n = nsubsets[pheno_map[ylabel]]
S[ylabel] = per_sample_subsets(X_i, n, ncell, k_init)
# mix them
data_list, y_list = [], []
for y_i, x_i in S.items():
data_list.append(x_i)
y_list.append(pheno_map[y_i] * np.ones(x_i.shape[0], dtype=int))
Xt = np.vstack(data_list)
yt = np.hstack(y_list)
Xt, yt = sku.shuffle(Xt, yt)
return Xt, yt
def per_sample_biased_subsets(X, x_ctrl, nsubsets, ncell_final, to_keep, ratio_biased):
nmark = X.shape[1]
Xres = np.empty((nsubsets, nmark, ncell_final))
nc_biased = int(ratio_biased * ncell_final)
nc_unbiased = ncell_final - nc_biased
for i in range(nsubsets):
print i
x_unbiased = random_subsample(X, nc_unbiased)
if (i % 100) == 0:
x_outlier, outlierness = outlier_subsample(X, x_ctrl, to_keep)
x_biased = weighted_subsample(x_outlier, outlierness
|
lukasmarshall/embedded-network-model
|
tariffs.py
|
Python
|
mit
| 22,618
| 0.014015
|
import numpy as np
import pandas as pd
import datetime
class Tariffs :
def __init__(self, scheme_name, retail_tariff_data_path, duos_data_path, tuos_data_path, nuos_data_path, ui_tariff_data_path):
self.scheme_name = scheme_name
self.retail_tariff_data_path = retail_tariff_data_path
self.duos_data_path = duos_data_path
self.tuos_data_path = tuos_data_path
self.nuos_data_path = nuos_data_path
# Get tariff data (note tuos not considered as yet)
self.retail_tariff_data = pd.read_csv(retail_tariff_data_path, index_col = ['offer_name'])
self.duos_tariff_data = pd.read_csv(duos_data_path, index_col = ['offer_name'])
self.tuos_tariff_data = pd.read_csv(tuos_data_path, index_col = ['offer_name'])
self.nuos_tariff_data = pd.read_csv(nuos_data_path, index_col = ['offer_name'])
# TODO - For testing ahead of integration with UI
self.ui_tariff_data_path = ui_tariff_data_path
self.ui_tariff_data = pd.read_csv(ui_tariff_data_path, index_col = ['gen_type'])
# Extract individual charges to reduce code below
self.local_solar_energy = self.ui_tariff_data.loc['local_solar','energy_charge']
self.local_solar_retail = self.ui_tariff_data.loc['local_solar','retail_charge']
self.local_solar_duos = self.ui_tariff_data.loc['local_solar','duos_charge']
self.central_battery_energy = self.ui_tariff_data.loc['central_battery','energy_charge']
self.central_battery_retail = self.ui_tariff_data.loc['central_battery','retail_charge']
self.central_battery_duos = self.ui_tariff_data.loc['central_battery','duos_charge']
self.central_battery_profit = self.ui_tariff_data.loc['central_battery','profit_charge']
self.central_battery_importing_ls_energy = self.ui_tariff_data.loc['central_battery_importing_local_solar','energy_charge']
self.central_battery_importing_ls_retail = self.ui_tariff_data.loc['central_battery_importing_local_solar','retail_charge']
self.central_battery_importing_ls_duos = self.ui_tariff_data.loc['central_battery_importing_local_solar','duos_charge']
# print(self.ui_tariff_data)
# print(self.retail_tariff_data)
# print(self.duos_tariff_data)
def get_variable_tariff(self, date_time, retail_tariff_type):
"""Variable tariff component from retail tariff data."""
# Get data from df
flat_charge = self.retail_tariff_data.loc[retail_tariff_type,'flat_charge']
peak_charge = self.retail_tariff_data.loc[retail_tariff_type,'peak_charge']
shoulder_charge = self.retail_tariff_data.loc[retail_tariff_type,'shoulder_charge']
offpeak_charge = self.retail_tariff_data.loc[retail_tariff_type,'offpeak_charge']
block_1_charge = self.retail_tariff_data.loc[retail_tariff_type,'block_1_charge']
block_2_charge = self.retail_tariff_data.loc[retail_tariff_type,'block_2_charge']
controlled_load = self.retail_tariff_data.loc[retail_tariff_type,'controlled_load']
peak_start_time = self.retail_tariff_data.loc[retail_tariff_type,'peak_start_time']
peak_end_time = self.retail_tariff_data.loc[retail_tariff_type,'peak_end_time']
peak_start_time_2 = self.retail_tariff_data.loc[retail_tariff_type,'peak_start_time_2']
peak_end_time_2 = self.retail_tariff_data.loc[retail_tariff_type,'peak_end_time_2']
shoulder_start_time = self.retail_tariff_data.loc[retail_tariff_type,'shoulder_start_time']
shoulder_end_time = self.retail_tariff_data.loc[retail_tariff_type,'shoulder_end_time']
shoulder_start_time_2 = self.retail_tariff_data.loc[retail_tariff_type,'shoulder_start_time_2']
shoulder_end_time_2 = self.retail_tariff_data.loc[retail_tariff_type,'shoulder_end_time_2']
block_1_volume = self.retail_tariff_data.loc[retail_tariff_type,'block_1_volume']
block_2_volume = self.retail_tariff_data.loc[retail_tariff_type,'block_2_volume']
demand_charge = self.retail_tariff_data.loc[retail_tariff_type,'demand']
tou_weekday_only_flag = self.retail_tariff_data.loc[retail_tariff_type, 'tou_weekday_only_flag']
if retail_tariff_type == 'Business Anytime':
variable_tariff = (block_1_charge, block_2_charge, block_1_volume)
elif retail_tariff_type == 'Business TOU':
variable_tariff = (peak_charge, shoulder_charge, offpeak_charge, peak_start_time, peak_end_time, peak_start_time_2, peak_end_time_2, shoulder_start_time, shoulder_end_time, shoulder_start_time_2, shoulder_end_time_2, tou_weekday_only_flag)
elif retail_tariff_type == 'Controlled Load 1':
variable_tariff = (controlled_load)
elif retail_tariff_type == 'Controlled Load 2':
variable_tariff = (controlled_load)
else:
raise ValueError('Retail tariff type not known:'+str(retail_tariff_type))
return variable_tariff
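    # Illustration of the heterogeneous return above: 'Business Anytime' yields a
    # 3-tuple (block_1_charge, block_2_charge, block_1_volume), while 'Business TOU'
    # yields a 12-tuple of charges plus TOU window times, so callers must unpack
    # according to the tariff type they requested.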
def get_local_solar_import_tariff(self,date_time):
"""Input in UI.
Is the amount which the Participant pays for local solar they consume."""
local_solar_import_tariff = self.local_solar_energy + self.local_solar_retail + self.local_solar_duos
return local_solar_import_tariff
def get_local_solar_export_tariff(self,date_time):
"""Input in UI.
Is the amount which the Participant is paid for local solar they generate."""
local_solar_export_tariff = self.local_solar_energy
return local_solar_export_tariff
def get_central_batt_tariff(self,date_time):
"""This is the tariff paid by the battery to the solar owner when importing solar. It should ONLY include energy and is what the participant RECEIVES."""
"""Input in UI"""
return self.central_battery_importing_ls_energy
def get_central_batt_buy_tariff(self,date_time):
"""This is the tariff paid by the participant to the battery when consuming battery export electricity."""
"""Input in UI"""
participant_central_battery_import_tariff = self.central_battery_energy + self.central_battery_retail + self.central_battery_duos + self.central_battery_profit
# print(participant_central_battery_import_tariff)
return participant_central_battery_import_tariff
def get_retail_solar_tariff(self,date_time, retail_tariff_type, solar_capacity):
"""Solar FiT component from retail tariff data."""
# Get solar threshold from retail data sheet
        solar_capacity_threshold = self.retail_tariff_data.loc[retail_tariff_type,'solar_cap_1']
# If below or equal to the threshold, return the relevant solar rate in $/kWh.
if solar_capacity <= solar_capacity_threshold:
retail_solar_tariff = self.retail_tariff_data.loc[retail_tariff_type,'solar_tariff_1']
# Else return the rate for systems above the threshold.
else :
retail_solar_tariff = self.retail_tariff_data.loc[retail_tariff_type,'solar_tariff_2']
        return retail_solar_tariff
def get_fixed_tariff(self, fixed_period_minutes, retail_tariff_type):
"""Fixed tariff component from retail tariff data. Returns fixed value expressed per fixed period minutes (input)."""
fixed_tariff = self.retail_tariff_data.loc[retail_tariff_type,'daily_charge'] * (float(fixed_period_minutes)/float(60*24))
return fixed_tariff
# Things the network is paid (fixed DUOS charges, variable DUOS charges, local solar DUOS charges, central battery DUOS charges)
# Apply to amounts consumer each time period then sum for total network income
def get_duos_on_grid_import_fixed(self,fixed_period_minutes, duos_tariff_type):
fixed_tariff = self.duos_tariff_data.loc[duos_tariff_type,'daily_charge'] * (float(fixed_period_minutes)/float(60*24))
return fixed_tariff
def get_duos_on_grid_import_variable(self,date_time, duos_tariff_type):
"""Variable tariff component from DUOS tariff data."""
# Get data from df
flat_charge = self.duos_tariff_data.loc[duos_tariff_type,'flat_charge']
        peak_charge = self.duos_tariff_data.loc[duos_tariff_type,'peak_charge']
|
yuginboy/from_GULP_to_FEFF
|
feff/libs/determine_numbers_of_target_atoms.py
|
Python
|
gpl-3.0
| 2,071
| 0.005311
|
'''
* Created by Zhenia Syryanyy (Yevgen Syryanyy)
* e-mail: yuginboy@gmail.com
* License: this code is under GPL license
* Last modified: 2017-10-24
'''
import re
import os
class TargetAtom():
def __init__(self):
self.path_to_cfg_file = ''
self.atom_type = 'Mn'
self.number_of_target_atoms = None
def read_cfg_file(self):
pattern = self.atom_type + '='
self.number_of_target_atoms = None
if os.path.isfile(self.path_to_cfg_file):
with open(self.path_to_cfg_file, 'r') as f:
for line in f:
if pattern in line:
                        self.number_of_target_atoms = re.findall(r'(\d+)', line)[0]
self.number_of_target_atoms = int(self.number_of_target_atoms)
def create_cfg_file(self):
with open(self.path_to_cfg_file, 'w') as f:
out_line = '{}={}\n'.format(self.atom_type, self.number_of_target_atoms)
f.write(out_line)
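    # The cfg file written above is a plain key=value text file, e.g. a single
    # line such as "Mn=2" (atom type, '=', number of target atoms), which is
    # exactly what read_cfg_file() scans for.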
def get_number_of_target_atoms(self):
        if self.number_of_target_atoms is None:
self.read_cfg_file()
        if self.number_of_target_atoms is not None:
            return int(self.number_of_target_atoms)
        # no matching entry was found in the cfg file
        return None
def print_info(self):
txt = '==========================================\n'
txt += 'Number of target >> {} << atoms is [ {} ]\n'.format(self.atom_type, self.number_of_target_atoms)
txt += '==========================================\n'
print(txt)
if __name__ == '__main__':
print('-> you run ', __file__, ' file in a main mode')
obj = TargetAtom()
obj.atom_type = 'Mn'
obj.path_to_cfg_file = '/mnt/soliddrive/yugin/models/1Mn/best_models/MnI_caseB_Mn2[4]/atoms.cfg'
obj.read_cfg_file()
obj.print_info()
# obj.atom_type = 'Mn'
# obj.number_of_target_atoms = 2
# obj.path_to_cfg_file = '/mnt/soliddrive/yugin/models/1Mn/best_models/MnI_caseB_Mn2[4]/atoms.cfg'
# obj.create_cfg_file()
# obj.print_info()
|
Chilledheart/chromium
|
tools/telemetry/telemetry/value/skip_unittest.py
|
Python
|
bsd-3-clause
| 1,626
| 0.004305
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import unittest
from telemetry import story
from telemetry import page as page_module
from telemetry import value
from telemetry.value import skip
class TestBase(unittest.TestCase):
def setUp(self):
story_set = story.StorySet(base_dir=os.path.dirname(__file__))
story_set.AddStory(
        page_module.Page('http://www.bar.com/', story_set, story_set.base_dir))
self.story_set = story_set
@property
def pages(self):
return self.story_set.stories
class ValueTest(TestBase):
def testBuildbotAndRepresentativeValue(self):
v = skip.SkipValue(self.pages[0], 'page skipped for testing reason')
self.assertIsNone(v.GetBuildbotValue())
self.assertIsNone(v.GetBuildbotDataType(
value.COMPUTED_PER_PAGE_SUMMARY_OUTPUT_CONTEXT))
self.assertIsNone(v.GetChartAndTraceNameForPerPageResult())
self.assertIsNone(v.GetRepresentativeNumber())
self.assertIsNone(v.GetRepresentativeString())
def testAsDict(self):
v = skip.SkipValue(self.pages[0], 'page skipped for testing reason')
d = v.AsDictWithoutBaseClassEntries()
self.assertEquals(d['reason'], 'page skipped for testing reason')
def testFromDict(self):
d = {
'type': 'skip',
'name': 'skip',
'units': '',
'reason': 'page skipped for testing reason'
}
v = value.Value.FromDict(d, {})
self.assertTrue(isinstance(v, skip.SkipValue))
self.assertEquals(v.reason, 'page skipped for testing reason')
|
rtucker/sycamore
|
Sycamore/i18n/it.py
|
Python
|
gpl-2.0
| 19,046
| 0.026672
|
# -*- coding: iso-8859-1 -*-
# Text translations for Italiano (it).
# Automatically generated - DO NOT EDIT, edit it.po instead!
meta = {
'language': 'Italiano',
'maintainer': 'gian paolo ciceri <gp.ciceri@acm.org>',
'encoding': 'iso-8859-1',
'direction': 'ltr',
}
text = {
'''(last edited %(time)s by %(editor)s)''':
'''(l\'ultima modifica è del %(time)s, fatta da %(editor)s)''',
'''(last modified %s)''':
'''(modificata l\'ultima volta il %s)''',
'''The backupped content of this page is deprecated and will not be included in search results!''':
'''La copia di backup di questa pagina è deprecata e pertanto non verrà inclusa nella ricerca!''',
'''Version as of %(date)s''':
'''Versione del %(date)s''',
'''Redirected from page "%(page)s"''':
'''Redirezione dalla pagina "%(page)s"''',
'''This page redirects to page "%(page)s"''':
'''Questa pagina è rediretta alla pagina "%(page)s"''',
'''RefreshCache''':
'''AggiornaCache''',
'''Create this page''':
'''Crea questa pagina''',
'''Alternatively, use one of these templates:''':
'''In alternativa, puoi utilizzare uno di questi modelli:''',
'''To create your own templates, add a page with a name matching the regex "%(page_template_regex)s".''':
'''Per creare i tuoi modelli, aggiungi una pagina con il nome che corrisponda alla regex "%(page_template_regex)s".''',
'''The lock you held timed out, be prepared for editing conflicts!''':
'''Il lock che detenevi è spirato, preparati per conflitti di modifiche!''',
'''Edit "%(pagename)s"''':
'''Modifica "%(pagename)s"''',
'''Preview of "%(pagename)s"''':
'''Anteprima di "%(pagename)s"''',
'''Your edit lock on %(lock_page)s will expire in # minutes.''':
'''Il blocco alle modifiche sulla pagina %(lock_page)s verrà tolto in # minuti.''',
'''Your edit lock on %(lock_page)s will expire in # seconds.''':
'''Il blocco alle modifiche sulla pagina %(lock_page)s verrà tolto in # secondi.''',
'''Someone else deleted this page while you were editing!''':
'''Qualcun altro ha rimosso la pagina mentre la stavi modificando''',
'''Someone else changed this page while you were editing!''':
'''Qualcun altro ha cambiato la pagina mentre la stavi modificando''',
'''Someone else saved this page while you were editing!
Please review the page and save then. Do not save this page as it is!
Have a look at the diff of %(difflink)s to see what has been changed.''':
'''Qualcun altro ha salvato la pagina mentre la stavi modificandoPer favore, rivedi la pagina e salvala. Non salvarla così come è!
Dai un\'occhiata alle diff in %(difflink)s per vedere cosa è cambiato.''',
'''[Content of new page loaded from %s]''':
'''[Contenuto della nuova pagina caricato da %s]''',
'''[Template %s not found]''':
'''[Il modello %s non è stato trovato]''',
'''Reduce editor size''':
'''Riduci la dimensione dell\'editor''',
'''Skip to preview''':
'''Salta all\'anteprima''',
'''Describe %s here.''':
'''Inserisci una descrizione per %s''',
'''Optional comment about this change''':
'''Commento facoltativo alle modifiche apportate''',
'''<No addition>''':
'''<nessuna>''',
'''Make this page belong to category %(category)s''':
'''Associa questa pagina alla categoria %(category)s''',
'''Check Spelling''':
'''Controllo ortografico''',
'''Save Changes''':
'''Salva le modifiche''',
'''Cancel''':
'''Annulla''',
'''By hitting <strong>%(save_button_text)s</strong> you put your changes under the %(license_link)s.
If you don\'t want that, hit <strong>%(cancel_button_text)s</strong> to cancel your changes.''':
'''Dando il comando <strong>%(save_button_text)s</strong> tu rilasci le tue modifiche sotto la %(license_link)s.
Se non vuoi questo, premi <strong>%(cancel_button_text)s</strong> per cancellare le tue modifiche.''',
'''Preview''':
'''Anteprima''',
'''Send mail notification''':
'''Invia una mail di notifica''',
'''Remove trailing whitespace from each line''':
'''Rimuovi gli spazi in fondo a ogni riga di testo''',
'''Edit was cancelled.''':
'''Le modifiche sono state annullate.''',
'''No older revisions of the page stored, diff not available.''':
'''Non ci sono revisioni precedenti della pagina, non è possibile riassumere le differenze.''',
'''No differences found!
''':
'''Non ho riscontrato nessuna differenza!
''',
'''[%(sitename)s] Update of "%(pagename)s"''':
'''[%(sitename)s] La pagina "%(pagename)s" è stata aggiornata''',
'''Status of sending notification mails:''':
'''Risultato della spedizione delle email di notifica:''',
'''[%(lang)s] %(recipients)s: %(status)s''':
'''[%(lang)s] %(recipients)s: %(status)s''',
'''Nobody subscribed to this page, no mail sent.''':
'''Nessuno risulta abbonato a questa pagina, non spedisco alcuna mail.''',
'''The lock of %(owner)s timed out %(mins_ago)d minute(s) ago, and you were granted the lock for this page.''':
'''Il lock di %(owner)s è spirato %(mins_ago)d minute(s) fa, e ti è stato concesso il lock per questa pagina.''',
'''Other users will be <em>blocked</em> from editing this page until %(bumptime)s.''':
'''Gli altri utenti saranno <em>bloccati</em> nelle modifiche a questa pagina fino a %(bumptime)s.''',
'''Other users will be <em>warned</em> until %(bumptime)s that you are editing this page.''':
'''Gli altri utenti saranno <em>avvisati</em> fino a %(bumptime)s che stai modificando la paginas.''',
'''Use the Preview button to extend the locking period.''':
'''Usa il bottone Anteprima per estendere il periodo di locking.''',
'''This page was opened for editing or last previewed at %(timestamp)s by %(owner)s.<br>
<strong class="highlight">You should <em>refrain from editing</em> this page for at least another %(mins_valid)d minute(s),
to avoid editing conflicts.</strong><br>
To leave the editor, press the Cancel button.''':
'''Questa pagina risulta in modifica dalle %(timestamp)s da parte di %(owner)s, o perlomeno ne ha richiesto un\'anteprima a quell\'ora.<br>
<strong class="highlight">Dovresti <em>evitare di modificare</em> questa pagina per almeno altri %(mins_valid)d minuti per non incorrere in probabili conflitti.</strong><br>
Premi il pulsante "Annulla" per lasciare l\'editor.''',
'''<unknown>''':
'''<informazione non disponibile>''',
'''Info''':
'''Informazioni''',
'''Edit''':
'''Modifica''',
'''UnSubscribe''':
'''Annulla sottoscrizione''',
'''Subscribe''':
'''Sottoscrivi''',
'''Raw''':
'''Non formattato''',
'''XML''':
'''XML''',
'''Home''':
'''Home''',
'''Up''':
'''Su''',
'''Unknown action''':
'''Azione sconosciuta''',
'''Can\'t work out query''':
'''Non riesco a impostare la query di ricerca''',
'''Open editor on double click''':
'''Con un doppio click, apri l\'editor''',
'''Remember last page visited''':
'''Ricorda l\'ultima pagina visitata''',
'''Show emoticons''':
'''Mostra faccine''',
'''Show fancy links''':
'''Enfatizza i collegamenti esterni''',
'''Show question mark for non-existing pagelinks''':
'''Mostra un punto di domanda al posto dei collegamenti non esistenti''',
'''Show page trail''':
'''Mostra il pié di pagina''',
'''Show icon toolbar''':
'''Mostra la barra con le icone''',
'''Show top/bottom links in headings''':
'''Mostra i collegamenti all\'inizio e alla fine della pagina nelle intestazioni''',
'''Show fancy diffs''':
'''Evidenzia le differenze''',
'''Add spaces to displayed wiki names''':
'''Aggiungi uno spazio tra le singole parole dei nomi wiki''',
'''Remember login information forever''':
'''Ricorda le mie informazioni di login per sempre''',
'''Disable this account forever''':
'''Disabilita questa utenza per sempre''',
'''This wiki is not enabled for mail processing. Contact the owner of the wiki, who can either enable email, or remove the "Subscribe" icon.''':
'''questo wiki non è abilitato ad usare la mail. Contatta il gestore del wiki, o chi può abilitare la mail, oppure togli l\'icona "Subscribe".''',
'''Please enter a user name!''':
'''Inserisci un nome utente''',
'''User name already exists!''':
'''Il nome utente esiste gia!''',
'''Passwords don\'t match!''':
'''Le password non coincidono!''',
'''Default''':
'''Prestabilito''',
'''<Browser setting>''':
'''<Impostazioni del browser>''',
'''Logout''':
'''Logout''',
'''Name''':
'''Nome''',
'''(Use FirstnameLastname)''':
'''(Usa la forma NomeCognome)''',
'''Password repea
|
dilworm/pytest
|
redisinfo/redisclient.py
|
Python
|
gpl-2.0
| 5,692
| 0.008784
|
#-*-coding=utf8-*-
import asyncore, socket, time
import redisproto as rp
import threading
import traceback
import Queue,logging
logger = logging.getLogger("cf")
CONN_TIMEOUT = 15
class RedisClient(asyncore.dispatcher):
    redis_reply = ''  # redis reply, bulk strings
    recv_size = 0
wflag = False
rflag = False # prevent being pushed into poll readable list before invoke connect()
queue = Queue.Queue()
buf = ""
last_try_conn = 0
def __init__(self, host, port, id, name):
asyncore.dispatcher.__init__(self)
self.host = host
self.port = port
self.id = id
self.name = name
#self.set_reuse_addr()
def add_cmd(self):
try:
self.queue.put_nowait(time.time()) #simply put an item to notify redisclient
#print "add to queue {}".format(self.port)
except Queue.Full:
print u"Error: add_cmd failed, queue is FULL !!!"
def set_callback(self, cb):
self.cb = cb
def asyn_info(self):
self.cmd = "*2\r\n$4\r\ninfo\r\n$3\r\nall\r\n"
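        # The command above is RESP (REdis Serialization Protocol) framing: "*2"
        # announces an array of two bulk strings ("$4" info, "$3" all), i.e. the
        # Redis "INFO all" command.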
def set_readable(self, flag = True):
self.rflag = flag
def set_writable(self, flag = True):
self.wflag = flag
def try_connect(self):
if not(self.is_connecting() or self.is_connected()):
if time.time() - self.last_try_conn > CONN_TIMEOUT:
print "{0}: connecting {1}:{2}".format(time.time(), self.host, self.port)
self.last_try_conn = time.time()
self.connect()
# inherted fucntions from asyncore.dispatcher
def connect(self):
assert(not(self.is_connecting() or self.is_connected()))
asyncore.dispatcher.connect(self, (self.host, self.port))
self.set_readable()
self.set_writable()
def handle_connect(self):
print u"{0}: connected to {1}:{2}".format(time.time(), self.host, self.port)
def handle_read(self):
#print 'handle_read...{0}'.format(self.port)
recv = self.recv(256)
self.recv_size += len(recv)
self.redis_reply += recv
last = rp.check_bulk_strings(self.redis_reply)
if (last != -1):
try:
logger.debug("{0}: redis reply from {1}:{2} , data_size = {3}".format(time.time(), self.host, self.port, len(self.redis_reply)))
self.cb.on_info(self.id, self.name, rp.remove_bulk_string_format(self.redis_reply[:last]))
self.redis_reply = self.redis_reply[last:]
if (len(self.redis_reply) > 0):
logger.warn("{0} remain {1}".format(self.port, len(self.redis_reply)))
except Exception as e:
print e
logger.error(e)
#else:
# print "{0} check bulk_strings failed! recv_size = {1}, data_size = {2}".format(self.port, len(recv), len(self.redis_reply))
def handle_write(self):
#print 'handle_write...'
if len(self.buf) > 0:
sent = self.send(self.buf)
self.buf=self.buf[sent:]
# readable, writeable can also treate as "local" poll timeout event handler
# of this redisclient.
def readable(self):
#print "{0}: readable".format(time.time())
return self.rflag
def empty_queue(self):
print "{0}: RedisClient::empty_queue".format(time.time())
while not self.queue.empty():
self.queue.get_nowait()
# This function also treat as poll timeout event.
def writable(self):
self.try_connect()
if self.is_connecting():
print "{0}:rediclient is connecting to:{1}:{2}".format(time.time(), self.host, self.port)
return True
if self.is_connected():
if len(self.buf) == 0:
try:
t = self.queue.get_nowait()
self.buf = self.cmd
#print "self.buf = self.cmd {}".format(self.port)
return True
except Queue.Empty:
return False
elif len(self.buf) > 0:
print "{0}:need to send remaining request data".format(time.time())
return True
else:
return False
def is_connected(self):
        return self.connected
def is_connecting(self):
return self.connecting
def handle_close(self):
print "{0}: handle close {1}:{2}".format(time.time(), self.host, self.port)
#traceback.print_stack()
        self.set_readable(False)
self.set_writable(False)
self.close() # remove old socket from asyncore pollable channel
# add new socket to poolable channel
self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
'''
--------------- Test -----------------
r = RedisClient('127.0.0.1', 52021)
r.asyn_info()
collector = []
collector.append(r)
def timer_check_connection(collector):
#print '{0}: check connection... '.format(time.time())
for p in collector:
if not (p.is_connected() or p.is_connecting()):
print "{0}: connecting {1}:{2}".format(time.time(), p.host, p.port)
p.connect()
threading.Timer(5, timer_check_connection, args=[collector]).start()
def timer_add_cmd(collector):
#print '{0}: add_cmd... '.format(time.time())
for p in collector:
if p.is_connected():
print "{0}: add_cmd {1}:{2}".format(time.time(), p.host, p.port)
p.add_cmd()
threading.Timer(3, timer_add_cmd, args=[collector]).start()
threading.Timer(5, timer_check_connection, args=[collector]).start()
threading.Timer(5, timer_add_cmd, args=[collector]).start()
asyncore.loop(2)
'''
|
etherkit/OpenBeacon2
|
macos/venv/lib/python3.8/site-packages/_pyinstaller_hooks_contrib/hooks/stdhooks/hook-gadfly.py
|
Python
|
gpl-3.0
| 458
| 0
|
# ------------------------------------------------------------------
# Copyright (c) 2020 PyInstaller Development Team.
#
# This file is distributed under the terms of the GNU General Public
# License (version 2.0 or later).
#
# The full license is available in LICENSE.GPL.txt, distributed with
# this software.
#
# SPDX-License-Identifier: GPL-2.0-or-later
# ------------------------------------------------------------------
hiddenimports = ["sql_mar"]
|
adviti/melange
|
thirdparty/google_appengine/google/storage/speckle/python/api/rdbms.py
|
Python
|
apache-2.0
| 22,582
| 0.00806
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Python DB-API (PEP 249) interface to SQL Service.
http://www.python.org/dev/peps/pep-0249/
"""
import collections
import datetime
import exceptions
import os
import time
import types
from google.storage.speckle.proto import client_error_code_pb2
from google.storage.speckle.proto import client_pb2
from google.storage.speckle.proto import jdbc_type
from google.storage.speckle.proto import sql_pb2
from google.storage.speckle.python import api
from google.storage.speckle.python.api import converters
__path__ = api.__path__
OAUTH_CREDENTIALS_PATH = os.path.expanduser('~/.googlesql_oauth2.dat')
apilevel = '2.0'
threadsafety = 1
paramstyle = 'format'
version_info = (1, 2, 2, 'final', 0)
class Warning(StandardError, exceptions.Warning):
pass
class Error(StandardError):
pass
class InterfaceError(Error):
pass
class DatabaseError(Error):
pass
class DataError(DatabaseError):
pass
class OperationalError(DatabaseError):
pass
class IntegrityError(DatabaseError):
pass
class InternalError(DatabaseError):
pass
class ProgrammingError(DatabaseError):
pass
class NotSupportedError(DatabaseError):
pass
Blob = converters.Blob
def Date(year, month, day):
return datetime.date(year, month, day)
def Time(hour, minute, second):
return datetime.time(hour, minute, second)
def Timestamp(year, month, day, hour, minute, second):
return datetime.datetime(year, month, day, hour, minute, second)
def DateFromTicks(ticks):
return Date(*time.localtime(ticks)[:3])
def TimeFromTicks(ticks):
return Time(*time.localtime(ticks)[3:6])
def TimestampFromTicks(ticks):
return Timestamp(*time.localtime(ticks)[:6])
def Binary(string):
return Blob(string)
STRING = unicode
BINARY = Blob
NUMBER = float
DATETIME = datetime.datetime
ROWID = int
_PYTHON_TYPE_TO_JDBC_TYPE = {
types.IntType: jdbc_type.INTEGER,
types.LongType: jdbc_type.INTEGER,
types.FloatType: jdbc_type.DOUBLE,
types.BooleanType: jdbc_type.BOOLEAN,
types.StringType: jdbc_type.VARCHAR,
types.UnicodeType: jdbc_type.VARCHAR,
datetime.date: jdbc_type.DATE,
datetime.datetime: jdbc_type.TIMESTAMP,
datetime.time: jdbc_type.TIME,
converters.Blob: jdbc_type.BLOB,
}
def _ConvertFormatToQmark(statement, args):
"""Replaces '%s' with '?'.
The server actually supports '?' for bind parameters, but the
MySQLdb implementation of PEP 249 uses '%s'. Most clients don't
bother checking the paramstyle member and just hardcode '%s' in
their statements. This function converts a format-style statement
into a qmark-style statement.
Args:
statement: A string, a SQL statement.
args: A sequence of arguments matching the statement's bind variables,
if any.
Returns:
The converted string.
"""
if args:
qmarks = tuple('?' * len(args))
return statement % qmarks
return statement
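# A quick illustration of the conversion above (hypothetical statement):
#   _ConvertFormatToQmark('SELECT * FROM t WHERE a = %s AND b = %s', (1, 2))
#   returns 'SELECT * FROM t WHERE a = ? AND b = ?'
# With a false-y args value the statement is returned untouched.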
class Cursor(object):
def __init__(self, conn, use_dict_cursor=False):
"""Initializer.
Args:
conn: A Connection object.
use_dict_cursor: Optional boolean to convert each row of results into a
dictionary. Defaults to False.
"""
self._conn = conn
self._description = None
self._rowcount = -1
self.arraysize = 1
self._open = True
self.lastrowid = None
self._use_dict_cursor = use_dict_cursor
@property
def description(self):
return self._description
@property
def rowcount(self):
return self._rowcount
def close(self):
"""Marks the cursor as unusable for further operations."""
self._CheckOpen()
self._open = False
def _GetJdbcTypeForArg(self, arg):
"""Get the JDBC type which corresponds to the given Python object type."""
arg_jdbc_type = _PYTHON_TYPE_TO_JDBC_TYPE.get(type(arg))
if arg_jdbc_type:
return arg_jdbc_type
for python_t, jdbc_t in _PYTHON_TYPE_TO_JDBC_TYPE.items():
if isinstance(arg, python_t):
return jdbc_t
try:
return self._GetJdbcTypeForArg(arg[0])
except TypeError:
raise TypeError('unknown type')
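    # Example of the sequence fallback above: a list such as [1, 2, 3] has no
    # direct mapping, so the lookup recurses on the first element and returns
    # jdbc_type.INTEGER.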
def _EncodeVariable(self, arg):
"""Converts a variable to a type and value.
Args:
arg: Any tuple, string, numeric, or datetime object.
Returns:
A (int, str) tuple, representing a JDBC type and encoded value.
Raises:
TypeError: The argument is not a recognized type.
"""
arg_jdbc_type = self._GetJdbcTypeForArg(arg)
value = self._conn.encoders[type(arg)](arg, self._conn.encoders)
return arg_jdbc_type, value
def _DecodeVariable(self, datatype, value):
"""Converts a type and value to a variable.
Args:
datatype: An integer.
value: A string.
Returns:
An object of some appropriate type.
Raises:
InterfaceError: datatype is not a recognized JDBC type.
ValueError: The value could not be parsed.
"""
converter = self._conn.converter.get(datatype)
if converter is None:
raise InterfaceError('unknown JDBC type %d' % datatype)
return converter(value)
def execute(self, statement, args=None):
"""Prepares and executes a database operation (query or command).
Args:
statement: A string, a SQL statement.
args: A sequence of arguments matching the statement's bind variables,
if any.
Raises:
InterfaceError: Unknown type used as a bind variable.
DatabaseError: A SQL exception occurred.
OperationalError: RPC problem.
"""
self._CheckOpen()
request = sql_pb2.ExecRequest()
request.options.include_generated_keys = True
if args is not None:
if not hasattr(args, '__iter__'):
args = [args]
for i, arg in enumerate(args):
bv = request.bind_variable.add()
bv.position = i + 1
if arg is None:
bv.type = jdbc_type.NULL
else:
try:
bv.type, bv.value = self._EncodeVariable(arg)
except TypeError:
raise InterfaceError('unknown type %s for arg %d' % (type(arg), i))
request.statement = _ConvertFormatToQmark(statement, args)
response = self._conn.MakeRequest('Exec', request)
result = response.result
if result.HasField('sql_exception'):
raise DatabaseError('%d: %s' % (result.sql_exception.code,
result.sql_exception.message))
self._rows = collections.deque()
if result.rows.columns:
self._description = []
for column in result.rows.columns:
self._description.append(
(column.label, column.type, column.display_size, None,
column.precision, column.scale, column.nullable))
else:
self._description = None
if result.rows.tuples:
assert self._description, 'Column descriptions do not exist.'
column_names = [col[0] for col in self._description]
self._rowcount = len(result.rows.tuples)
for tuple_proto in result.rows.tuples:
row = []
nulls = set(tuple_proto.nulls)
value_index = 0
for i, column_descr in enumerate(self._description):
if i in nulls:
row.append(None)
else:
row.append(self._DecodeVariable(column_descr[1],
tuple_proto.values[value_index]))
value_index += 1
if self._use_dict_cursor:
assert len(column_names) == len(row)
row = dict(zip(column_names, row))
else:
row = tuple(row)
self._rows.append(row)
else:
self._rowcount = result.rows_updated
|
yangleo/cloud-github
|
openstack_dashboard/enabled/_9001_developer.py
|
Python
|
apache-2.0
| 948
| 0
|
# Copyright 2015 Cisco Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.conf import settings
DASHBOARD = 'developer'
ADD_ANGULAR_MODULES = [
'horizon.dashboard.developer'
]
ADD_INSTALLED_APPS = [
'openstack_dashboard.contrib.developer'
]
ADD_SCSS_FILES = [
'dashboard/developer/developer.scss',
]
AUTO_DISCOVER_STATIC_FILES = True
DISABLED = True
if getattr(settings, 'DEBUG', False):
DISABLED = False
|
brianwc/juriscraper
|
opinions/united_states/state/calctapp_2nd.py
|
Python
|
bsd-2-clause
| 472
| 0.002119
|
# Scraper for California's Second District Court of Appeal
# CourtID: calctapp_2nd
# Court Short Name: Cal. Ct. App.
from juriscraper.opinions.united_states.state import cal
class Site(cal.Site):
def __init__(self):
super(Site, self).__init__()
self.url = 'http://www.courtinfo.ca.gov/cgi-bin/opinions-blank.cgi?Courts=B'
self.court_id = self.__module__
def _get_divisions(self):
return ['2nd App. Dist.'] * len(self.case_names)
|
xin3liang/platform_external_chromium_org_third_party_WebKit
|
Tools/Scripts/webkitpy/common/checkout/baselineoptimizer.py
|
Python
|
bsd-3-clause
| 17,896
| 0.003856
|
# Copyright (C) 2011, Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import copy
import logging
from webkitpy.common.memoized import memoized
_log = logging.getLogger(__name__)
# FIXME: Should this function be somewhere more general?
def _invert_dictionary(dictionary):
inverted_dictionary = {}
for key, value in dictionary.items():
if inverted_dictionary.get(value):
inverted_dictionary[value].append(key)
else:
inverted_dictionary[value] = [key]
return inverted_dictionary
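# For example (hypothetical hashes, dict ordering aside):
#   _invert_dictionary({'mac': 'abc1', 'win': 'abc1', 'linux': 'def2'})
#   returns {'abc1': ['mac', 'win'], 'def2': ['linux']}
# i.e. ports grouped by identical baseline results.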
class BaselineOptimizer(object):
ROOT_LAYOUT_TESTS_DIRECTORY = 'LayoutTests'
def __init__(self, host, port_names, skip_scm_commands):
self._filesystem = host.filesystem
self._port_factory = host.port_factory
self._skip_scm_commands = skip_scm_commands
self._files_to_delete = []
self._files_to_add = []
self._scm = host.scm()
self._port_names = port_names
# Only used by unittests.
self.new_results_by_directory = []
def _baseline_root(self, port, baseline_name):
virtual_suite = port.lookup_virtual_suite(baseline_name)
if virtual_suite:
return self._filesystem.join(self.ROOT_LAYOUT_TESTS_DIRECTORY, virtual_suite.name)
return self.ROOT_LAYOUT_TESTS_DIRECTORY
def _baseline_search_path(self, port, baseline_name):
virtual_suite = port.lookup_virtual_suite(baseline_name)
if virtual_suite:
return port.virtual_baseline_search_path(baseline_name)
return port.baseline_search_path()
@memoized
def _relative_baseline_search_paths(self, port_name, baseline_name):
port = self._port_factory.get(port_name)
relative_paths = [self._filesystem.relpath(path, port.webkit_base()) for path in self._baseline_search_path(port, baseline_name)]
return relative_paths + [self._baseline_root(port, baseline_name)]
def _join_directory(self, directory, baseline_name):
# This code is complicated because both the directory name and the baseline_name have the virtual
# test suite in the name and the virtual baseline name is not a strict superset of the non-virtual name.
# For example, virtual/gpu/fast/canvas/foo-expected.png corresponds to fast/canvas/foo-expected.png and
# the baseline directories are like platform/mac/virtual/gpu/fast/canvas. So, to get the path
# to the baseline in the platform directory, we need to append just foo-expected.png to the directory.
virtual_suite = self._port_factory.get().lookup_virtual_suite(baseline_name)
if virtual_suite:
baseline_name_without_virtual = baseline_name[len(virtual_suite.name) + 1:]
else:
baseline_name_without_virtual = baseline_name
return self._filesystem.join(self._scm.checkout_root, directory, baseline_name_without_virtual)
def read_results_by_directory(self, baseline_name):
results_by_directory = {}
directories = reduce(set.union, map(set, [self._relative_baseline_search_paths(port_name, baseline_name) for port_name in self._port_names]))
for directory in directories:
path = self._join_directory(directory, baseline_name)
if self._filesystem.exists(path):
results_by_directory[directory] = self._filesystem.sha1(path)
return results_by_directory
def _results_by_port_name(self, results_by_directory, baseline_name):
results_by_port_name = {}
for port_name in self._port_names:
for directory in self._relative_baseline_search_paths(port_name, baseline_name):
if directory in results_by_directory:
results_by_port_name[port_name] = results_by_directory[directory]
break
return results_by_port_name
@memoized
def _directories_immediately_preceding_root(self, baseline_name):
directories = set()
for port_name in self._port_names:
port = self._port_factory.get(port_name)
directory = self._filesystem.relpath(self._baseline_search_path(port, baseline_name)[-1], port.webkit_base())
directories.add(directory)
return directories
def _optimize_result_for_root(self, new_results_by_directory, baseline_name):
# The root directory (i.e. LayoutTests) is the only one that doesn't correspond
# to a specific platform. As such, it's the only one where the baseline in fallback directories
# immediately before it can be promoted up, i.e. if win and mac
# have the same baseline, then it can be promoted up to be the LayoutTests baseline.
# All other baselines can only be removed if they're redundant with a baseline earlier
# in the fallback order. They can never be promoted up.
directories_immediately_preceding_root = self._directories_immediately_preceding_root(baseline_name)
shared_result = None
root_baseline_unused = False
for directory in directories_immediately_preceding_root:
this_result = new_results_by_directory.get(directory)
# If any of these directories don't have a baseline, there's no optimization we can do.
if not this_result:
return
if not shared_result:
shared_result = this_result
elif shared_result != this_result:
root_baseline_unused = True
baseline_root = self._baseline_root(self._port_factory.get(), baseline_name)
# The root baseline is unused if all the directories immediately preceding the root
# have a baseline, but have different baselines, so the baselines can't be promoted up.
if root_baseline_unused:
if baseline_root in new_results_by_directory:
del new_results_by_directory[baseline_root]
return
new_results_by_directory[baseline_root] = shared_result
for directory in directories_immediately_preceding_root:
del new_results_by_directory[directory]
def _find_optimal_result_placement(self, baseline_name):
results_by_directory = self.read_results_by_directory(baseline_name)
results_by_port_name = self._results_by_port_name(results_by_directory, baseline_name)
port_names_by_result = _invert_dictionary(results_by_port_name)
new_results_by_directory = self._remove_redundant_results(results_by_directory, results_by_port_name, port_names_by_result, baseline_name)
self._optimize_result_for_root(new_results_by_directory, baseline_name)
return results_by_directory, new_results_by_directory
|
dimtruck/magnum
|
magnum/common/pythonk8sclient/swagger_client/models/v1_service_status.py
|
Python
|
apache-2.0
| 2,814
| 0
|
# coding: utf-8
"""
Copyright 2015 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from pprint import pformat
from six import iteritems
class V1ServiceStatus(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self):
"""
Swagger model
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'load_balancer': 'V1LoadBalancerStatus'
}
self.attribute_map = {
'load_balancer': 'loadBalancer'
}
self._load_balancer = None
@property
def load_balancer(self):
"""
Gets the load_balancer of this V1ServiceStatus.
status of load-balancer
:return: The load_balancer of this V1ServiceStatus.
:rtype: V1LoadBalancerStatus
"""
return self._load_balancer
@load_balancer.setter
def load_balancer(self, load_balancer):
"""
Sets the load_balancer of this V1ServiceStatus.
status of load-balancer
:param load_balancer: The load_balancer of this V1ServiceStatus.
:type: V1LoadBalancerStatus
"""
self._load_balancer = load_balancer
def to_dict(self):
"""
Return model properties dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
else:
result[attr] = value
return result
def to_str(self):
"""
Return model properties str
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
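# Minimal usage sketch (assumes a V1LoadBalancerStatus instance built the same
# generated-model way):
#   status = V1ServiceStatus()
#   status.load_balancer = lb_status
#   print(status.to_str())  # pformat of the nested properties dict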
|
googleapis/python-appengine-admin
|
samples/generated_samples/appengine_v1_generated_services_get_service_sync.py
|
Python
|
apache-2.0
| 1,416
| 0.000706
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for GetService
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-appengine-admin
# [START appengine_v1_generated_Services_GetService_sync]
from google.cloud import appengine_admin_v1
def sample_get_service():
# Create a client
client = appengine_admin_v1.ServicesClient()
# Initialize request argument(s)
request = appengine_admin_v1.GetServiceRequest(
)
# Make the request
response = client.get_service(request=request)
# Handle the response
print(response)
# [END appengine_v1_generated_Services_GetService_sync]
|
ruchee/vimrc
|
vimfiles/bundle/vim-python/submodules/pylama/pylama/config.py
|
Python
|
mit
| 8,032
| 0
|
"""Parse arguments from command line and configuration files."""
import fnmatch
import os
import sys
import re
import logging
from argparse import ArgumentParser
from . import __version__
from .libs.inirama import Namespace
from .lint.extensions import LINTERS
#: Default checkers
DEFAULT_LINTERS = 'pycodestyle', 'pyflakes', 'mccabe'
CURDIR = os.getcwd()
CONFIG_FILES = 'pylama.ini', 'setup.cfg', 'tox.ini', 'pytest.ini'
#: The skip pattern
SKIP_PATTERN = re.compile(r'# *noqa\b', re.I).search
# Parse modelines
MODELINE_RE = re.compile(
r'^\s*#\s+(?:pylama:)\s*((?:[\w_]*=[^:\n\s]+:?)+)',
re.I | re.M)
# Setup a logger
LOGGER = logging.getLogger('pylama')
LOGGER.propagate = False
STREAM = logging.StreamHandler(sys.stdout)
LOGGER.addHandler(STREAM)
class _Default(object): # pylint: disable=too-few-public-methods
def __init__(self, value=None):
self.value = value
def __str__(self):
return str(self.value)
def __repr__(self):
return "<_Default [%s]>" % self.value
def split_csp_str(val):
""" Split comma separated string into unique values, keeping their order.
:returns: list of splitted values
"""
seen = set()
values = val if isinstance(val, (list, tuple)) else val.strip().split(',')
return [x for x in values if x and not (x in seen or seen.add(x))]
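# For example, split_csp_str('E501,W0401,E501') returns ['E501', 'W0401']:
# duplicates are dropped while first-seen order is preserved.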
def parse_linters(linters):
""" Initialize choosen linters.
:returns: list of inited linters
"""
result = list()
for name in split_csp_str(linters):
linter = LINTERS.get(name)
if linter:
result.append((name, linter))
else:
logging.warning("Linter `%s` not found.", name)
return result
def get_default_config_file(rootdir=None):
"""Search for configuration file."""
if rootdir is None:
return DEFAULT_CONFIG_FILE
for path in CONFIG_FILES:
path = os.path.join(rootdir, path)
if os.path.isfile(path) and os.access(path, os.R_OK):
return path
DEFAULT_CONFIG_FILE = get_default_config_file(CURDIR)
PARSER = ArgumentParser(description="Code audit tool for python.")
PARSER.add_argument(
"paths", nargs='*', default=_Default([CURDIR]),
help="Paths to files or directories for code check.")
PARSER.add_argument(
"--verbose", "-v", action='store_true', help="Verbose mode.")
PARSER.add_argument('--version', action='version',
version='%(prog)s ' + __version__)
PARSER.add_argument(
"--format", "-f", default=_Default('pycodestyle'),
choices=['pep8', 'pycodestyle', 'pylint', 'parsable'],
help="Choose errors format (pycodestyle, pylint, parsable).")
PARSER.add_argument(
"--select", "-s", default=_Default(''), type=split_csp_str,
help="Select errors and warnings. (comma-separated list)")
PARSER.add_argument(
"--sort", default=_Default(''), type=split_csp_str,
help="Sort result by error types. Ex. E,W,D")
PARSER.add_argument(
"--linters", "-l", default=_Default(','.join(DEFAULT_LINTERS)),
type=parse_linters, help=(
"Select linters. (comma-separated). Choices are %s."
% ','.join(s for s in LINTERS)
))
PARSER.add_argument(
"--ignore", "-i", default=_Default(''), type=split_csp_str,
help="Ignore errors and warnings. (comma-separated)")
PARSER.add_argument(
"--skip", default=_Default(''),
type=lambda s: [re.compile(fnmatch.translate(p))
for p in s.split(',') if p],
help="Skip files by masks (comma-separated, Ex. */messages.py)")
PARSER.add_argument("--report", "-r", help="Send report to file [REPORT]")
PARSER.add_argument(
"--hook", action="store_true", help="Install Git (Mercurial) hook.")
PARSER.add_argument(
"--concurrent", "--async", action="store_true",
help="Enable async mode. Useful for checking a lot of files. "
"Unsupported with pylint.")
PARSER.add_argument(
"--options", "-o", default=DEFAULT_CONFIG_FILE, metavar='FILE',
help="Specify configuration file. "
"Looks for {}, or {} in the current directory (default: {}).".format(
", ".join(CONFIG_FILES[:-1]), CONFIG_FILES[-1],
DEFAULT_CONFIG_FILE))
PARSER.add_argument(
"--force", "-F", action='store_true', default=_Default(False),
help="Force code checking (if linter doesn't allow)")
PARSER.add_argument(
"--abspath", "-a", action='store_true', default=_Default(False),
help="Use absolute paths in output.")
ACTIONS = dict((a.dest, a)
for a in PARSER._actions) # pylint: disable=protected-access
def parse_options(args=None, config=True, rootdir=CURDIR, **overrides): # noqa
""" Parse options from command line and configuration files.
:return argparse.Namespace:
"""
args = args or []
# Parse args from command string
options = PARSER.parse_args(args)
options.file_params = dict()
options.linters_params = dict()
# Compile options from ini
if config:
cfg = get_config(str(options.options), rootdir=rootdir)
for opt, val in cfg.default.items():
LOGGER.info('Found option %s (%s)', opt, val)
passed_value = getattr(options, opt, _Default())
if isinstance(passed_value, _Default):
if opt == 'paths':
val = val.split()
if opt == 'skip':
val = fix_pathname_sep(val)
setattr(options, opt, _Default(val))
# Parse file related options
for name, opts in cfg.sections.items():
if name == cfg.default_section:
continue
if name.startswith('pylama'):
name = name[7:]
if name in LINTERS:
options.linters_params[name] = dict(opts)
continue
mask = re.compile(fnmatch.translate(fix_pathname_sep(name)))
options.file_params[mask] = dict(opts)
# Override options
_override_options(options, **overrides)
# Postprocess options
for name in options.__dict__:
value = getattr(options, name)
if isinstance(value, _Default):
setattr(options, name, process_value(name, value.value))
if options.concurrent and 'pylint' in options.linters:
LOGGER.warning('Can\'t parse code asynchronously with pylint enabled.')
options.concurrent = False
return options
def _override_options(options, **overrides):
"""Override options."""
for opt, val in overrides.items():
passed_value = getattr(options, opt, _Default())
if opt in ('ignore', 'select') and passed_value:
value = process_value(opt, passed_value.value)
value += process_value(opt, val)
setattr(options, opt, value)
elif isinstance(passed_value, _Default):
setattr(options, opt, process_value(opt, val))
def process_value(name, value):
""" Compile option value. """
action = ACTIONS.get(name)
if not action:
return value
if callable(action.type):
return action.type(value)
if action.const:
return bool(int(value))
return value
def get_config(ini_path=None, rootdir=None):
""" Load configuration from INI.
:return Namespace:
"""
config = Namespace()
config.default_section = 'pylama'
if not ini_path:
path = get_default_config_file(rootdir)
if path:
config.read(path)
else:
config.read(ini_path)
return config
def setup_logger(options):
"""Do the logger setup with options."""
LOGGER.setLevel(logging.INFO if options.verbose else logging.WARN)
if options.report:
LOGGER.removeHandler(STREAM)
LOGGER.addHandler(logging.FileHandler(options.report, mode='w'))
if options.options:
LOGGER.info('Try to read configuration from: %r', options.options)
def fix_pathname_sep(val):
"""Fix pathnames for Win."""
return val.replace(os.altsep or "\\", os.sep)
# pylama:ignore=W0212,D210,F0001
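# Usage sketch (hypothetical paths): merge CLI arguments with any pylama
# section found in pylama.ini/setup.cfg/tox.ini/pytest.ini, then wire logging:
#   options = parse_options(['--linters', 'pyflakes', 'src/'])
#   setup_logger(options)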
|
dayaftereh/scripts
|
python/tcu/lib/config.py
|
Python
|
apache-2.0
| 3,690
| 0.002168
|
import os
import json
import logging
_DEFAULT_CONFIG_DIR = "config"
_DEFAULT_CONFIG_NAME = "config.json"
_DEFAULT_LOG_CONFIG_NAME = "default_log.json"
######################################################################################################
def load(path):
json_content = _load_json(path)
logging.info("loaded configuration from [ %s ]", path)
config = Configuration(json_content)
return config
def _load_json(path):
with file(path) as f:
content = json.load(f)
return content
def get_default_config(path):
conf_dir = get_config_dir(path)
conf_file = os.path.join(conf_dir, _DEFAULT_CONFIG_NAME)
return os.path.abspath(conf_file)
def get_config_dir(path):
dirname = os.path.dirname(path)
config_dir = os.path.join(dirname, _DEFAULT_CONFIG_DIR)
return os.path.abspath(config_dir)
def get_default_log_config(path):
conf_dir = get_config_dir(path)
conf_file = os.path.join(conf_dir, _DEFAULT_LOG_CONFIG_NAME)
return os.path.abspath(conf_file)
######################################################################################################
class ConfigurationException(Exception):
def __init__(self, message, *args):
self.args = args
self.message = message
def _to_string(self):
if self.args:
return str(self.message) % self.args
return str(self.message)
def __str__(self):
return self._to_string()
######################################################################################################
class Configuration:
def __init__(self, content):
self._content = content
######################################################################################################
def get(self, key):
value = self.get_default(key, None)
if value is None:
raise ConfigurationException("can't find configuration key [ %s ] in configuration file [ %s ]",
key,
self._path)
return value
def get_default(self, key, default_value=None):
self._valid_content()
path = map(lambda x: x.strip(), key.split('.'))
value = self._find(self._content, path)
if value is None:
return default_value
return value
######################################################################################################
def as_int(self, key):
value = self.get(key)
return int(value)
def as_int_default(self, key, default_value=None):
value = self.get_default(key, default_value)
return int(value)
def as_float(self, key):
value = self.get(key)
return float(value)
def as_float_default(self, key, default_value=None):
value = self.get_default(key, default_value)
return float(value)
def as_string(self, key):
value = self.get(key)
return str(value)
def as_string_default(self, key, default_value):
value = self.get_default(key, default_value)
return str(value)
######################################################################################################
def _valid_content(self):
if self._content is None:
raise ConfigurationException("configuration content is empty or not loaded!")
def _find(self, element, path):
if not path:
return element
next_key = path[0]
if next_key in element:
path.remove(next_key)
next_element = element[next_key]
return self._find(next_element, path)
return None
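# Lookup sketch: keys are dot-separated paths into the JSON content, so for a
# hypothetical content {"server": {"port": 8080}}, get('server.port') walks
# _find() recursively and returns 8080, while get_default() returns its
# fallback instead of raising when a segment is missing.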
|
llby/tasks-for-notebook
|
tasks_for_notebook/tasks_for_notebook.py
|
Python
|
mit
| 3,217
| 0.020827
|
import os
import json
import pandas
import numpy
from IPython.display import HTML
from datetime import datetime
import pandas_highcharts.core
title_name = 'Tasks'
file_name = 'tasks.csv'
css_dt_name = '//cdn.datatables.net/1.10.12/css/jquery.dataTables.min.css'
js_dt_name = '//cdn.datatables.net/1.10.12/js/jquery.dataTables.min'
js_hc_name_1 = '//code.highcharts.com/highcharts'
js_hc_name_2 = '//code.highcharts.com/modules/exporting'
def read_task():
if os.path.exists(file_name):
return pandas.DataFrame.from_csv(file_name)
else:
return pandas.DataFrame()
def save_task(data):
pandas.DataFrame(data).to_csv(file_name)
def add_task(name, content):
data = read_task()
df = pandas.DataFrame([{
'name':name,
'content':content,
'status':'new',
'created_at':datetime.now().strftime("%Y/%m/%d %H:%M:%S")
}], columns = ['name', 'content', 'status', 'created_at', 'updated_at'])
data = data.append(df, ignore_index=True)
save_task(data)
def render_task(data):
js = '''
<link rel='stylesheet' type='text/css' href='%s'>
<script>
require.config({
paths: {
dataTables: '%s'
}
});
require(['dataTables'], function(){
$('.dataframe').DataTable();
});
</script>
'''%(css_dt_name, js_dt_name)
return HTML('<h2>%s</h2>'%(title_name) + data.to_html(classes="display") + js)
def show_done_task():
data = read_task()
data = data[data['status'] == 'done']
return render_task(data)
def show_task():
data = read_task()
data = data[data['status'] != 'done']
return render_task(data)
def update_task(id, **kwargs):
data = read_task()
if kwargs.get('name'):
    data.loc[id:id, 'name'] = kwargs.get('name')
if kwargs.get('content'):
    data.loc[id:id, 'content'] = kwargs.get('content')
if kwargs.get('status'):
    data.loc[id:id, 'status'] = kwargs.get('status')
data.loc[id:id, 'updated_at'] = datetime.now().strftime("%Y/%m/%d %H:%M:%S")
save_task(data)
def delete_task(id):
data = read_task()
data = data.drop(id)
save_task(data)
def backup_task():
os.system( "mkdir backup" )
os.system( "cp %s backup/%s_%s"%(file_name, datetime.now().strftime("%Y%m%d%H%M%S"), file_name) )
def render_graph(data):
chart = pandas_highcharts.core.serialize(data, title=title_name, zoom="xy", output_type='dict')
chart['subtitle'] = {"text": "created tasks", "x": -20}
html = HTML('''
<div id="chart1" style="min-width: 400px; height: 400px; margin: 0 auto"></div>
<script>
require.config({
paths: {
highcharts: '%s',
exporting: '%s'
}
});
require(['highcharts','exporting'], function(){
$('#chart1').highcharts(%s);
});
</script>
''' %(js_hc_name_1, js_hc_name_2, json.dumps(chart)))
return html
def graph_task():
data = read_task()
data['datetime'] = pandas.to_datetime(data['created_at'])
data['count'] = data['name'].count()
data = data.groupby([data['datetime'].dt.year, data['datetime'].dt.month, data['datetime'].dt.day])['count'].count()
data = pandas.DataFrame(data)
return render_graph(data)
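# Typical notebook flow (hypothetical task values):
#   add_task('write report', 'draft the Q3 summary')  # appends a 'new' row to tasks.csv
#   show_task()                                       # pending tasks as a DataTables HTML table
#   update_task(0, status='done')                     # mark row 0 done and stamp updated_at
#   graph_task()                                      # Highcharts plot of tasks created per day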
|
wchan/tensorflow
|
tensorflow/contrib/learn/python/learn/ops/dnn_ops.py
|
Python
|
apache-2.0
| 2,001
| 0
|
"""TensorFlow ops for deep neural networks."""
# Copyright 2015-present The Scikit Flow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.ops import nn
from tensorflow.python.ops import rnn_cell
from tensorflow.python.ops import variable_scope as vs
from tensorflow.contrib.learn.python.learn.ops import dropout_ops
def dnn(tensor_in, hidden_units, activation=nn.relu, dropout=None):
"""Creates fully connected deep neural network subgraph.
Args:
tensor_in: tensor or placeholder for input features.
hidden_units: list of counts of hidden units in each layer.
activation: activation function between layers. Can be None.
dropout: if not None, will add a dropout layer with given
probability.
Returns:
A tensor which would be a deep neural network.
"""
with vs.variable_scope('dnn'):
for i, n_units in enumerate(hidden_units):
with vs.variable_scope('layer%d' % i):
tensor_in = rnn_cell.linear(tensor_in, n_units, True)
if activation is not None:
tensor_in = activation(tensor_in)
if dropout is not None:
tensor_in = dropout_ops.dropout(tensor_in,
prob=(1.0 - dropout))
return tensor_in
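# Usage sketch (shapes illustrative): for features of shape [batch, n_in],
# dnn(features, [64, 32], dropout=0.5) stacks two relu layers (64 then 32
# units) with dropout after each activation and returns a [batch, 32] tensor.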
|
ff94315/hiwifi-openwrt-HC5661-HC5761
|
staging_dir/host/lib64/scons-2.1.0/SCons/Script/SConscript.py
|
Python
|
gpl-2.0
| 24,428
| 0.002702
|
"""SCons.Script.SConscript
This module defines the Python API provided to SConscript and SConstruct
files.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from __future__ import division
__revision__ = "src/engine/SCons/Script/SConscript.py 5357 2011/09/09 21:31:03 bdeegan"
import SCons
import SCons.Action
import SCons.Builder
import SCons.Defaults
import SCons.Environment
import SCons.Errors
import SCons.Node
import SCons.Node.Alias
import SCons.Node.FS
import SCons.Platform
import SCons.SConf
import SCons.Script.Main
import SCons.Tool
import SCons.Util
import collections
import os
import os.path
import re
import sys
import traceback
# The following variables used to live in this module. Some
# SConscript files out there may have referred to them directly as
# SCons.Script.SConscript.*. This is now supported by some special
# handling towards the bottom of the SConscript.__init__.py module.
#Arguments = {}
#ArgList = []
#BuildTargets = TargetList()
#CommandLineTargets = []
#DefaultTargets = []
class SConscriptReturn(Exception):
pass
launch_dir = os.path.abspath(os.curdir)
GlobalDict = None
# global exports set by Export():
global_exports = {}
# chdir flag
sconscript_chdir = 1
def get_calling_namespaces():
"""Return the locals and globals for the function that called
into this module in the current call stack."""
try: 1//0
except ZeroDivisionError:
# Don't start iterating with the current stack-frame to
# prevent creating reference cycles (f_back is safe).
frame = sys.exc_info()[2].tb_frame.f_back
# Find the first frame that *isn't* from this file. This means
# that we expect all of the SCons frames that implement an Export()
# or SConscript() call to be in this file, so that we can identify
# the first non-Script.SConscript frame as the user's local calling
# environment, and the locals and globals dictionaries from that
# frame as the calling namespaces. See the comment below preceding
# the DefaultEnvironmentCall block for even more explanation.
while frame.f_globals.get("__name__") == __name__:
frame = frame.f_back
return frame.f_locals, frame.f_globals
def compute_exports(exports):
"""Compute a dictionary of exports given one of the parameters
to the Export() function or the exports argument to SConscript()."""
loc, glob = get_calling_namespaces()
retval = {}
try:
for export in exports:
if SCons.Util.is_Dict(export):
retval.update(export)
else:
try:
retval[export] = loc[export]
except KeyError:
retval[export] = glob[export]
except KeyError, x:
raise SCons.Errors.UserError("Export of non-existent variable '%s'"%x)
return retval
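# Resolution sketch: Export('env') resolves the name 'env' in the caller's
# locals first, then globals; passing a dict exports its items directly.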
class Frame(object):
"""A frame on the SConstruct/SConscript call stack"""
def __init__(self, fs, exports, sconscript):
self.globals = BuildDefaultGlobals()
self.retval = None
self.prev_dir = fs.getcwd()
self.exports = compute_exports(exports) # exports from the calling SConscript
# make sure the sconscript attr is a Node.
if isinstance(sconscript, SCons.Node.Node):
self.sconscript = sconscript
elif sconscript == '-':
self.sconscript = None
else:
self.sconscript = fs.File(str(sconscript))
# the SConstruct/SConscript call stack:
call_stack = []
# For documentation on the methods in this file, see the scons man-page
def Return(*vars, **kw):
retval = []
try:
fvars = SCons.Util.flatten(vars)
for var in fvars:
for v in var.split():
retval.append(call_stack[-1].globals[v])
except KeyError, x:
raise SCons.Errors.UserError("Return of non-existent variable '%s'"%x)
if len(retval) == 1:
call_stack[-1].retval = retval[0]
else:
call_stack[-1].retval = tuple(retval)
stop = kw.get('stop', True)
if stop:
raise SConscriptReturn
stack_bottom = '% Stack boTTom %' # hard to define a variable w/this name :)
def _SConscript(fs, *files, **kw):
top = fs.Top
sd = fs.SConstruct_dir.rdir()
exports = kw.get('exports', [])
# evaluate each SConscript file
results = []
for fn in files:
call_stack.append(Frame(fs, exports, fn))
old_sys_path = sys.path
try:
SCons.Script.sconscript_reading = SCons.Script.sconscript_reading + 1
if fn == "-":
exec sys.stdin in call_stack[-1].globals
else:
if isinstance(fn, SCons.Node.Node):
f = fn
else:
f = fs.File(str(fn))
_file_ = None
# Change directory to the top of the source
# tree to make sure the os's cwd and the cwd of
# fs match so we can open the SConscript.
fs.chdir(top, change_os_dir=1)
if f.rexists():
actual = f.rfile()
_file_ = open(actual.get_abspath(), "r")
elif f.srcnode().rexists():
actual = f.srcnode().rfile()
_file_ = open(actual.get_abspath(), "r")
elif f.has_src_builder():
# The SConscript file apparently exists in a source
# code management system. Build it, but then clear
# the builder so that it doesn't get built *again*
# during the actual build phase.
f.build()
f.built()
f.builder_set(None)
if f.exists():
_file_ = open(f.get_abspath(), "r")
if _file_:
# Chdir to the SConscript directory. Use a path
# name relative to the SConstruct file so that if
# we're using the -f option, we're essentially
# creating a parallel SConscript directory structure
# in our local directory tree.
#
# XXX This is broken for multiple-repository cases
# where the SConstruct and SConscript files might be
# in different Repositories. For now, cross that
# bridge when someone comes to it.
try:
src_dir = kw['src_dir']
except KeyError:
ldir = fs.Dir(f.dir.get_path(sd))
else:
ldir = fs.Dir(src_dir)
if not ldir.is_under(f.dir):
# They specified a source directory, but
# it's above the SConscript directory.
# Do the sensible thing and just use the
# SConscript directory.
|
ardi69/pyload-0.4.10
|
pyload/plugin/crypter/FiletramCom.py
|
Python
|
gpl-3.0
| 843
| 0.017794
|
# -*- coding: utf-8 -*-
from pyload.plugin.internal.SimpleCrypter import SimpleCrypter
class FiletramCom(SimpleCrypter):
__name = "FiletramCom"
__type = "crypter"
__version = "0.03"
__pattern = r'http://(?:www\.)?filetram\.com/[^/]+/.+'
__config = [("use_premium" , "bool", "Use premium account if available" , True),
("use_subfolder" ,
|
"bool", "Save package to subfolder" , True),
("subfolder_per_pack", "bool", "Create a subfolder for each package", True)]
__description = """Filetram.com decrypter plugin"""
__license = "GPLv3"
__authors = [("igel", "igelkun@myopera.com"),
("stickell", "l.stickell@yahoo.it")]
LIN
|
K_PATTERN = r'\s+(http://.+)'
NAME_PATTERN = r'<title>(?P<N>.+?) - Free Download'
|
hhjiang/mcores
|
setup.py
|
Python
|
mit
| 436
| 0
|
from distutils.core import setup, Extension
import numpy
from Cython.Distutils import build_ext
setup(
name='MCores',
version='1.0',
cmdclass={'build_ext': build_ext},
ext_modules=[Extension("MCores",
sources=["kernelModesCluster.pyx"],
language="c++",
include_dirs=[numpy.get_include()])],
author='Heinrich Jiang',
author_email='heinrich.jiang@gmail.com'
)
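# Build sketch: the standard distutils invocation compiles the Cython
# extension in place, yielding an importable MCores module:
#   python setup.py build_ext --inplace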
|
jamslevy/gsoc
|
app/django/core/mail.py
|
Python
|
apache-2.0
| 14,299
| 0.001818
|
"""
Tools for sending email.
"""
import mimetypes
import os
import smtplib
import socket
import time
import random
from email import Charset, Encoders
from email.MIMEText import MIMEText
from email.MIMEMultipart import MIMEMultipart
from email.MIMEBase import MIMEBase
from email.Header import Header
from email.Utils import formatdate, parseaddr, formataddr
from django.conf import settings
from django.utils.encoding import smart_str, force_unicode
# Don't BASE64-encode UTF-8 messages so that we avoid unwanted attention from
# some spam filters.
Charset.add_charset('utf-8', Charset.SHORTEST, Charset.QP, 'utf-8')
# Default MIME type to use on attachments (if it is not explicitly given
# and cannot be guessed).
DEFAULT_ATTACHMENT_MIME_TYPE = 'application/octet-stream'
# Cache the hostname, but do it lazily: socket.getfqdn() can take a couple of
# seconds, which slows down the restart of the server.
class CachedDnsName(object):
def __str__(self):
return self.get_fqdn()
def get_fqdn(self):
if not hasattr(self, '_fqdn'):
self._fqdn = socket.getfqdn()
return self._fqdn
DNS_NAME = CachedDnsName()
# Copied from Python standard library, with the following modifications:
# * Used cached hostname for performance.
# * Added try/except to support lack of getpid() in Jython (#5496).
def make_msgid(idstring=None):
"""Returns a string suitable for RFC 2822 compliant Message-ID, e.g:
<20020201195627.33539.96671@nightshade.la.mastaler.com>
Optional idstring if given is a string used to strengthen the
uniqueness of the message id.
"""
timeval = time.time()
utcdate = time.strftime('%Y%m%d%H%M%S', time.gmtime(timeval))
try:
pid = os.getpid()
except AttributeError:
# No getpid() in Jython, for example.
pid = 1
randint = random.randrange(100000)
if idstring is None:
idstring = ''
else:
idstring = '.' + idstring
idhost = DNS_NAME
msgid = '<%s.%s.%s%s@%s>' % (utcdate, pid, randint, idstring, idhost)
return msgid
class BadHeaderError(ValueError):
pass
def forbid_multi_line_headers(name, val):
"""Forbids multi-line headers, to prevent header injection."""
val = force_unicode(val)
if '\n' in val or '\r' in val:
raise BadHeaderError("Header values can't contain newlines (got %r for header %r)" % (val, name))
try:
val = val.encode('ascii')
except UnicodeEncodeError:
if name.lower() in ('to', 'from', 'cc'):
result = []
for item in val.split(', '):
nm, addr = parseaddr(item)
nm = str(Header(nm, settings.DEFAULT_CHARSET))
result.append(formataddr((nm, str(addr))))
val = ', '.join(result)
else:
val = Header(val, settings.DEFAULT_CHARSET)
else:
if name.lower() == 'subject':
val = Header(val)
return name, val
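# For example, forbid_multi_line_headers('Subject', u'hi\nBcc: x@example.com')
# raises BadHeaderError, blocking the classic header-injection trick; plain
# single-line values pass through (non-ASCII ones get RFC 2047 encoded).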
class SafeMIMEText(MIMEText):
def __setitem__(self, name, val):
name, val = forbid_multi_line_headers(name, val)
MIMEText.__setitem__(self, name, val)
class SafeMIMEMultipart(MIMEMultipart):
def __setitem__(self, name, val):
name, val = forbid_multi_line_headers(name, val)
MIMEMultipart.__setitem__(self, name, val)
class SMTPConnection(object):
"""
A wrapper that manages the SMTP network connection.
"""
def __init__(self, host=None, port=None, username=None, password=None,
use_tls=None, fail_silently=False):
self.host = host or settings.EMAIL_HOST
self.port = port or settings.EMAIL_PORT
self.username = username or settings.EMAIL_HOST_USER
self.password = password or settings.EMAIL_HOST_PASSWORD
self.use_tls = (use_tls is not None) and use_tls or settings.EMAIL_USE_TLS
self.fail_silently = fail_silently
self.connection = None
def open(self):
"""
Ensures we have a connection to the email server. Returns whether or
not a new connection was required (True or False).
"""
if self.connection:
# Nothing to do if the connection is already open.
return False
try:
# If local_hostname is not specified, socket.getfqdn() gets used.
# For performance, we use the cached FQDN for local_hostname.
self.connection = smtplib.SMTP(self.host, self.port,
local_hostname=DNS_NAME.get_fqdn())
if self.use_tls:
self.connection.ehlo()
self.connection.starttls()
self.connection.ehlo()
if self.username and self.password:
self.connection.login(self.username, self.password)
return True
except:
if not self.fail_silently:
raise
def close(self):
"""Closes the connection to the email server."""
try:
try:
self.connection.quit()
except socket.sslerror:
# This happens when calling quit() on a TLS connection
# sometimes.
self.connection.close()
except:
if self.fail_silently:
return
raise
finally:
self.connection = None
def send_messages(self, email_messages):
"""
Sends one or more EmailMessage objects and returns the number of email
messages sent.
"""
if not email_messages:
return
new_conn_created = self.open()
if not self.connection:
# We failed silently on open(). Trying to send would be pointless.
return
num_sent = 0
for message in email_messages:
sent = self._send(message)
if sent:
num_sent += 1
if new_conn_created:
self.close()
return num_sent
def _send(self, email_message):
"""A helper method that does the actual sending."""
if not email_message.recipients():
return False
try:
self.connection.sendmail(email_message.from_email,
email_message.recipients(),
email_message.message().as_string())
except:
if not self.fail_silently:
raise
return False
return True
class EmailMessage(object):
"""
A container for email information.
"""
content_subtype = 'plain'
multipart_subtype = 'mixed'
encoding = None # None => use settings default
def __init__(self, subject='', body='', from_email=None, to=None, bcc=None,
connection=None, attachments=None, headers=None):
"""
Initialize a single email message (which can be sent to multiple
recipients).
All strings used to create the message can be unicode strings (or UTF-8
bytestrings). The SafeMIMEText class will handle any necessary encoding
conversions.
"""
if to:
assert not isinstance(to, basestring), '"to" argument must be a list or tuple'
self.to = list(to)
else:
self.to = []
if bcc:
assert not isinstance(bcc, basestring), '"bcc" argument must be a list or tuple'
self.bcc = list(bcc)
else:
self.bcc = []
self.from_email = from_email or settings.DEFAULT_FROM_EMAIL
self.subject = subject
self.body = body
self.attachments = attachments or []
self.extra_headers = headers or {}
self.connection = connection
def get_connection(self, fail_silently=False):
if not self.connection:
self.connection = SMTPConnection(fail_silently=fail_silently)
return self.connection
def message(self):
encoding = self.encoding or settings.DEFAULT_CHARSET
msg = SafeMIMEText(smart_str(self.body, settings.DEFAULT_CHARSET),
self.content_subtype, encoding)
if self.attachments:
body_msg = msg
|
PowerDNS/exabgp
|
lib/exabgp/reactor/network/connection.py
|
Python
|
bsd-3-clause
| 7,221
| 0.041961
|
# encoding: utf-8
"""
network.py
Created by Thomas Mangin on 2009-09-06.
Copyright (c) 2009-2013 Exa Networks. All rights reserved.
"""
import time
import random
import socket
import select
from struct import unpack
from exabgp.configuration.environment import environment
from exabgp.util.od import od
from exabgp.util.errstr import errstr
from exabgp.logger import Logger
from exabgp.logger import FakeLogger
from exabgp.logger import LazyFormat
from exabgp.bgp.message import Message
from exabgp.reactor.network.error import error
from exabgp.reactor.network.error import errno
from exabgp.reactor.network.error import NetworkError
from exabgp.reactor.network.error import TooSlowError
from exabgp.reactor.network.error import NotConnected
from exabgp.reactor.network.error import LostConnection
from exabgp.reactor.network.error import NotifyError
from .error import *
class Connection (object):
direction = 'undefined'
identifier = 0
def __init__ (self,afi,peer,local):
# peer and local are strings of the IP
try:
self.defensive = environment.settings().debug.defensive
self.logger = Logger()
except RuntimeError:
self.defensive = True
self.logger = FakeLogger()
self.afi = afi
self.peer = peer
self.local = local
self._buffer = ''
self.io = None
self.established = False
self.identifier += 1
self.id = self.identifier
# Just in case ..
def __del__ (self):
if self.io:
self.logger.network("%s connection to %s closed" % (self.name(),self.peer),'info')
self.close()
def name (self):
return "session %d %s" % (self.id,self.direction)
def close (self):
try:
self.logger.wire("%s, closing connection from %s to %s" % (self.name(),self.local,self.peer))
if self.io:
self.io.close()
self.io = None
except KeyboardInterrupt,e:
raise e
except:
pass
def reading (self):
while True:
try:
r,_,_ = select.select([self.io,],[],[],0)
except select.error,e:
if e.args[0] not in error.block:
self.close()
self.logger.wire("%s %s errno %s on socket" % (self.name(),self.peer,errno.errorcode[e.args[0]]))
raise NetworkError('errno %s on socket' % errno.errorcode[e.args[0]])
return False
return r != []
def writing (self):
while True:
try:
_,w,_ = select.select([],[self.io,],[],0)
except select.error,e:
if e.args[0] not in error.block:
self.close()
self.logger.wire("%s %s errno %s on socket" % (self.name(),self.peer,errno.errorcode[e.args[0]]))
raise NetworkError('errno %s on socket' % errno.errorcode[e.args[0]])
return False
return w != []
def _reader (self,number):
# The function must not be called if it does not return with no data with a smaller size as parameter
if not self.io:
self.close()
raise NotConnected('Trying to read on a closed TCP connection')
if number == 0:
yield ''
return
while not self.reading():
yield ''
data = ''
reported = ''
while True:
try:
while True:
if self.defensive and random.randint(0,2):
raise socket.error(errno.EAGAIN,'raising network error in purpose')
read = self.io.recv(number)
if not read:
self.close()
self.logger.wire("%s %s lost TCP session with peer" % (self.name(),self.peer))
raise LostConnection('the TCP connection was closed by the remote end')
data += read
number -= len(read)
if not number:
self.logger.wire(LazyFormat("%s %-32s RECEIVED " % (self.name(),'%s / %s
|
' % (self.local,self.peer)),od,read))
yield data
return
yield ''
except socket.timeout,e:
self.close()
self.logger.wire("%s %s peer is too slow" % (self.name(),self.peer))
raise TooSlowError('Timeout while reading data from the network (%s)' % errstr(e))
except socket.error,e:
if e.args[0] in error.block:
message = "%s %s blocking io problem mid-way through reading a message %s, trying to complete" % (self.name(),self.peer,errstr(e))
if message != reported:
reported = message
self.logger.wire(message,'debug')
yield ''
elif e.args[0] in error.fatal:
self.close()
raise LostConnection('issue reading on the socket: %s' % errstr(e))
# what error could it be !
else:
self.logger.wire("%s %s undefined error reading on socket" % (self.name(),self.peer))
raise NetworkError('Problem while reading data from the network (%s)' % errstr(e))
def writer (self,data):
if not self.io:
# XXX: FIXME: Make sure it does not hold the cleanup during the closing of the peering session
yield True
return
while not self.writing():
yield False
self.logger.wire(LazyFormat("%s %-32s SENDING " % (self.name(),'%s / %s' % (self.local,self.peer)),od,data))
# The first while is here to set up the try/except block once, as it is very expensive
while True:
try:
while True:
if self.defensive and random.randint(0,2):
raise socket.error(errno.EAGAIN,'raising network error in purpose')
# we cannot use sendall: if the network buffer fills up, it raises
# without letting us know how much was actually sent
nb = self.io.send(data)
if not nb:
self.close()
self.logger.wire("%s %s lost TCP connection with peer" % (self.name(),self.peer))
raise LostConnection('lost the TCP connection')
data = data[nb:]
if not data:
yield True
return
yield False
except socket.error,e:
if e.args[0] in error.block:
self.logger.wire("%s %s blocking io problem mid-way through writing a message %s, trying to complete" % (self.name(),self.peer,errstr(e)),'debug')
yield False
elif e.errno == errno.EPIPE:
# The TCP connection is gone.
self.close()
raise NetworkError('Broken TCP connection')
elif e.args[0] in error.fatal:
self.close()
self.logger.wire("%s %s problem sending message (%s)" % (self.name(),self.peer,errstr(e)))
raise NetworkError('Problem while writing data to the network (%s)' % errstr(e))
# what error could it be !
else:
self.logger.wire("%s %s undefined error writing on socket" % (self.name(),self.peer))
yield False
def reader (self):
# _reader returns the whole number requested or nothing and then stops
for header in self._reader(Message.HEADER_LEN):
if not header:
yield 0,0,'','',None
if not header.startswith(Message.MARKER):
yield 0,0,header,'',NotifyError(1,1,'The packet received does not contain a BGP marker')
return
msg = ord(header[18])
length = unpack('!H',header[16:18])[0]
if length < Message.HEADER_LEN or length > Message.MAX_LEN:
yield length,0,header,'',NotifyError(1,2,'%s has an invalid message length of %d' %(Message().name(msg),length))
return
validator = Message.Length.get(msg,lambda _ : _ >= 19)
if not validator(length):
# MUST send the faulty length back
yield length,0,header,'',NotifyError(1,2,'%s has an invalid message length of %d' %(Message().name(msg),length))
return
number = length - Message.HEADER_LEN
if not number:
yield length,msg,header,'',None
return
for body in self._reader(number):
if not body:
yield 0,0,'','',None
yield length,msg,header,body,None
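# Wire-format note: a BGP message starts with a 19-byte header -- 16-byte
# marker, 2-byte big-endian length (the unpack('!H', ...) above) and a 1-byte
# type code -- so the body read is length - Message.HEADER_LEN bytes.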
|
luotao1/Paddle
|
python/paddle/fluid/tests/unittests/tokenizer/__init__.py
|
Python
|
apache-2.0
| 613
| 0.004894
|
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
gryzz/uCall
|
utils/asterisk-connector/ami2stomp.py
|
Python
|
gpl-3.0
| 3,524
| 0.004824
|
#!/usr/bin/env python
# vim: set expandtab shiftwidth=4:
# http://www.voip-info.org/wiki/view/asterisk+manager+events
import asterisk.manager
import sys,os,time
import simplejson as json
from stompy.simple import Client
import ConfigParser
from sqlobject import *
from handlers.command_handler_factory import CommandHandlerFactory
from handlers.command_constants import Protocol
#sys.stdout = open("/var/log/requests/connector2.log","a")
#sys.stderr = open("/var/log/requests/connector-err2.log","a")
import logging
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(levelname)-8s %(message)s',
datefmt='%a, %d %b %Y %H:%M:%S',
filename='/tmp/myapp.log',
filemode='a+')
import fcntl
lockfile = os.path.normpath('/tmp/' + os.path.basename(__file__) + '.lock')
exclusive_lock = open(lockfile, 'w')
try:
fcntl.lockf(exclusive_lock, fcntl.LOCK_EX | fcntl.LOCK_NB)
except IOError:
print "Another instance is already running, quitting."
time.sleep(1)
sys.exit(-1)
config = ConfigParser.ConfigParser()
config.read('/opt/ucall/etc/config.ini')
stomp_host = config.get('STOMP', 'host')
stomp_username = config.get('STOMP', 'username')
stomp_password = config.get('STOMP', 'password')
print '='*80
print 'Stomp host:', stomp_host
print 'Stomp username:', stomp_username
print 'Stomp password:', stomp_password
print '='*80
ami_host = config.get('AMI', 'host')
ami_username = config.get('AMI', 'username')
ami_password = config.get('AMI', 'password')
print 'AMI host:', ami_host
print 'AMI username:', ami_username
print 'AMI password:', ami_password
print '='*80
sql_dsn = config.get('SQL', 'dsn')
print 'SQL:', sql_dsn
print '='*80
stomp = Client(stomp_host)
stomp.connect(stomp_username, stomp_password)
stomp.agent_channel = 'jms.queue.msg.'
connection = connectionForURI(sql_dsn)
sqlhub.processConnection = connection
manager = asterisk.manager.Manager()
#try:
#try:
manager.connect(ami_host)
manager.login(ami_username, ami_password)
manager.destination = stomp
asteriskProtocolVersion = None
if manager.version == '1.0':
asteriskProtocolVersion = Protocol.ASTERISK_1_0
elif manager.version == '1.1':
asteriskProtocolVersion = Protocol.ASTERISK_1_1
else:
sys.exit()
command_handler = CommandHandlerFactory(asteriskProtocolVersion).create_command_handler()
manager.register_event('Shutdown', command_handler.handle_Shutdown)
manager.register_event('Hangup', command_handler.handle_Hangup)
manager.register_event('Link', command_handler.handle_Link)
manager.register_event('Bridge', command_handler.handle_Bridge)
manager.register_event('Dial', command_handler.handle_Dial)
manager.register_event('Newstate', command_handler.handle_Newstate)
manager.register_event('QueueMemberAdded', command_handler.handle_QueueMemberAdded)
manager.register_event('QueueMemberRemoved', command_handler.handle_QueueMemberRemoved)
manager.register_event('QueueMemberPaused', command_handler.handle_QueueMemberPaused)
manager.register_event('QueueMember', command_handler.handle_QueueMember)
manager.message_loop()
manager.logoff()
#except asterisk.manager.ManagerSocketException, (errno, reason):
# print "Error connecting to the manager: %s" % reason
#except asterisk.manager.ManagerAuthException, reason:
# print "Error logging in to the manager: %s" % reason
#except asterisk.manager.ManagerException, reason:
# print "Error: %s" % reason
#except:
# sys.exit()
#finally:
manager.close()
|
luizfelippesr/galmag
|
galmag/analysis/visualization.py
|
Python
|
gpl-3.0
| 8,599
| 0.021282
|
import matplotlib.pyplot as plt
import numpy as np
""" Contains functions to facilitate simple ploting tasks """
def std_setup():
""" Adjusts matplotlib default settings"""
from cycler import cycler
plt.rc('image', cmap='viridis')
plt.rc('xtick', labelsize=14)
plt.rc('ytick', labelsize=14)
plt.rc('axes', labelsize=15, titlesize=15)
plt.rcParams['axes.prop_cycle'] = cycler('color',['#1f78b4','#a6cee3','#33a02c','#b2df8a',
'#e31a1c','#fb9a99','#ff7f00','#fdbf6f',
'#6a3d9a','#cab2d6'])
plt.rcParams['lines.linewidth'] = 1.65
def plot_r_z_uniform(B,skipr=3,skipz=5, quiver=True, contour=True,
quiver_color = '0.25', cmap='viridis', field_lines=True,
vmin=None, vmax=None, levels=None, **kwargs):
"""
Plots a r-z slice of the field. Assumes B is created using a cylindrical
grid - for a more sophisticated/flexible plotting script which does not
rely on the grid structure, see plot_slice.
The plot consists of:
1) a coloured contourplot of :math:`B_\phi`
2) quivers showing the x-z projection of the field
Parameters
----------
B : B_field
a B_field or B_field_component object
quiver : bool
If True, shows quivers. Default True
contour : bool
If True, shows contours. Default: True
    skipr/skipz : int
Tweaks the display of quivers (Default: skipz=5, skipr=3)
"""
# Requires a cylindrical grid
assert B.grid.grid_type == 'cylindrical'
# Makes a color contour plot
if contour:
CP = plt.contourf(B.grid.r_cylindrical[:,0,:], B.grid.z[:,0,:],
-B.phi[:,0,:], alpha=0.75, vmin=vmin, vmax=vmax,
levels=levels, cmap=cmap)
CB = plt.colorbar(CP, label=r'$B_\phi\,[\mu{{\rm G}}]$',)
plt.setp(CP.collections , linewidth=2)
if quiver:
plt.quiver(B.grid.r_cylindrical[::skipr,0,::skipz], B.grid.z[::skipr,0,::skipz],
B.r_cylindrical[::skipr,0,::skipz],B.z[::skipr,0,::skipz],
color=quiver_color, alpha=0.75, **kwargs)
if field_lines:
x = np.array(B.grid.r_cylindrical[:,0,0])
y = np.array(B.grid.z[0,0,:])
u = -np.array(B.r_cylindrical[:,0,:])
v = -np.array(B.z[:,0,:])
lw = np.sqrt(((B.r_cylindrical[:,0,:])**2+
#(B.phi[:,0,:])**2+
(B.z[:,0,:])**2))
#lw = np.log10(lw)
#lw[lw<0]=0
#lw = lw /lw.max()
#print lw.shape
plt.streamplot(x, y, -u.T, -v.T,color='r',
linewidth=lw.T)
plt.ylim([B.grid.z[:,0,:].min(),
B.grid.z[:,0,:].max()])
plt.xlim([B.grid.r_cylindrical[:,0,:].min(),
B.grid.r_cylindrical[:,0,:].max()])
plt.xlabel(r'$R\,[{{\rm kpc}}]$')
plt.ylabel(r'$z\,[{{\rm kpc}}]$')
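# Illustrative usage (added sketch; the exact B_field construction is an
# assumption and may differ from the real galmag API):
#
#     B = galmag.B_field.B_field(grid)  # hypothetical constructor call;
#                                       # any B_field on a cylindrical grid works
#     plot_r_z_uniform(B, skipr=2, skipz=4)
#     plt.show()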
def plot_x_z_uniform(B,skipx=1,skipz=5,iy=0, quiver=True, contour=True,
quiver_color='0.25', cmap='viridis',
vmin=None, vmax=None, no_colorbar=False, **kwargs):
"""
    Plots an x-z slice of the field. Assumes B is created using a Cartesian
    grid; for a more sophisticated/flexible plotting script which does not
    rely on the grid structure, see plot_slice.
The plot consists of:
1) a coloured contourplot of :math:`B_\phi`
2) quivers showing the x-z projection of the field
Parameters
----------
B : B_field
a B_field or B_field_component object
quiver : bool
If True, shows quivers. Default True
contour : bool
If True, shows contours. Default: True
skipx/skipz : int
Tweaks the display of quivers (Default: skipz=5, skipx=1)
"""
# Requires a Cartesian grid
assert B.grid.grid_type == 'cartesian'
# Makes a color contour plot
if contour:
CP = plt.contourf(B.grid.x[:,iy,:], B.grid.z[:,iy,:], B.phi[:,iy,:],
alpha=0.75, cmap=cmap, vmin=vmin, vmax=vmax)
if not no_colorbar:
CB = plt.colorbar(CP, label=r'$B_\phi\,[\mu{{\rm G}}]$',)
plt.setp(CP.collections , linewidth=2)
if quiver:
plt.quiver(B.grid.x[::skipx,iy,::skipz], B.grid.z[::skipx,iy,::skipz],
B.x[::skipx,iy,::skipz],B.z[::skipx,iy,::skipz],
color=quiver_color, alpha=0.75,**kwargs)
plt.ylim([B.grid.z[:,iy,:].min(),
B.grid.z[:,iy,:].max()])
plt.xlim([B.grid.x[:,iy,:].min(),
B.grid.x[:,iy,:].max()])
plt.xlabel(r'$x\,[{{\rm kpc}}]$')
plt.ylabel(r'$z\,[{{\rm kpc}}]$')
def plot_y_z_uniform(B, skipy=5, skipz=5, ix=0, quiver=True, contour=True,
quiver_color='0.25', cmap='viridis',
vmin=None, vmax=None, **kwargs):
"""
    Plots a y-z slice of the field. Assumes B is created using a Cartesian
    grid; for a more sophisticated/flexible plotting script which does not
    rely on the grid structure, see plot_slice.
The plot consists of:
1) a coloured contourplot of :math:`B_\phi`
2) Quivers showing the y-z projection of the field
Parameters
----------
B : B_field
a B_field or B_field_component object
quiver : bool
If True, shows quivers. Default True
contour : bool
If True, shows contours. Default: True
skipy/skipz : int
Tweaks the display of quivers (Default: skipz=5, skipy=5)
"""
# Requires a Cartesian grid
assert B.grid.grid_type == 'cartesian'
# Makes a color contour plot
CP = plt.contourf(B.grid.y[ix,:,:], B.grid.z[ix,:,:], B.phi[ix,:,:],
alpha=0.75, cmap=cmap, vmin=vmin, vmax=vmax)
CB = plt.colorbar(CP, label=r'$B_\phi\,[\mu{{\rm G}}]$',)
plt.setp(CP.collections , linewidth=2)
if quiver:
plt.quiver(B.grid.y[ix,::skipy,::skipz], B.grid.z[ix,::skipy,::skipz],
B.y[ix,::skipy,::skipz],B.z[ix,::skipy,::skipz],
color=quiver_color, alpha=0.75,**kwargs)
plt.ylim([B.grid.z[ix,:,:].min(),
B.grid.z[ix,:,:].max()])
plt.xlim([B.grid.y[ix,:,:].min(),
B.grid.y[ix,:,:].max()])
plt.xlabel(r'$y\,[{{\rm kpc}}]$')
plt.ylabel(r'$z\,[{{\rm kpc}}]$')
def plot_x_y_uniform(B, skipx=5, skipy=5, iz=0, field_lines=True, quiver=True,
vmin=None, vmax=None, contour=True, levels=None,
quiver_color='0.25',cmap='viridis',**kwargs):
"""
    Plots an x-y slice of the field. Assumes B is created using a Cartesian
    grid; for a more sophisticated/flexible plotting script which does not
    rely on the grid structure, see plot_slice.
The plot consists of:
1) a coloured contourplot of :math:`|B|^2`
2) Field lines of the :math:`B_x` and :math:`B_y` field
3) Quivers showing the :math:`B_x` and :math:`B_y` field
Parameters
----------
B : B_field
a B_field or B_field_component object
field_lines : bool
If True, shows field lines. Default: True
quiver : bool
If True, shows quivers. Default True
contour : bool
If True, shows contours. Default: True
skipx/skipy : int
Tweaks the display of quivers (Default: skipx=5, skipy=5)
"""
# Requires a Cartesian grid
assert B.grid.grid_type == 'cartesian'
if contour:
CP = plt.contourf(B.grid.x[:,:,iz], B.grid.y[:,:,iz],
np.sqrt(B.x[:,:,iz]**2+B.y[:,:,iz]**2+B.z[:,:,iz]**2),
alpha=0.75, cmap=cmap, vmax=vmax, vmin=vmin,
levels=levels)
CB = plt.colorbar(CP, label=r'$B\,[\mu{{\rm G}}]$',)
plt.setp(CP.collections , linewidth=2)
if field_lines:
plt.streamplot(np.array(B.grid.x[:,0,iz]), np.array(B.grid.y[0,:,iz]),
-np.array(B.y[:,:,iz]), -np.array(B.x[:,:,iz]),color='r')
if quiver:
Bx, By = B.x[::skipx,::skipy,iz],B.y[::skipx,::skipy,iz]
Bx[Bx==0] = np.nan
By[By==0] = np.nan
        plt.quiver(B.grid.x[::skipx,::skipy,iz], B.grid.y[::skipx,::skipy,iz],
                   Bx, By, color=quiver_color, alpha=0.75, **kwargs)
|
jtopjian/st2
|
st2actions/tests/unit/test_notifier.py
|
Python
|
apache-2.0
| 4,699
| 0.003405
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import mock
import unittest2
import st2tests.config as tests_config
tests_config.parse_args()
from st2actions.notifier import Notifier
from st2common.constants.triggers import INTERNAL_TRIGGER_TYPES
from st2common.models.db.liveaction import LiveActionDB
from st2common.models.db.notification import NotificationSchema
from st2common.models.db.notification import NotificationSubSchema
from st2common.persistence.action import Action
from st2common.models.system.common import ResourceReference
ACTION_TRIGGER_TYPE = INTERNAL_TRIGGER_TYPES['action'][0]
NOTIFY_TRIGGER_TYPE = INTERNAL_TRIGGER_TYPES['action'][1]
MOCK_EXECUTION_ID = '287r8383t5BDSVBNVDNBVD'
class NotifierTestCase(unittest2.TestCase):
class MockDispatcher(object):
def __init__(self, tester):
self.tester = tester
self.notify_trigger = ResourceReference.to_string_reference(
pack=NOTIFY_TRIGGER_TYPE['pack'],
name=NOTIFY_TRIGGER_TYPE['name'])
self.action_trigger = ResourceReference.to_string_reference(
pack=ACTION_TRIGGER_TYPE['pack'],
name=ACTION_TRIGGER_TYPE['name'])
def dispatch(self, *args, **kwargs):
try:
self.tester.assertEqual(len(args), 1)
self.tester.assertTrue('payload' in kwargs)
payload = kwargs['payload']
if args[0] == self.notify_trigger:
self.tester.assertEqual(payload['status'], 'succeeded')
self.tester.assertTrue('execution_id' in payload)
self.tester.assertEqual(payload['execution_id'], MOCK_EXECUTION_ID)
self.tester.assertTrue('start_timestamp' in payload)
self.tester.assertTrue('end_timestamp' in payload)
self.tester.assertEqual('core.local', payload['action_ref'])
self.tester.assertEqual('Action succeeded.', payload['message'])
self.tester.assertTrue('data' in payload)
                self.tester.assertEqual('run-local-cmd', payload['runner_ref'])
if args[0] == self.action_trigger:
self.tester.assertEqual(payload['status'], 'succeeded')
self.tester.assertTrue('execution_id' in payload)
self.tester.assertEqual(payload['execution_id'], MOCK_EXECUTION_ID)
self.tester.assertTrue('start_timestamp' in payload)
self.tester.assertEqual('core.local', payload['action_name'])
self.tester.assertEqual('core.local', payload['action_ref'])
self.tester.assertTrue('result' in payload)
self.tester.assertTrue('parameters' in payload)
                self.tester.assertEqual('run-local-cmd', payload['runner_ref'])
except Exception:
self.tester.fail('Test failed')
@mock.patch.object(Action, 'get_by_ref', mock.MagicMock(
return_value={'runner_type': {'name': 'run-local-cmd'}}))
@mock.patch.object(Notifier, '_get_execution_id', mock.MagicMock(
return_value=MOCK_EXECUTION_ID))
def test_notify_triggers(self):
liveaction = LiveActionDB(action='core.local')
liveaction.description = ''
liveaction.status = 'succeeded'
liveaction.parameters = {}
on_success = NotificationSubSchema(message='Action succeeded.')
on_failure = NotificationSubSchema(message='Action failed.')
liveaction.notify = NotificationSchema(on_success=on_success,
on_failure=on_failure)
liveaction.start_timestamp = datetime.datetime.utcnow()
dispatcher = NotifierTestCase.MockDispatcher(self)
notifier = Notifier(connection=None, queues=[], trigger_dispatcher=dispatcher)
notifier.process(liveaction)
|
cscanlin/munger-builder
|
munger_builder/forms.py
|
Python
|
mit
| 269
| 0.003717
|
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm, AuthenticationForm
from django import forms
class UserRegistrationForm(UserCreationForm):
class Meta:
model = User
        fields = ('username', 'email',)
|
mhugent/Quantum-GIS
|
tests/src/python/test_qgscomposershapes.py
|
Python
|
gpl-2.0
| 3,326
| 0.00391
|
# -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsComposerShape.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = '(C) 2012 by Dr. Horst Düster / Dr. Marco Hugentobler'
__date__ = '20/08/2012'
__copyright__ = 'Copyright 2012, The QGIS Project'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
import qgis
from PyQt4.QtCore import QFileInfo
from PyQt4.QtXml import QDomDocument
from PyQt4.QtGui import (QPainter, QColor)
from qgis.core import (QgsComposerShape,
QgsRectangle,
QgsComposition,
QgsMapSettings
)
from utilities import (unitTestDataPath,
getQgisTestApp,
TestCase,
unittest,
expectedFailure
)
from qgscompositionchecker import QgsCompositionChecker
QGISAPP, CANVAS, IFACE, PARENT = getQgisTestApp()
TEST_DATA_DIR = unitTestDataPath()
class TestQgsComposerShapes(TestCase):
def __init__(self, methodName):
"""Run once on class initialisation."""
unittest.TestCase.__init__(self, methodName)
self.mapSettings = QgsMapSettings()
# create composition
self.mComposition = QgsComposition(self.mapSettings)
self.mComposition.setPaperSize(297, 210)
self.mComposerShape = QgsComposerShape(20, 20, 150, 100, self.mComposition)
self.mComposerShape.setBackgroundColor(QColor.fromRgb(255, 150, 0))
self.mComposition.addComposerShape(self.mComposerShape)
def testRectangle(self):
"""Test rectangle composer shape"""
self.mComposerShape.setShapeType(QgsComposerShape.Rectangle)
checker = QgsCompositionChecker('composershapes_rectangle', self.mComposition)
myTestResult, myMessage = checker.testComposition()
assert myTestResult == True, myMessage
def testEllipse(self):
"""Test ellipse composer shape"""
self.mComposerShape.setShapeType(QgsComposerShape.Ellipse)
        checker = QgsCompositionChecker('composershapes_ellipse', self.mComposition)
myTestResult, myMessage = checker.testComposition()
assert myTestResult == True, myMessage
def testTriangle(self):
"""Test triangle composer shape"""
self.mComposerShape.setShapeType(QgsComposerShape.Triangle)
checker = QgsCompositionChecker('composershapes_triangle', self.mComposition)
myTestResult, myMessage = checker.testComposition()
        assert myTestResult == True, myMessage
def testRoundedRectangle(self):
"""Test rounded rectangle composer shape"""
self.mComposerShape.setShapeType(QgsComposerShape.Rectangle)
self.mComposerShape.setCornerRadius(30)
checker = QgsCompositionChecker('composershapes_roundedrect', self.mComposition)
myTestResult, myMessage = checker.testComposition()
self.mComposerShape.setCornerRadius(0)
assert myTestResult == True, myMessage
if __name__ == '__main__':
unittest.main()
|
takeshineshiro/heat
|
heat/scaling/rolling_update.py
|
Python
|
apache-2.0
| 1,960
| 0
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def needs_update(targ_capacity, curr_capacity, num_up_to_date):
"""
Return whether there are more batch updates to do.
Inputs are the target size for the group, the current size of the group,
and the number of members that already have the latest definition.
"""
return not (num_up_to_date >= curr_capacity == targ_capacity)
def next_batch(targ_capacity, curr_capacity, num_up_to_date, batch_size,
min_in_service):
"""
Return details of the next batch in a batched update.
    The result is a tuple containing the new size of the group and the number
    of members that may receive the new definition (by a combination of
    creating new members and updating existing ones).
Inputs are the target size for the group, the current size of the group,
the number of members that already have the latest definition, the batch
size, and the minimum number of members to keep in service during a rolling
update.
"""
assert num_up_to_date <= curr_capacity
efft_min_sz = min(min_in_service, targ_capacity, curr_capacity)
efft_bat_sz = min(batch_size, max(targ_capacity - num_up_to_date, 0))
new_capacity = efft_bat_sz + max(min(curr_capacity,
targ_capacity - efft_bat_sz),
efft_min_sz)
return new_capacity, efft_bat_sz
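# Worked example (added for illustration): rolling-update a group of 4
# members with batch_size=2 while keeping at least 3 in service; no member
# has the new definition yet, so the group temporarily grows to 5:
#
#     >>> next_batch(4, 4, 0, 2, 3)
#     (5, 2)
#
# since efft_min_sz = min(3, 4, 4) = 3, efft_bat_sz = min(2, 4 - 0) = 2 and
# new_capacity = 2 + max(min(4, 4 - 2), 3) = 5.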
|
xbed/Mixly_Arduino
|
mixly_arduino/mpBuild/common/esptool.py-script.py
|
Python
|
apache-2.0
| 442
| 0
|
#!c:\users\fredqian\appdata\local\programs\python\python36-32\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'esptool==2.3.1','console_scripts','esptool.py'
__requires__ = 'esptool==2.3.1'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
        load_entry_point('esptool==2.3.1', 'console_scripts', 'esptool.py')()
)
|
harveyr/thunderbox
|
app/lintblame/git.py
|
Python
|
mit
| 2,301
| 0
|
import subprocess
import re
import os
from app import util
BLAME_NAME_REX = re.compile(r'\(([\w\s]+)\d{4}')
def git_path(path):
"""Returns the top-level git path."""
dir_ = path
if os.path.isfile(path):
dir_ = os.path.split(path)[0]
proc = subprocess.Popen(
['git', 'rev-parse', '--show-toplevel'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd=dir_
)
out = proc.communicate()[0]
if out:
return out.strip()
return None
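# Example (illustrative, hypothetical path): for a file inside a checkout,
#
#     >>> git_path('/home/user/project/src/module.py')
#     '/home/user/project'
#
# i.e. whatever `git rev-parse --show-toplevel` reports for that directory.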
def git_name():
return subprocess.check_output(["git", "config", "user.name"]).strip()
def git_branch(path):
working_dir = path
if not os.path.isdir(path):
working_dir = os.path.split(path)[0]
proc = subprocess.Popen(
['git', 'rev-parse', '--abbrev-ref', 'HEAD'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd=working_dir
)
    out, err = proc.communicate()
if err:
return None
return out.strip()
def git_branch_files(path):
path = util.path_dir(path)
if not path:
raise Exception("Bad path: {}".format(path))
top_dir = git_path(path)
proc = subprocess.Popen(
["git", "diff", "--name-only"],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd=top_dir
)
out = proc.communicate()[0]
all_files = set(out.splitlines())
branch = git_branch(path)
if branch != 'master':
proc = subprocess.Popen(
["git", "diff", "--name-only", "master..HEAD"],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd=path
)
out = proc.communicate()[0]
all_files.update(out.splitlines())
return [os.path.join(top_dir, i) for i in all_files if i]
def blame(path):
working_dir = os.path.split(path)[0]
proc = subprocess.Popen(
['git', 'blame', path],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd=working_dir
)
out, err = proc.communicate()
blame_lines = (out + err).splitlines()
result = {}
for i, line in enumerate(blame_lines):
match = BLAME_NAME_REX.search(line)
if match:
result[i] = match.group(1).strip()
else:
result[i] = None
return result
|
mavarick/spider-python
|
webspider/settings/default_params.py
|
Python
|
gpl-2.0
| 1,332
| 0.012763
|
"""
global params
"""
# url database
default_url_db = {"host": "127.0.0.1",
"port": 3306,
"username": "root",
"password": "",
"database": "liuxf",
"charset": "utf8",
"tablename": "url"}
# proxy database
default_proxy_db = {"host": "127.0.0.1",
"port": 3306,
"username": "root",
"password": "",
"database": "liuxf",
"charset": "utf8",
"tablename": "proxy"}
# Enhanced Queue params
DEFAULT_QUEUE_SIZE = 1000  # default queue size
DEFAULT_QUEUE_TIMEOUT = 0.0001 # default queue timeout
# Links pattern
RE_PATTERN_URL = [
(r'href\s*=\s*(\'|\")(.+?)(\1)', 2), # represent the 2nd brackets,
# in use ,should be result[1]
]
# log config file relative path
TRACE_LOG_CONF = r"./webspider/trace/log.conf"
APP_LOG_PATH = r"./logs/run.log"
LOGGER_KEY = "root"
# Url Queue maxsize
URL_QUEUE_MAX_SIZE = 100
# Opener timeout for opening one url
URL_OPEN_TIMEOUT = 5
# monitor relative params
# Monitor Info Queue length, which decides exit time when program exits
MONITOR_QUEUE_LEN = 5
# result Queue Max size
RESULT_QUEUE_MAX_SIZE = 100
# get or put timeout for queue
QUEUE_TIMEOUT = 0.001
|
dmazzella/uble
|
micropython-lib/collections/__init__.py
|
Python
|
mit
| 401
| 0
|
try:
from collections import namedtuple, OrderedDict, deque, defaultdict
except ImportError:
try:
        from ucollections import namedtuple, OrderedDict, deque
except ImportError:
from ucollections import namedtuple, OrderedDict
if "deque" not in globals():
from .deque import deque
if "d
|
efaultdict" not in globals():
from .defaultdict import defaultdict
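# Usage sketch (added; assumes a MicroPython port that provides
# ucollections): importing through this package gives the same names on
# CPython and MicroPython alike.
#
#     from collections import defaultdict, namedtuple
#     Point = namedtuple('Point', ('x', 'y'))
#     counts = defaultdict(int)
#     counts[Point(0, 0)] += 1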
|
LordDamionDevil/Lony
|
lib/discord/ext/commands/context.py
|
Python
|
gpl-3.0
| 4,638
| 0.001509
|
# -*- coding: utf-8 -*-
"""
The MIT License (MIT)
Copyright (c) 2015-2016 Rapptz
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
import asyncio
class Context:
    """Represents the context under which a command is being invoked.
This class contains a lot of meta data to help you understand more about
the invocation context. This class is not created manually and is instead
passed around to commands by passing in :attr:`Command.pass_context`.
Attributes
-----------
message : :class:`discord.Message`
The message that triggered the command being executed.
bot : :class:`Bot`
The bot that contains the command being executed.
args : list
The list of transformed arguments that were passed into the command.
If this is accessed during the :func:`on_command_error` event
then this list could be incomplete.
kwargs : dict
A dictionary of transformed arguments that were passed into the command.
Similar to :attr:`args`\, if this is accessed in the
:func:`on_command_error` event then this dict could be incomplete.
prefix : str
The prefix that was used to invoke the command.
command
The command (i.e. :class:`Command` or its superclasses) that is being
invoked currently.
invoked_with : str
The command name that triggered this invocation. Useful for finding out
which alias called the command.
invoked_subcommand
The subcommand (i.e. :class:`Command` or its superclasses) that was
invoked. If no valid subcommand was invoked then this is equal to
`None`.
subcommand_passed : Optional[str]
The string that was attempted to call a subcommand. This does not have
to point to a valid registered subcommand and could just point to a
nonsense string. If nothing was passed to attempt a call to a
subcommand then this is set to `None`.
"""
def __init__(self, **attrs):
self.message = attrs.pop('message', None)
self.bot = attrs.pop('bot', None)
self.args = attrs.pop('args', [])
self.kwargs = attrs.pop('kwargs', {})
self.prefix = attrs.pop('prefix')
self.command = attrs.pop('command', None)
self.view = attrs.pop('view', None)
self.invoked_with = attrs.pop('invoked_with', None)
self.invoked_subcommand = attrs.pop('invoked_subcommand', None)
self.subcommand_passed = attrs.pop('subcommand_passed', None)
@asyncio.coroutine
def invoke(self, command, *args, **kwargs):
"""|coro|
Calls a command with the arguments given.
This is useful if you want to just call the callback that a
:class:`Command` holds internally.
Note
------
You do not pass in the context as it is done for you.
Parameters
-----------
command : :class:`Command`
A command or superclass of a command that is going to be called.
\*args
            The arguments to use.
\*\*kwargs
The keyword arguments to use.
"""
arguments = []
if command.instance is not None:
arguments.append(command.instance)
if command.pass_context:
arguments.append(self)
arguments.extend(args)
ret = yield from command.callback(*arguments, **kwargs)
return ret
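    # Illustrative call site (added; `ping` is a hypothetical registered
    # command):
    #
    #     @bot.command(pass_context=True)
    #     @asyncio.coroutine
    #     def double_ping(ctx):
    #         yield from ctx.invoke(bot.commands['ping'])
    #         yield from ctx.invoke(bot.commands['ping'])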
@property
def cog(self):
"""Returns the cog associated with this context's command. None if it does not exist."""
if self.command is None:
return None
return self.command.instance
|
start-jsk/jsk_apc
|
demos/instance_occlsegm/instance_occlsegm_lib/contrib/instance_occlsegm/models/fcn/fcn16s_resnet_occlusion.py
|
Python
|
bsd-3-clause
| 3,904
| 0
|
import chainer
import chainer.functions as F
import chainer.links as L
from chainer_mask_rcnn.models.mask_rcnn_resnet import _copy_persistent_chain
from chainer_mask_rcnn.models.resnet_extractor import _convert_bn_to_affine
from ..resnet import BuildingBlock
from ..resnet import ResNet101Extractor
from ..resnet import ResNet50Extractor
class FCN16sResNetOcclusion(chainer.Chain):
def __init__(self, n_class=21):
self.n_class = n_class
kwargs = {
'initialW': chainer.initializers.Normal(0.01),
}
super(FCN16sResNetOcclusion, self).__init__()
with self.init_scope():
self.extractor = ResNet50Extractor(remove_layers=['res5', 'fc6'])
self.res5 = BuildingBlock(
3, 1024, 512, 2048, stride=1, dilate=2,
initialW=chainer.initializers.Zero(),
)
# head
self.conv6 = L.Convolution2D(2048, 1024, 1, 1, 0, **kwargs)
self.score_fr = L.Convolution2D(1024, n_class, 1, 1, 0, **kwargs)
n_fg_class = n_class - 1
self.score_oc = L.Convolution2D(
1024, n_fg_class, 1, 1, 0, **kwargs
)
_convert_bn_to_affine(self.res5)
self._copy_imagenet_pretrained_resnet(n_layers=50)
def _copy_imagenet_pretrained_resnet(self, n_layers):
if n_layers == 50:
pretrained_model = ResNet50Extractor(pretrained_model='auto')
elif n_layers == 101:
pretrained_model = ResNet101Extractor(pretrained_model='auto')
else:
raise ValueError
self.res5.copyparams(pretrained_model.res5)
_copy_persistent_chain(self.res5, pretrained_model.res5)
def __call__(self, x):
assert x.shape[2] % 16 == 0
assert x.shape[3] % 16 == 0
# conv1 -> bn1 -> res2 -> res3 -> res4
h = self.extractor(x) # 1/16
# res5
h = self.res5(h) # 1/16
assert h.shape[2] == (x.shape[2] / 16)
assert h.shape[3] == (x.shape[3] / 16)
h = self.conv6(h) # 1/16
conv6 = h
# score
h = self.score_fr(conv6) # 1/16
h = F.resize_images(h, x.shape[2:4]) # 1/1
score = h
# score_oc
h = self.score_oc(conv6) # 1/16
h = F.resize_images(h, x.shape[2:4]) # 1/1
score_oc = h
return score, score_oc
def predict(self, imgs):
lbls = []
masks_oc = []
for img in imgs:
with chainer.no_backprop_mode(), \
chainer.using_config('train', False):
x = self.xp.asarray(img[None])
score, score_oc = self.__call__(x)
lbl = chainer.functions.argmax(score, axis=1)
                prob_oc = chainer.functions.sigmoid(score_oc)
lbl = chainer.cuda.to_cpu(lbl.array[0])
mask_oc = chainer.cuda.to_cpu(prob_oc.array[0] > 0.5)
lbls.append(lbl)
masks_oc.append(mask_oc)
return lbls, masks_oc
class OcclusionSegmentationTrainChain(chainer.Chain):
def __init__(self, predictor, train_occlusion=True):
super(OcclusionSegmentationTrainChain, self).__init__()
with self.init_scope():
self.predictor = predictor
self._train_occlusion = train_occlusion
def __call__(self, x, lbl_vis, lbl_occ):
score_vis, score_occ = self.predictor(x)
loss_vis = F.softmax_cross_entropy(score_vis, lbl_vis)
if self._train_occlusion:
loss_occ = F.sigmoid_cross_entropy(score_occ, lbl_occ)
else:
loss_occ = chainer.Variable(
self.xp.zeros((), dtype=self.xp.float32)
)
loss = loss_vis + loss_occ
chainer.report({
'loss': loss,
'loss_vis': loss_vis,
'loss_occ': loss_occ,
}, self)
return loss
|
shakamunyi/neutron-vrrp
|
neutron/tests/unit/services/metering/drivers/__init__.py
|
Python
|
apache-2.0
| 665
| 0
|
# Copyright (C) 2013 eNovance SAS <licensing@enovance.com>
#
# Author: Sylvain Afchain <sylvain.afchain@enovance.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
|
fengkaicnic/traffic
|
traffic/utils.py
|
Python
|
apache-2.0
| 44,222
| 0.000565
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Utilities and helper functions."""
import contextlib
import datetime
import errno
import functools
import hashlib
import inspect
import os
import pyclbr
import random
import re
import shlex
import shutil
import signal
import socket
import struct
import sys
import tempfile
import time
import uuid
import weakref
from xml.sax import saxutils
from eventlet import event
from eventlet.green import subprocess
from eventlet import greenthread
from eventlet import semaphore
import netaddr
from traffic.openstack.common.gettextutils import _
from traffic.common import deprecated
from traffic import exception
from traffic import flags
from traffic.openstack.common import cfg
from traffic.openstack.common import excutils
from traffic.openstack.common import importutils
from traffic.openstack.common import log as logging
from traffic.openstack.common import timeutils
LOG = logging.getLogger(__name__)
FLAGS = flags.FLAGS
FLAGS.register_opt(
cfg.BoolOpt('disable_process_locking', default=False,
help='Whether to disable inter-process locks'))
def vpn_ping(address, port, timeout=0.05, session_id=None):
"""Sends a vpn negotiation packet and returns the server session.
Returns False on a failure. Basic packet structure is below.
Client packet (14 bytes)::
         0 1        8 9  13
+-+--------+-----+
|x| cli_id |?????|
+-+--------+-----+
x = packet identifier 0x38
cli_id = 64 bit identifier
? = unknown, probably flags/padding
Server packet (26 bytes)::
         0 1        8 9  13 14    21 22 25
+-+--------+-----+--------+----+
|x| srv_id |?????| cli_id |????|
+-+--------+-----+--------+----+
x = packet identifier 0x40
cli_id = 64 bit identifier
? = unknown, probably flags/padding
bit 9 was 1 and the rest were 0 in testing
"""
if session_id is None:
session_id = random.randint(0, 0xffffffffffffffff)
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
data = struct.pack('!BQxxxxx', 0x38, session_id)
sock.sendto(data, (address, port))
sock.settimeout(timeout)
try:
received = sock.recv(2048)
except socket.timeout:
return False
finally:
sock.close()
fmt = '!BQxxxxxQxxxx'
if len(received) != struct.calcsize(fmt):
print struct.calcsize(fmt)
return False
(identifier, server_sess, client_sess) = struct.unpack(fmt, received)
if identifier == 0x40 and client_sess == session_id:
return server_sess
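# Packet-size sanity check (added for clarity): the client format
# '!BQxxxxx' packs to 1 + 8 + 5 = 14 bytes and the server format
# '!BQxxxxxQxxxx' to 1 + 8 + 5 + 8 + 4 = 26 bytes, matching the diagrams
# in the docstring above:
#
#     >>> struct.calcsize('!BQxxxxx'), struct.calcsize('!BQxxxxxQxxxx')
#     (14, 26)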
def _subprocess_setup():
# Python installs a SIGPIPE handler by default. This is usually not what
# non-Python subprocesses expect.
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
def execute(*cmd, **kwargs):
"""Helper method to execute command with optional retry.
If you add a run_as_root=True command, don't forget to add the
corresponding filter to etc/traffic/rootwrap.d !
:param cmd: Passed to subprocess.Popen.
:param process_input: Send to opened process.
:param check_exit_code: Single bool, int, or list of allowed exit
codes. Defaults to [0]. Raise
exception.ProcessExecutionError unless
program exits with one of these code.
:param delay_on_retry: True | False. Defaults to True. If set to
True, wait a short amount of time
before retrying.
:param attempts: How many times to retry cmd.
:param run_as_root: True | False. Defaults to False. If set to True,
the command is prefixed by the command specified
in the root_helper FLAG.
:raises exception.TrafficException: on receiving unknown arguments
:raises exception.ProcessExecutionError:
:returns: a tuple, (stdout, stderr) from the spawned process, or None if
the command fails.
"""
process_input = kwargs.pop('process_input', None)
check_exit_code = kwargs.pop('check_exit_code', [0])
ignore_exit_code = False
if isinstance(check_exit_code, bool):
ignore_exit_code = not check_exit_code
check_exit_code = [0]
elif isinstance(check_exit_code, int):
check_exit_code = [check_exit_code]
delay_on_retry = kwargs.pop('delay_on_retry', True)
attempts = kwargs.pop('attempts', 1)
run_as_root = kwargs.pop('run_as_root', False)
shell = kwargs.pop('shell', False)
if len(kwargs):
raise exception.TrafficException(_('Got unknown keyword args '
'to utils.execute: %r') % kwargs)
if run_as_root:
if FLAGS.rootwrap_config is None or FLAGS.root_helper != 'sudo':
deprecated.warn(_('The root_helper option (which lets you specify '
'a root wrapper different from traffic-rootwrap, '
'and defaults to using sudo) is now deprecated. '
'You should use the rootwrap_config option '
'instead.'))
if (FLAGS.rootwrap_config is not None):
cmd = ['sudo', 'traffic-rootwrap', FLAGS.rootwrap_config] + list(cmd)
else:
cmd = shlex.split(FLAGS.root_helper) + list(cmd)
cmd = map(str, cmd)
while attempts > 0:
attempts -= 1
try:
            LOG.debug(_('Running cmd (subprocess): %s'), ' '.join(cmd))
_PIPE = subprocess.PIPE # pylint: disable=E1101
obj = subprocess.Popen(cmd,
stdin=_PIPE,
stdout=_PIPE,
stderr=_PIPE,
close_fds=True,
preexec_fn=_subprocess_setup,
shell=shell)
result = None
if process_input is not None:
result = obj.communicate(process_input)
else:
result = obj.communicate()
obj.stdin.close() # pylint: disable=E1101
_returncode = obj.returncode # pylint: disable=E1101
LOG.debug(_('Result was %s') % _returncode)
if not ignore_exit_code and _returncode not in check_exit_code:
(stdout, stderr) = result
print ' '.join(cmd)
raise exception.ProcessExecutionError(
exit_code=_returncode,
stdout=stdout,
stderr=stderr,
cmd=' '.join(cmd))
return result
except exception.ProcessExecutionError:
if not attempts:
raise
else:
LOG.debug(_('%r failed. Retrying.'), cmd)
if delay_on_retry:
greenthread.sleep(random.randint(20, 200) / 100.0)
finally:
# NOTE(termie): this appears to be necessary to let the subprocess
# call clean something up in between calls, without
# it two execute calls in a row hangs the second one
greenthread.sleep(0)
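# Illustrative calls (added; not from the original module):
#
#     stdout, stderr = execute('ls', '-l', '/tmp')
#     stdout, stderr = execute('rm', '-rf', some_path, run_as_root=True,
#                              attempts=3)
#
# Both raise exception.ProcessExecutionError when the exit code is not in
# check_exit_code (pass check_exit_code=False to ignore exit codes).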
def trycmd(*args, **kwargs):
"""
|
zasdfgbnm/qutip
|
qutip/entropy.py
|
Python
|
bsd-3-clause
| 10,172
| 0.000098
|
# This file is part of QuTiP: Quantum Toolbox in Python.
#
# Copyright (c) 2011 and later, Paul D. Nation and Robert J. Johansson.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
#    modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
#    notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the QuTiP: Quantum Toolbox in Python nor the names
# of its contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
###############################################################################
__all__ = ['entropy_vn', 'entropy_linear', 'entropy_mutual', 'negativity',
'concurrence', 'entropy_conditional', 'entangling_power']
from numpy import e, real, sort, sqrt
from scipy import log, log2
from qutip.qobj import ptrace
from qutip.states import ket2dm
from qutip.tensor import tensor
from qutip.operators import sigmay
from qutip.sparse import sp_eigs
from qutip.qip.gates import swap
from qutip.partial_transpose import partial_transpose
def entropy_vn(rho, base=e, sparse=False):
"""
Von-Neumann entropy of density matrix
Parameters
----------
rho : qobj
Density matrix.
base : {e,2}
Base of logarithm.
sparse : {False,True}
Use sparse eigensolver.
Returns
-------
entropy : float
Von-Neumann entropy of `rho`.
Examples
--------
>>> rho=0.5*fock_dm(2,0)+0.5*fock_dm(2,1)
>>> entropy_vn(rho,2)
1.0
"""
if rho.type == 'ket' or rho.type == 'bra':
rho = ket2dm(rho)
vals = sp_eigs(rho.data, rho.isherm, vecs=False, sparse=sparse)
nzvals = vals[vals != 0]
if base == 2:
logvals = log2(nzvals)
elif base == e:
logvals = log(nzvals)
else:
raise ValueError("Base must be 2 or e.")
return float(real(-sum(nzvals * logvals)))
def entropy_linear(rho):
"""
Linear entropy of a density matrix.
Parameters
----------
rho : qobj
        Density matrix or ket/bra vector.
Returns
-------
entropy : float
Linear entropy of rho.
Examples
--------
>>> rho=0.5*fock_dm(2,0)+0.5*fock_dm(2,1)
>>> entropy_linear(rho)
0.5
"""
if rho.type == 'ket' or rho.type == 'bra':
rho = ket2dm(rho)
return float(real(1.0 - (rho ** 2).tr()))
def concurrence(rho):
"""
Calculate the concurrence entanglement measure for a two-qubit state.
Parameters
----------
state : qobj
Ket, bra, or density matrix for a two-qubit state.
Returns
-------
concur : float
Concurrence
References
----------
.. [1] http://en.wikipedia.org/wiki/Concurrence_(quantum_computing)
"""
if rho.isket and rho.dims != [[2, 2], [1, 1]]:
raise Exception("Ket must be tensor product of two qubits.")
elif rho.isbra and rho.dims != [[1, 1], [2, 2]]:
raise Exception("Bra must be tensor product of two qubits.")
elif rho.isoper and rho.dims != [[2, 2], [2, 2]]:
raise Exception("Density matrix must be tensor product of two qubits.")
if rho.isket or rho.isbra:
rho = ket2dm(rho)
sysy = tensor(sigmay(), sigmay())
rho_tilde = (rho * sysy) * (rho.conj() * sysy)
evals = rho_tilde.eigenenergies()
# abs to avoid problems with sqrt for very small negative numbers
evals = abs(sort(real(evals)))
lsum = sqrt(evals[3]) - sqrt(evals[2]) - sqrt(evals[1]) - sqrt(evals[0])
return max(0, lsum)
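# Example (added; assumes qutip's bell_state and basis helpers): a maximally
# entangled two-qubit state has concurrence 1, a product state has
# concurrence 0 (up to numerical precision):
#
#     >>> from qutip.states import bell_state, basis
#     >>> concurrence(bell_state('00'))
#     1.0
#     >>> concurrence(tensor(basis(2, 0), basis(2, 0)))
#     0.0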
def negativity(rho, subsys, method='tracenorm', logarithmic=False):
"""
Compute the negativity for a multipartite quantum system described
by the density matrix rho. The subsys argument is an index that
indicates which system to compute the negativity for.
.. note::
Experimental.
"""
mask = [idx == subsys for idx, n in enumerate(rho.dims[0])]
rho_pt = partial_transpose(rho, mask)
if method == 'tracenorm':
N = ((rho_pt.dag() * rho_pt).sqrtm().tr().real - 1)/2.0
elif method == 'eigenvalues':
l = rho_pt.eigenenergies()
N = ((abs(l)-l)/2).sum()
else:
raise ValueError("Unknown method %s" % method)
if logarithmic:
return log2(2 * N + 1)
else:
return N
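# Example (added; assumes qutip's bell_state helper): a two-qubit maximally
# entangled state has negativity 1/2 and logarithmic negativity 1
# (up to numerical precision):
#
#     >>> from qutip.states import bell_state
#     >>> rho = ket2dm(bell_state('00'))
#     >>> negativity(rho, 0)
#     0.5
#     >>> negativity(rho, 0, logarithmic=True)
#     1.0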
def entropy_mutual(rho, selA, selB, base=e, sparse=False):
"""
Calculates the mutual information S(A:B) between selection
components of a system density matrix.
Parameters
----------
rho : qobj
Density matrix for composite quantum systems
selA : int/list
`int` or `list` of first selected density matrix components.
selB : int/list
`int` or `list` of second selected density matrix components.
base : {e,2}
Base of logarithm.
sparse : {False,True}
Use sparse eigensolver.
Returns
-------
ent_mut : float
Mutual information between selected components.
"""
if isinstance(selA, int):
selA = [selA]
if isinstance(selB, int):
selB = [selB]
if rho.type != 'oper':
raise TypeError("Input must be a density matrix.")
if (len(selA) + len(selB)) != len(rho.dims[0]):
raise TypeError("Number of selected components must match " +
"total number.")
rhoA = ptrace(rho, selA)
rhoB = ptrace(rho, selB)
out = (entropy_vn(rhoA, base, sparse=sparse) +
entropy_vn(rhoB, base, sparse=sparse) -
entropy_vn(rho, base, sparse=sparse))
return out
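# Example (added; assumes qutip's bell_state helper): for a Bell state each
# one-qubit reduced state carries one bit of entropy while the joint state
# is pure, so S(A:B) = 1 + 1 - 0 = 2 bits (up to floating-point error):
#
#     >>> from qutip.states import bell_state
#     >>> entropy_mutual(ket2dm(bell_state('00')), [0], [1], base=2)
#     2.0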
def _entropy_relative(rho, sigma, base=e, sparse=False):
"""
****NEEDS TO BE WORKED ON****
Calculates the relative entropy S(rho||sigma) between two density
matrices.
Parameters
----------
rho : qobj
First density matrix.
sigma : qobj
Second density matrix.
base : {e,2}
Base of logarithm.
Returns
-------
rel_ent : float
Value of relative entropy.
"""
if rho.type != 'oper' or sigma.type != 'oper':
raise TypeError("Inputs must be density matrices..")
# sigma terms
svals = sp_eigs(sigma.data, sigma.isherm, vecs=False, sparse=sparse)
snzvals = svals[svals != 0]
if base == 2:
slogvals = log2(snzvals)
elif base == e:
slogvals = log(snzvals)
else:
raise ValueError("Base must be 2 or e.")
# rho terms
rvals = sp_eigs(rho.data, rho.isherm, vecs=False, sparse=sparse)
rnzvals = rvals[rvals != 0]
# calculate tr(rho*log sigma)
rel_trace = float(real(sum(rnzvals * slogvals)))
return -entropy_vn(rho, base, sparse) - rel_trace
def entropy_conditional(rho, selB, base=e, sparse=False):
"""
Calculates the conditional entropy :math:`S(A|B)=S(A,B)-S(B)`
of a selected density matrix component.
Parameters
----------
rho : qobj
Density matrix of composite object
selB : int/list
Selected components
|
ScreamingUdder/mantid
|
scripts/HFIR_4Circle_Reduction/detector2dview.py
|
Python
|
gpl-3.0
| 14,100
| 0.002199
|
#pylint: disable=W0403,R0902,R0903,R0904,W0212
from __future__ import (absolute_import, division, print_function)
from HFIR_4Circle_Reduction import mpl2dgraphicsview
from PyQt4 import QtCore
import numpy as np
import os
class Detector2DView(mpl2dgraphicsview.Mpl2dGraphicsView):
"""
Customized 2D detector view
"""
class MousePress(object):
RELEASED = 0
LEFT = 1
RIGHT = 3
newROIDefinedSignal = QtCore.pyqtSignal(int, int, int, int) # return coordinate of the
def __init__(self, parent):
"""
:param parent:
:return:
"""
mpl2dgraphicsview.Mpl2dGraphicsView.__init__(self, parent)
# connect the mouse motion to interact with the canvas
self._myCanvas.mpl_connect('button_press_event', self.on_mouse_press_event)
self._myCanvas.mpl_connect('button_release_event', self.on_mouse_release_event)
self._myCanvas.mpl_connect('motion_notify_event', self.on_mouse_motion)
# class variables
self._myPolygon = None # matplotlib.patches.Polygon
# class status variables
self._roiSelectMode = False
# region of interest. None or 2 tuple of 2-tuple for upper left corner and lower right corner
# mouse positions as start and end
self._roiStart = None
self._roiEnd = None
# mouse
self._mousePressed = Detector2DView.MousePress.RELEASED
# mouse position and resolution
self._currX = 0.
self._currY = 0.
self._resolutionX = 0.005
self._resolutionY = 0.005
# parent window
self._myParentWindow = None
return
# def add_roi(self, roi_start, roi_end):
# """ Add region of interest
# :param roi_start:
# :param roi_end:
# :return:
# """
# # check
# assert isinstance(roi_start, tuple) and len(roi_start) == 2
# assert isinstance(roi_end, tuple) and len(roi_end) == 2
#
# # set
# self._roiStart = roi_start
# self._roiEnd = roi_end
#
# # plot
# self.plot_roi()
#
# return
def clear_canvas(self):
"""
clear canvas (override base class)
:return:
"""
# clear the current record
self._myPolygon = None
# reset mouse selection ROI
# set
self._roiStart = None
self._roiEnd = None
# call base class
super(Detector2DView, self).clear_canvas()
return
def enter_roi_mode(self, state):
"""
Enter or leave the region of interest (ROI) selection mode
:return:
"""
        assert isinstance(state, bool), 'State must be a bool but not {0}.'.format(type(state))
# set
self._roiSelectMode = state
if state:
# new in add-ROI mode
self.remove_roi()
else:
# reset roi start and roi end
self._roiStart = None
self._roiEnd = None
# # reset _myPolygen
# if state is False:
# if self._myPolygon is not None:
# self.remove_roi()
return
def integrate_roi_linear(self, exp_number, scan_number, pt_number, output_dir):
"""
integrate the 2D data inside region of interest along both axis-0 and axis-1 individually.
and the result (as 1D data) will be saved to ascii file.
        the X values will be the corresponding pixel index either along axis-0 or axis-1
:return:
"""
def save_to_file(base_file_name, axis, array1d, start_index):
"""
            save the result (1D data) to an ASCII file
:param base_file_name:
:param axis:
:param array1d:
:param start_index:
:return:
"""
file_name = '{0}_axis_{1}.dat'.format(base_file_name, axis)
wbuf = ''
vec_x = np.arange(len(array1d)) + start_index
for x, d in zip(vec_x, array1d):
wbuf += '{0} \t{1}\n'.format(x, d)
ofile = open(file_name, 'w')
ofile.write(wbuf)
ofile.close()
return
matrix = self.array2d
assert isinstance(matrix, np.ndarray), 'A matrix must be an ndarray but not {0}.'.format(type(matrix))
# get region of interest
if self._roiStart is None:
self._roiStart = (0, 0)
if self._roiEnd is None:
self._roiEnd = matrix.shape
ll_row = min(self._roiStart[0], self._roiEnd[0])
ll_col = min(self._roiStart[1], self._roiEnd[1])
ur_row = max(self._roiStart[0], self._roiEnd[0])
ur_col = max(self._roiStart[1], self._roiEnd[1])
roi_matrix = matrix[ll_col:ur_col, ll_row:ur_row]
sum_0 = roi_matrix.sum(0)
sum_1 = roi_matrix.sum(1)
# write to file
base_name = os.path.join(output_dir, 'Exp{0}_Scan{1}_Pt{2}'.format(exp_number, scan_number, pt_number))
save_to_file(base_name, 0, sum_0, ll_row)
save_to_file(base_name, 1, sum_1, ll_col)
message = 'Integrated values are saved to {0}...'.format(base_name)
return message
@property
def is_roi_selection_drawn(self):
"""
whether ROI is drawn
:return:
"""
is_drawn = not (self._myPolygon is None)
return is_drawn
def get_roi(self):
"""
:return: A list for polygon0
"""
assert self._roiStart is not None
assert self._roiEnd is not None
        # roi start is upper left, roi end is lower right
lower_left_x = min(self._roiStart[0], self._roiEnd[0])
lower_left_y = min(self._roiStart[1], self._roiEnd[1])
lower_left = lower_left_x, lower_left_y
# ROI upper right
upper_right_x = max(self._roiStart[0], self._roiEnd[0])
upper_right_y = max(self._roiStart[1], self._roiEnd[1])
upper_right = upper_right_x, upper_right_y
return lower_left, upper_right
def plot_detector_counts(self, raw_det_data):
"""
plot detector counts as 2D plot
:param raw_det_data:
:return:
"""
x_min = 0
x_max = raw_det_data.shape[0]
y_min = 0
y_max = raw_det_data.shape[1]
count_plot = self.add_plot_2d(raw_det_data, x_min=x_min, x_max=x_max, y_min=y_min, y_max=y_max,
hold_prev_image=False)
if self._myPolygon is not None:
print ('[DB...BAT...] Add PATCH')
self._myCanvas.add_patch(self._myPolygon)
else:
print ('[DB...BAT...] NO PATCH')
print ('[DB...BAT...AFTER] ROI Rect: {0}. 2D plot: {1}'.format(self._myPolygon, count_plot))
return
def plot_roi(self):
""" Plot region of interest (as rectangular) to the canvas from the region set from
:return:
"""
# check
        assert self._roiStart is not None, 'ROI start must be set before plotting.'
        assert self._roiEnd is not None, 'ROI end must be set before plotting.'
# create a vertex list of a rectangular
vertex_array = np.ndarray(shape=(4, 2))
# upper left corner
vertex_array[0][0] = self._roiStart[0]
vertex_array[0][1] = self._roiStart[1]
# lower right corner
vertex_array[2][0] = self._roiEnd[0]
vertex_array[2][1] = self._roiEnd[1]
# upper right corner
vertex_array[1][0] = self._roiEnd[0]
vertex_array[1][1] = self._roiStart[1]
# lower left corner
vertex_array[3][0] = self._roiStart[0]
vertex_array[3][1] = self._roiEnd[1]
# register
if self._myPolygon is not None:
self._myPolygon.remove()
self._myPolygon = None
self._myPolygon = self._myCanvas.plot_polygon(vertex_array, fill=False, color='w')
return
def remove_roi(self):
"""
        Remove the rectangle marking the region of interest
:return:
"""
|
minhphung171093/GreenERP
|
openerp/addons/base/res/res_config.py
|
Python
|
gpl-3.0
| 31,737
| 0.003624
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
from operator import attrgetter
import re
import openerp
from openerp import SUPERUSER_ID
from openerp.osv import osv, fields
from openerp.tools import ustr
from openerp.tools.translate import _
from openerp import exceptions
from lxml import etree
from openerp.exceptions import UserError
_logger = logging.getLogger(__name__)
class res_config_module_installation_mixin(object):
def _install_modules(self, cr, uid, modules, context):
"""Install the requested modules.
return the next action to execute
modules is a list of tuples
            (mod_name, browse_record | None)
"""
ir_module = self.pool.get('ir.module.module')
to_install_ids = []
to_install_missing_names = []
for name, module in modules:
if not module:
to_install_missing_names.append(name)
elif module.state == 'uninstalled':
to_install_ids.append(module.id)
result = None
if to_install_ids:
result = ir_module.button_immediate_install(cr, uid, to_install_ids, context=context)
#FIXME: if result is not none, the corresponding todo will be skipped because it was just marked done
if to_install_missing_names:
return {
'type': 'ir.actions.client',
'tag': 'apps',
'params': {'modules': to_install_missing_names},
}
return result
class res_config_configurable(osv.osv_memory):
''' Base classes for new-style configuration items
Configuration items should inherit from this class, implement
the execute method (and optionally the cancel one) and have
their view inherit from the related res_config_view_base view.
'''
_name = 'res.config'
def _next_action(self, cr, uid, context=None):
Todos = self.pool['ir.actions.todo']
_logger.info('getting next %s', Todos)
active_todos = Todos.browse(cr, uid,
Todos.search(cr, uid, ['&', ('type', '=', 'automatic'), ('state','=','open')]),
context=context)
user_groups = set(map(
lambda g: g.id,
self.pool['res.users'].browse(cr, uid, [uid], context=context)[0].groups_id))
valid_todos_for_user = [
todo for todo in active_todos
if not todo.groups_id or bool(user_groups.intersection((
group.id for group in todo.groups_id)))
]
if valid_todos_for_user:
return valid_todos_for_user[0]
return None
def _next(self, cr, uid, context=None):
_logger.info('getting next operation')
next = self._next_action(cr, uid, context=context)
_logger.info('next action is %s', next)
if next:
return next.action_launch(context=context)
return {
'type': 'ir.actions.client',
'tag': 'reload',
}
def start(self, cr, uid, ids, context=None):
return self.next(cr, uid, ids, context)
def next(self, cr, uid, ids, context=None):
""" Returns the next todo action to execute (using the default
sort order)
"""
return self._next(cr, uid, context=context)
def execute(self, cr, uid, ids, context=None):
""" Method called when the user clicks on the ``Next`` button.
Execute *must* be overloaded unless ``action_next`` is overloaded
(which is something you generally don't need to do).
If ``execute`` returns an action dictionary, that action is executed
rather than just going to the next configuration item.
"""
raise NotImplementedError(
'Configuration items need to implement execute')
def cancel(self, cr, uid, ids, context=None):
""" Method called when the user click on the ``Skip`` button.
``cancel`` should be overloaded instead of ``action_skip``. As with
``execute``, if it returns an action dictionary that action is
executed in stead of the default (going to the next configuration item)
The default implementation is a NOOP.
``cancel`` is also called by the default implementation of
``action_cancel``.
"""
pass
def action_next(self, cr, uid, ids, context=None):
""" Action handler for the ``next`` event.
Sets the status of the todo the event was sent from to
``done``, calls ``execute`` and -- unless ``execute`` returned
an action dictionary -- executes the action provided by calling
``next``.
"""
next = self.execute(cr, uid, ids, context=context)
if next: return next
return self.next(cr, uid, ids, context=context)
def action_skip(self, cr, uid, ids, context=None):
""" Action handler for the ``skip`` event.
Sets the status of the todo the event was sent from to
``skip``, calls ``cancel`` and -- unless ``cancel`` returned
an action dictionary -- executes the action provided by calling
``next``.
"""
next = self.cancel(cr, uid, ids, context=context)
if next: return next
return self.next(cr, uid, ids, context=context)
def action_cancel(self, cr, uid, ids, context=None):
""" Action handler for the ``cancel`` event. That event isn't
generated by the res.config.view.base inheritable view, the
inherited view has to overload one of the buttons (or add one
more).
Sets the status of the todo the event was sent from to
``cancel``, calls ``cancel`` and -- unless ``cancel`` returned
an action dictionary -- executes the action provided by calling
``next``.
"""
next = self.cancel(cr, uid, ids, context=context)
if next: return next
return self.next(cr, uid, ids, context=context)
class res_config_installer(osv.osv_memory, res_config_module_installation_mixin):
""" New-style configuration base specialized for addons selection
and installation.
Basic usage
-----------
Subclasses can simply define a number of _columns as
fields.boolean objects. The keys (column names) should be the
names of the addons to install (when selected). Upon action
execution, selected boolean fields (and those only) will be
interpreted as addons to install, and batch-installed.
Additional addons
-----------------
It is also possible to require the installation of an additional
addon set when a specific preset of addons has been marked for
installation (in the basic usage only, additionals can't depend on
one another).
These additionals are defined through the ``_install_if``
property. This property is a mapping of a collection of addons (by
name) to a collection of addons (by name) [#]_, and if all the *key*
addons are selected for installation, then the *value* ones will
be selected as well. For example::
_install_if = {
('sale','crm'): ['sale_crm'],
}
This will install the ``sale_crm`` addon if and only if both the
``sale`` and ``crm`` addons are selected for installation.
You can define as many additionals as you wish, and additionals
can overlap in key and value. For instance::
_install_if = {
('sale','crm'): ['sale_crm'],
('sale','project'): ['sale_service'],
}
will install both ``sale_crm`` and ``sale_service`` if all of
``sale``, ``crm`` and ``project`` are selected for installation.
Hook methods
------------
Subclasses might also need to express dependencies more complex
than that provided by additionals. In this case, it's possible to
define methods of the form ``_if_%(name)s`` where ``name`` is the
name of a boolean field. If the field is selected, then the
corresponding module will be marked for installation *and* the
hook method will be executed.
Hook methods take the usual set of paramete
|
pjuu/pjuu
|
pjuu/auth/backend.py
|
Python
|
agpl-3.0
| 14,374
| 0
|
# -*- coding: utf8 -*-
"""Simple auth functions with access to the databases for use in the views.
:license: AGPL v3, see LICENSE for more details
:copyright: 2014-2021 Joe Doherty
"""
# Stdlib imports
from datetime import datetime
import re
# 3rd party imports
from flask import session
from pymongo.errors import DuplicateKeyError
from werkzeug.security import (generate_password_hash as generate_password,
check_password_hash as check_password)
# Pjuu imports
from pjuu import mongo as m, redis as r, storage
from pjuu.auth.utils import get_user
from pjuu.lib import keys as k, timestamp, get_uuid
from pjuu.posts.backend import delete_post
# Username & E-mail checker re patterns
USERNAME_PATTERN = r'^\w{3,16}$'
EMAIL_PATTERN = r'^[^@%!/|`#&?]+@[^.@%!/|`#&?][^@%!/|`#&?]*\.[a-z]{2,10}$'
# Usable regular expression objects
USERNAME_RE = re.compile(USERNAME_PATTERN)
EMAIL_RE = re.compile(EMAIL_PATTERN)
# TODO: Come up with a better solution for this.
# Reserved names
# Before adding a name here ensure that no one is using it.
# Names here DO NOT have to watch the pattern for usernames as these may change
# in the future. We need to protect endpoints which we need and can not afford
# to give to users.
RESERVED_NAMES = [
'about', 'about_us', 'aboutus', 'access', 'account', 'accounts',
'activate', 'add', 'address', 'adm', 'admin', 'administration', 'ajax',
'alert', 'alerts', 'analytics', 'api', 'app', 'apps', 'archive', 'auth',
'authentication', 'avatar', 'billing', 'bin', 'blog', 'blogs', 'cache',
'calendar', 'careers', 'cgi', 'chat', 'client', 'code', 'config',
'connect', 'contact', 'contest', 'create', 'css', 'dashboard', 'data',
'db', 'delete', 'design', 'dev', 'devel', 'dir', 'directory', 'doc',
'docs', 'domain', 'download', 'downloads', 'downvote', 'ecommerce', 'edit',
'editor', 'email', 'faq', 'favorite', 'feed', 'feedback', 'file', 'files',
'find', 'flog', 'follow', 'followers', 'following', 'forgot', 'forum',
'forums', 'group', 'groups', 'has-alerts', 'hasalerts', 'help', 'home',
'homepage', 'host', 'hosting', 'hostname', 'hpg', 'html', 'http', 'httpd',
'https', 'i-has-alerts', 'ihasalerts', 'image', 'images', 'imap', 'img',
'index', 'info', 'information', 'invite', 'java', 'javascript', 'job',
'jobs', 'js', 'list', 'lists', 'log', 'login', 'logout', 'logs', 'mail',
'master', 'media', 'message', 'messages', 'name', 'net', 'network', 'new',
'news', 'newsletter', 'nick', 'nickname', 'notes', 'order', 'orders',
'page', 'pager', 'pages', 'password', 'photo', 'photos', 'php', 'pic',
    'pics', 'pjuu', 'plugin', 'plugins', 'post', 'posts', 'privacy',
    'privacy_policy', 'privacypolicy', 'profile', 'project', 'projects', 'pub',
'public', 'random', 'recover', 'register', 'registration', 'report',
'reset', 'root', 'rss', 'script', 'scripts', 'search', 'secure',
'security', 'send', 'service', 'setting', 'settings', 'setup', 'signin',
'signup', 'singout', 'site', 'sitemap', 'sites', 'ssh', 'stage', 'staging',
'start', 'stat', 'static', 'stats', 'status', 'store', 'stores',
'subdomain', 'subscribe', 'support', 'system', 'tablet', 'talk', 'task',
'tasks', 'template', 'templatestest', 'terms', 'terms_and_conditions',
'terms_of_service', 'termsandconditions', 'termsofservice', 'tests',
'theme', 'themes', 'tmp', 'todo', 'tools', 'unfollow', 'update', 'upload',
'upvote', 'url', 'usage', 'user', 'username', 'video', 'videos', 'web',
'webmail']
def create_account(username, email, password):
"""Creates a new user account.
    :param username: The new user's username
    :type username: str
    :param email: The new user's e-mail address
    :type email: str
    :param password: The new user's password, un-hashed
    :type password: str
:returns: The UID of the new user
:rtype: str or None
"""
username = username.lower()
email = email.lower()
try:
if check_username(username) and check_username_pattern(username) and \
check_email(email) and check_email_pattern(email):
# Get a new UUID for the user
uid = get_uuid()
user = {
'_id': uid,
'username': username.lower(),
'email': email.lower(),
'password': generate_password(password,
method='pbkdf2:sha256:2000',
salt_length=20),
'created': timestamp(),
'last_login': -1,
'active': False,
'banned': False,
'op': False,
'muted': False,
'about': "",
'score': 0,
'alerts_last_checked': -1,
# Set the TTL for a newly created user, this has to be Datetime
# object for MongoDB to recognise it. This is removed on
# activation.
'ttl': datetime.utcnow()
}
# Set all the tips for new users
for tip_name in k.VALID_TIP_NAMES:
user['tip_{}'.format(tip_name)] = True
# Insert the new user in to Mongo. If this fails a None will be
# returned
result = m.db.users.insert(user)
return uid if result else None
except DuplicateKeyError: # pragma: no cover
# Oh no something went wrong. Pass over it. A None will be returned.
pass
return None
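# Example usage (hypothetical values; requires a configured MongoDB connection):
#   uid = create_account('joe', 'joe@example.com', 's3cretpass')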
def check_username_pattern(username):
"""Check that username matches what we class as a username
:param username: The username to test the pattern of
:type username: str
:returns: True if successful match, False otherwise
:rtype: bool
"""
# Check the username is valid
return bool(USERNAME_RE.match(username.lower()))
def check_username(username):
"""Check for username availability
:param username: The username to check for existence
:type username: str
    :returns: True if the username does NOT exist, False otherwise
:rtype: bool
"""
return username not in RESERVED_NAMES and \
not bool(m.db.users.find_one({'username': username.lower()}, {}))
def check_email_pattern(email):
"""Checks that email matches what we class as an email address
:param email: The email to test the pattern of
:type email: str
:returns: True if successful match, False otherwise
:rtype: bool
"""
return bool(EMAIL_RE.match(email.lower()))
def check_email(email):
"""Check an e-mail addresses availability
:param email: The email to check for existence
:type email: str
:returns: True if the email does NOT exist, False otherwise
:rtype: bool
"""
return not bool(m.db.users.find_one({'email': email.lower()}, {}))
def user_exists(user_id):
"""Is there a user object with `user_id`?
"""
return bool(m.db.users.find_one({'_id': user_id}, {}))
def authenticate(username, password):
"""Authenticate a username/password combination.
"""
# Case-insensitive login
username = username.lower()
if '@' in username:
user = m.db.users.find_one({'email': username})
else:
user = m.db.users.find_one({'username': username})
# Check that we got a result and that the password matches the stored one
if user and check_password(user.get('password'), password):
# If it matched return the document
return user
# Oh no, something went wrong
return None
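# e.g. authenticate('joe', 's3cretpass') returns the user document, or None on failure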
def signin(user_id):
"""Logs the user with uid in by adding the uid to the session.
"""
session['user_id'] = user_id
# update last login
m.db.users.update({'_id': user_id}, {'$set': {'last_login': timestamp()}})
def signout():
"""Removes the user id from the session.
"""
session.pop('user_id', None)
def activate(user_id, action=True):
"""Activates a user account and removes 'ttl' key from Mongo
"""
return m.db.users.update(
{'_id': user_id},
{'$set': {'active': action}, '$unset': {'ttl': None}}
)
|
alby128/syncplay
|
buildPy2exe.py
|
Python
|
apache-2.0
| 31,233
| 0.002964
|
#!/usr/bin/env python3
#coding:utf8
# *** TROUBLESHOOTING ***
# 1) If you get the error "ImportError: No module named zope.interface" then add an empty __init__.py file to the PYTHONDIR/Lib/site-packages/zope directory
# 2) It is expected that you will have NSIS 3 from http://nsis.sourceforge.net installed.
import codecs
import sys
# try:
# if (sys.version_info.major != 2) or (sys.version_info.minor < 7):
# raise Exception("You must build Syncplay with Python 2.7!")
# except AttributeError:
# import warnings
# warnings.warn("You must build Syncplay with Python 2.7!")
from glob import glob
import os
import subprocess
from string import Template
from distutils.core import setup
try:
from py2exe.build_exe import py2exe
except ImportError:
from py2exe.distutils_buildexe import py2exe
import syncplay
from syncplay.messages import getMissingStrings
missingStrings = getMissingStrings()
if missingStrings is not None and missingStrings != "":
import warnings
warnings.warn("MISSING/UNUSED STRINGS DETECTED:\n{}".format(missingStrings))
def get_nsis_path():
bin_name = "makensis.exe"
from winreg import HKEY_LOCAL_MACHINE as HKLM
from winreg import KEY_READ, KEY_WOW64_32KEY, OpenKey, QueryValueEx
try:
nsisreg = OpenKey(HKLM, "Software\\NSIS", 0, KEY_READ | KEY_WOW64_32KEY)
if QueryValueEx(nsisreg, "VersionMajor")[0] >= 3:
return "{}\\{}".format(QueryValueEx(nsisreg, "")[0], bin_name)
else:
raise Exception("You must install NSIS 3 or later.")
except WindowsError:
return bin_name
NSIS_COMPILE = get_nsis_path()
OUT_DIR = "syncplay_v{}".format(syncplay.version)
SETUP_SCRIPT_PATH = "syncplay_setup.nsi"
NSIS_SCRIPT_TEMPLATE = r"""
!include LogicLib.nsh
!include nsDialogs.nsh
!include FileFunc.nsh
LoadLanguageFile "$${NSISDIR}\Contrib\Language files\English.nlf"
LoadLanguageFile "$${NSISDIR}\Contrib\Language files\Polish.nlf"
LoadLanguageFile "$${NSISDIR}\Contrib\Language files\Russian.nlf"
LoadLanguageFile "$${NSISDIR}\Contrib\Language files\German.nlf"
LoadLanguageFile "$${NSISDIR}\Contrib\Language files\Italian.nlf"
LoadLanguageFile "$${NSISDIR}\Contrib\Language files\Spanish.nlf"
LoadLanguageFile "$${NSISDIR}\Contrib\Language files\PortugueseBR.nlf"
LoadLanguageFile "$${NSISDIR}\Contrib\Language files\Portuguese.nlf"
LoadLanguageFile "$${NSISDIR}\Contrib\Language files\Turkish.nlf"
Unicode true
Name "Syncplay $version"
OutFile "Syncplay-$version-Setup.exe"
InstallDir $$PROGRAMFILES\Syncplay
RequestExecutionLevel admin
ManifestDPIAware false
XPStyle on
Icon syncplay\resources\icon.ico ;Change DIR
SetCompressor /SOLID lzma
VIProductVersion "$version.0"
VIAddVersionKey /LANG=$${LANG_ENGLISH} "ProductName" "Syncplay"
VIAddVersionKey /LANG=$${LANG_ENGLISH} "FileVersion" "$version.0"
VIAddVersionKey /LANG=$${LANG_ENGLISH} "LegalCopyright" "Syncplay"
VIAddVersionKey /LANG=$${LANG_ENGLISH} "FileDescription" "Syncplay"
VIAddVersionKey /LANG=$${LANG_POLISH} "ProductName" "Syncplay"
VIAddVersionKey /LANG=$${LANG_POLISH} "FileVersion" "$version.0"
VIAddVersionKey /LANG=$${LANG_POLISH} "LegalCopyright" "Syncplay"
VIAddVersionKey /LANG=$${LANG_POLISH} "FileDescription" "Syncplay"
VIAddVersionKey /LANG=$${LANG_RUSSIAN} "ProductName" "Syncplay"
VIAddVersionKey /LANG=$${LANG_RUSSIAN} "FileVersion" "$version.0"
VIAddVersionKey /LANG=$${LANG_RUSSIAN} "LegalCopyright" "Syncplay"
VIAddVersionKey /LANG=$${LANG_RUSSIAN} "FileDescription" "Syncplay"
VIAddVersionKey /LANG=$${LANG_ITALIAN} "ProductName" "Syncplay"
VIAddVersionKey /LANG=$${LANG_ITALIAN} "FileVersion" "$version.0"
VIAddVersionKey /LANG=$${LANG_ITALIAN} "LegalCopyright" "Syncplay"
VIAddVersionKey /LANG=$${LANG_ITALIAN} "FileDescription" "Syncplay"
VIAddVersionKey /LANG=$${LANG_SPANISH} "ProductName" "Syncplay"
VIAddVersionKey /LANG=$${LANG_SPANISH} "FileVersion" "$version.0"
VIAddVersionKey /LANG=$${LANG_SPANISH} "LegalCopyright" "Syncplay"
VIAddVersionKey /LANG=$${LANG_SPANISH} "FileDescription" "Syncplay"
VIAddVersionKey /LANG=$${LANG_PORTUGUESEBR} "ProductName" "Syncplay"
VIAddVersionKey /LANG=$${LANG_PORTUGUESEBR} "FileVersion" "$version.0"
VIAddVersionKey /LANG=$${LANG_PORTUGUESEBR} "LegalCopyright" "Syncplay"
VIAddVersionKey /LANG=$${LANG_PORTUGUESEBR} "FileDescription" "Syncplay"
VIAddVersionKey /LANG=$${LANG_PORTUGUESE} "ProductName" "Syncplay"
VIAddVersionKey /LANG=$${LANG_PORTUGUESE} "FileVersion" "$version.0"
VIAddVersionKey /LANG=$${LANG_PORTUGUESE} "LegalCopyright" "Syncplay"
VIAddVersionKey /LANG=$${LANG_PORTUGUESE} "FileDescription" "Syncplay"
VIAddVersionKey /LANG=$${LANG_TURKISH} "ProductName" "Syncplay"
VIAddVersionKey /LANG=$${LANG_TURKISH} "FileVersion" "$version.0"
VIAddVersionKey /LANG=$${LANG_TURKISH} "LegalCopyright" "Syncplay"
VIAddVersionKey /LANG=$${LANG_TURKISH} "FileDescription" "Syncplay"
LangString ^SyncplayLanguage $${LANG_ENGLISH} "en"
LangString ^Associate $${LANG_ENGLISH} "Associate Syncplay with multimedia files."
LangString ^Shortcut $${LANG_ENGLISH} "Create Shortcuts in following locations:"
LangString ^StartMenu $${LANG_ENGLISH} "Start Menu"
LangString ^Desktop $${LANG_ENGLISH} "Desktop"
LangString ^QuickLaunchBar $${LANG_ENGLISH} "Quick Launch Bar"
LangString ^AutomaticUpdates $${LANG_ENGLISH} "Check for updates automatically"
LangString ^UninstConfig $${LANG_ENGLISH} "Delete configuration file."
LangString ^SyncplayLanguage $${LANG_POLISH} "pl"
LangString ^Associate $${LANG_POLISH} "Skojarz Syncplaya z multimediami"
LangString ^Shortcut $${LANG_POLISH} "Utworz skroty w nastepujacych miejscach:"
LangString ^StartMenu $${LANG_POLISH} "Menu Start"
LangString ^Desktop $${LANG_POLISH} "Pulpit"
LangString ^QuickLaunchBar $${LANG_POLISH} "Pasek szybkiego uruchamiania"
LangString ^UninstConfig $${LANG_POLISH} "Usun plik konfiguracyjny."
LangString ^SyncplayLanguage $${LANG_RUSSIAN} "ru"
LangString ^Associate $${LANG_RUSSIAN} "Ассоциировать Syncplay с видеофайлами"
LangString ^Shortcut $${LANG_RUSSIAN} "Создать ярлыки:"
LangString ^StartMenu $${LANG_RUSSIAN} "в меню Пуск"
LangString ^Desktop $${LANG_RUSSIAN} "на рабочем столе"
LangString ^QuickLaunchBar $${LANG_RUSSIAN} "в меню быстрого запуска"
LangString ^AutomaticUpdates $${LANG_RUSSIAN} "Проверять обновления автоматически"; TODO: Confirm Russian translation ("Check for updates automatically")
LangString ^UninstConfig $${LANG_RUSSIAN} "Удалить файл настроек."
LangString ^SyncplayLanguage $${LANG_GERMAN} "de"
LangString ^Associate $${LANG_GERMAN} "Syncplay als Standardprogramm für Multimedia-Dateien verwenden."
LangString ^Shortcut $${LANG_GERMAN} "Erstelle Verknüpfungen an folgenden Orten:"
LangString ^StartMenu $${LANG_GERMAN} "Startmenü"
LangString ^Desktop $${LANG_GERMAN} "Desktop"
LangString ^QuickLaunchBar $${LANG_GERMAN} "Schnellstartleiste"
LangString ^AutomaticUpdates $${LANG_GERMAN} "Automatisch nach Updates suchen";
LangString ^UninstConfig $${LANG_GERMAN} "Konfigurationsdatei löschen."
LangString ^SyncplayLanguage $${LANG_ITALIAN} "it"
LangString ^Associate $${LANG_ITALIAN} "Associa Syncplay con i file multimediali."
LangString ^Shortcut $${LANG_ITALIAN} "Crea i collegamenti nei percorsi seguenti:"
LangString ^StartMenu $${LANG_ITALIAN} "Menu Start"
LangString ^Desktop $${LANG_ITALIAN} "Desktop"
LangString ^QuickLaunchBar $${LANG_ITALIAN} "Barra di avvio rapido"
LangString ^AutomaticUpdates $${LANG_ITALIAN} "Controllo automatico degli aggiornamenti"
LangString ^UninstConfig $${LANG_ITALIAN} "Cancella i file di configurazione."
LangString ^SyncplayLanguage $${LANG_SPANISH} "es"
LangString ^Associate $${LANG_SPANISH} "Asociar Syncplay con archivos multimedia."
LangString ^Shortcut $${LANG_SPANISH} "Crear accesos directos en las siguientes ubicaciones:"
LangString ^StartMenu $${LANG_SPANISH} "Menú de inicio"
LangString ^Desktop $${LANG_SPANISH} "Escritorio"
LangString ^QuickLaunchBar $${LANG_SPANISH} "Barra de acceso rápido"
LangString ^AutomaticUpdates $${LANG_SPANISH}
|
jnez71/demos
|
methods/dynpro_queens.py
|
Python
|
mit
| 2,894
| 0.00311
|
#!/usr/bin/env python3
"""
The 8-Queens Problem as dynamic programming.
https://en.wikipedia.org/wiki/Eight_queens_puzzle
"""
class QueenSolver:
def __init__(self, numqueens, boardsize):
# Cast and validate
self.numqueens = int(numqueens)
self.boardsize = int(boardsize)
assert self.numqueens <= self.boardsize
# Initialize
self._horizon = int(self.numqueens)
self._actions = tuple((i,j) for i in range(self.boardsize) for j in range(self.boardsize))
self._values = dict()
self._policy = dict()
self._State = frozenset
# Solve
trajectory = [self._State()]
self._evaluate(trajectory[0], 0)
for t in range(self._horizon):
trajectory.append(self._transition(trajectory[t], self._policy[trajectory[t]]))
self.solution = trajectory[-1]
# Visualize
display = "SOLUTION:\n"
for i in range(self.boardsize):
for j in range(self.boardsize):
display += " Q" if (i,j) in self.solution else " -"
display += '\n'
print(display)
def _evaluate(self, state, time):
# Boundary conditions
if self._terminal(state, time):
return 0.0
# Memoized regions of the value function so far
if state in self._values:
return self._values[state]
# Dynamic programming principle
self._values[state] = -1e99
for action in self._actions:
value = self._reward(state, action) + self._evaluate(self._transition(state, action), time+1)
if value >= self._values[state]:
self._values[state] = value
self._policy[state] = action
return self._values[state]
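    # The recursion above is the dynamic programming (Bellman) principle:
    # V(s) = max_a [ r(s, a) + V(f(s, a)) ], with V = 0 at terminal states.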
def _terminal(self, state, time):
# Are no more queens left?
if time > self._horizon:
return True
# Is the state illegal?
if state:
            # Have queens merged into the same space?
if len(state) != time:
return True
# Are queens threatening each other?
rows, cols, ldiags, rdiags = zip(*((q[0], q[1], q[1]-q[0], self.boardsize-(q[0]+q[1])-1) for q in state))
for i in range(0, len(state)):
for j in range(i+1, len(state)):
if (rows[i] == rows[j]) or (cols[i] == cols[j]) or (ldiags[i] == ldiags[j]) or (rdiags[i] == rdiags[j]):
return True
return False
def _reward(self, state, action):
# Same for any placement because illegal placements are (more efficiently) handled by the boundary conditions
return 1.0
def _transition(self, state, action):
# Add the placed queen to the set of already-placed queens
return self._State((*state, action))
n = 8
qs = QueenSolver(numqueens=n, boardsize=n)
|
rallured/PyXFocus
|
examples/arcus/uvYaw.py
|
Python
|
mit
| 3,901
| 0.026916
|
import numpy as np
import matplotlib.pyplot as plt
import pdb
import traces.sources as sources
import traces.transformations as tran
import traces.surfaces as surf
#Set up incident beam trace and determine sensitivity to beam
#impact location.
#Trace nominal geometry (function of incidence angle) and
#record location of diffracted and reflected spots
#Perturb location and angle of beam impact and record
#change in spot locations
#What don't you know about geometry?
#Location and angle of source -> location and angle of beam impact
#Location of detectors (angle is very small contributor)
#Write raytrace with beam impact orientation and
#grating orientation as free parameters
def alignTrace(inc,impact,grating,detector,order=0):
"""Traces UV laser rays to grating. Beam impact misalignment
    is handled with a single coordinate transformation right after
source definition. Grating orientation is handled with
symmetric coordinate transformations.
inc - nominal beam glancing angle, must be less than
50.39 deg for 262 nm light
impact - 6 element array giving beam impact transform
grating - 6 element array giving grating misalignment
"""
#Set up source with single ray, diffraction plane
#is XZ, glancing angle from XY plane, ray starts out
#pointing +x and -z
rays = sources.pointsource(0.,1)
tran.transform(rays,0,0,0,0,-np.pi/2-inc,0)
#Perform beam impact misalignment transform, rotation first
tran.transform(rays,*np.concatenate(((0,0,0),impact[3:])))
tran.transform(rays,*np.concatenate((impact[:3],(0,0,0))))
#Perform grating misalignment
tran.transform(rays,*grating)
#Linear grating
surf.flat(rays)
tran.reflect(rays)
tran.grat(rays,160.,order,262.)
#Reverse misalignment transformation
tran.itransform(rays,*grating)
#Go to detector depending on order
    if order != 0:
tran.transform(rays,-200.,0,0,0,0,0)
else:
tran.transform(rays,200.,0,0,0,0,0)
#Trace to detector
tran.transform(rays,0,0,0,0,-np.pi/2,0)
tran.transform(rays,*detector)
surf.flat(rays)
#Return ray position
return rays[1],rays[2]
def computeYaw(inc,impact,grating,detr,detd):
"""Traces both orders and computes yaw of grating
Uses alignTrace
Returns yaw angle assuming
"""
xr,yr = alignTrace(inc,impact,grating,detr,order=0)
betar = 800./np.sqrt(800.**2+xr**2+yr**2)
alphar = -yr/np.sqrt(800.**2+xr**2+yr**2)
xd,yd = alignTrace(inc,impact,grating,detd,order=1)
betad = -800./np.sqrt(800.**2+xd**2+yd**2)
alphad = -yd/np.sqrt(800.**2+xd**2+yd**2)
return np.arctan((alphad-alphar)/(betar-betad))*180./np.pi*60.**2
def dofSensitivity(inc,alignvector,obj='beam',dof=0):
"""Compute x,y positions of reflected and diffracted spots
return as a function of alignvector
"""
#Initialize misalignment vectors
    grating = np.zeros(6)
    impact = np.zeros(6)
    # Assumed fix: alignTrace() also requires a detector transform, which this
    # function never set; use the nominal (unperturbed) detector here.
    detector = np.zeros(6)
    #Initialize output vectors
    xr = np.zeros(np.size(alignvector))
    yr = np.copy(xr)
    xd = np.copy(xr)
    yd = np.copy(xr)
    #Perform raytraces in loop
    for a in alignvector:
        #Adjust misalignments
        if obj == 'beam':
            impact[dof] = a
        else:
            grating[dof] = a
        #Perform trace and set appropriate output elements
        i = a==alignvector
        x,y = alignTrace(inc,impact,grating,detector,order=0)
        xr[i] = x
        yr[i] = y
        x,y = alignTrace(inc,impact,grating,detector,order=1)
        xd[i] = x
        yd[i] = y
#Return
return xr,yr,xd,yd
def diffractionAngle(inc):
"""Return the diffraction angle for the UV yaw system
Input graze angle in degrees
Output diffraction graze angle in degrees"""
alpha0 = np.sin((90.-inc)*np.pi/180)
alpha1 = alpha0 - 266e-9/160e-9
dang = 90 - np.arcsin(np.abs(alpha1))*180/np.pi
return dang
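# The first-order grating equation used in diffractionAngle above:
# sin(alpha1) = sin(alpha0) - lambda/d, with lambda = 266 nm and groove
# spacing d = 160 nm.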
|
CQT-Alex/GSN-heatmap
|
gomsurveyplot.py
|
Python
|
gpl-3.0
| 10,155
| 0.015854
|
'''
gomsurveyplot.py: This program creates the sky heatmap plot from the GomSpace
survey data.
input: A CSV file containing the sky survey data generated by the survey
program provided by GomSpace.
output: A heatmap plot of the input data.
            In the plot the elevations are tagged from 0 to 9 where 0 means
            90 degree elevation and 9 means 0 degree elevation.
            All the intermediate values can be understood similarly. For tag
            value t the degree value of the elevation is (9-t)*10.
Please consult the documentation to understand the mathematical
basis of this plot.
Usages: To use the program run: python gomsurveyplot.py [inputfile name]
            Replace [inputfile name] with your csv file name.
Prerequisites: This program is tested for python2.7
You need the following libraries
numpy
Scipy
matplotlib
To install them on Debian or Ubuntu run:
sudo apt-get install python-numpy python-scipy python-matplotlib
Alternatively you can also use pip to install them. Please look up
the internet for pip instructions.
Improvement opportunities: use binary search for sampling the final values for the
polar skymap. (near line 200) [DONE]
Note:       Some auxiliary files are created in the process of creating the final
            heat-map. They are a legacy of the development process.
            If you don't need or understand them, then you can safely ignore
            them.
dev-code: The folder dev-code contains some test and development codes that are
later incorporated in the main software.
Copyright (C) 2016 Tanvirul Islam, National University
of Singapore <tanvirulbd@gmail.com>
This source code is free software; you can redistribute it and/or
modify it under the terms of the GNU Public License as published
by the Free Software Foundation; either version 3 of the License,
or (at your option) any later version.
This source code is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
Please refer to the GNU Public License for more details.
You should have received a copy of the GNU Public License along with
this source code; if not, see: <https://www.gnu.org/licenses/gpl.html>
'''
import os
import sys
import inspect
import csv
import re
import numpy as np
import bisect
import math
from scipy.interpolate import griddata
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
from scipy import ndimage
'''
function: findclosest() uses bisection algorithm to find
the closest element in a sorted list.
inputs: a = a list sorted in an ascending order
x = the search item
output: i = the index of the value closest to x in a
'''
def findclosest(a, x):
i = bisect.bisect(a, x)
if i == len(a):
return len(a)-1
if i == 0:
return 0
if x - a[i-1] <= a[i] - x:
return i-1
return i
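# e.g. findclosest([0, 2, 5, 9], 4) returns 2, since a[2] == 5 is closest to 4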
'''
The main function
'''
if __name__ == "__main__":
'''
Take the inputfile name from the commandline
'''
infile = ""
if len(sys.argv) > 1:
infile = sys.argv[1]
else:
print "Error: Wrong parameters!"
print "try: python gomsurveyplot.py infile.csv"
exit()
#rval = float(sys.argv[2])
#tval = float(sys.argv[3])
#GomSpace csv data to be loaded in polar format in this list
polar_data = []
with open(infile, 'r') as fp:
lines = fp.readlines()
#Read the input file; tokenize it and load it in the data list.
data = []
for ln in lines:
line_toks = re.split(",|\r\n",ln)
data.append(line_toks)
print len(lines)
print len(data)
    # convert all the elevation angles to radius r, where radius r means (9-r)*10 degree elevation
for r in reversed (range (1, 10)):
for i in range(0, 36):
polar_data.append([r,
float(data[(9 - r) * 36 + i][0]),
float(data[(9 - r) * 36 + i][2])])
#take the avg of all the 90 degree elevation readings
sum90 = 0
for i in range(0, 36):
sum90 += float(data[(9) * 36 + i][2])
avg90 = sum90 / 36.0
polar_data.append([0, 0,avg90])
#print polar_data size
print len(polar_data)
#print the polar data to csv file
outfile3 = "d3" + "polar" + os.path.basename(infile)
with open (outfile3, 'w') as f_eos:
w = csv.writer(f_eos)
w.writerows(sorted(polar_data))
print "output written in "+ outfile3
'''
The data is taken for only 360 points. To get the 2d heatmap for the whole sky
we want to interpolate it for all the other points.
    We cannot perform interpolation using the existing python interpolation libraries if the
    data is in polar format. Therefore we first convert the polar data to Cartesian format.
'''
cart_data = []
with open(outfile3, 'r') as fp:
lines = fp.readlines()
data = []
for ln in lines:
line_toks = re.split(",|\r\n", ln)
r = float(line_toks[0])
theta = float(line_toks[1])
val = float(line_toks[2])
x = r * math.cos(math.radians(theta))
y = r * math.sin(math.radians(theta))
cart_data.append([x,y,val])
#print Cartesian data to file
outfile4 = "d4" + "cart-sparce" + outfile3
with open (outfile4, 'w') as f_eos:
w = csv.writer(f_eos)
w.writerows(sorted(cart_data))
#print "output written in "+"d4"+"cart-sparce"+infile
'''
    Now we read the survey data in Cartesian format and perform a bicubic interpolation.
'''
points = []
values = []
infile = outfile4
with open(infile, 'r') as fp:
lines = fp.readlines()
data = []
for ln in lines:
line_toks = re.split(",|\r\n", ln)
points.append( [float(line_toks[0]), float(line_toks[1])])
values.append(float(line_toks[2]))
#print points.shape
#print values.shape
npoints = np.array(points)
print npoints.shape
nvalues = np.array(values)
print nvalues.shape
print 'minimum value=', min(nvalues)
#points to be interpolated
in_points = []
#take equally spaced 1000 values on the x line between -9 to 9
ixp = np.linspace(-9, 9, 1000)
iyp = np.linspace(-9, 9, 1000) # same for the y
    # now we create 1000*1000 meshgrids for x and y values of the target points.
# on which the interpolated values are to be computed.
# the shape of the meshgrid has some quirks please carefully review the numpy.meshgrid documentation.
xv, yv = np.meshgrid(ixp, iyp)
print xv.shape
print 'shape up'
#interpolated values
# the survey points and values are given in npoints and nvalues. The fill values are initially given to be minimum of all values.
# the interpolated values are returned in invals.
invals = griddata(npoints,nvalues,(xv,yv), method = 'cubic', fill_value = min(nvalues))
#the polar points where we would sample from the interpolated data.
#we have taken 1000*1000 cartesian grid for interpolation to minimize rounding error in this sampling.
r = np.linspace(0, 9, 100)
t = np.linspace(0.0, 2.0 * np.pi, 360)
    # as usual, create the meshgrid.
rv,tv = np.meshgrid(r, t)
print rv.shape
print tv.shape
print 'shape rv'
#print rv
#print tv
#initialize with zeros
polar_values = np.zeros(rv.shape,dtype=np.float)
for i in range(rv.shape[0]): #shape is a pair object
for j in range(rv.shape[1]):
'''
for each polar point
find the (x,y) value of the point
f
|
ckolumbus/mikidown
|
setup.py
|
Python
|
mit
| 2,051
| 0.009751
|
from distutils import log
from distutils.core import setup
from distutils.command.build import build
from distutils.command.install_scripts import install_scripts
import glob
import sys
from mikidown.config import __version__
class miki_build(build):
def run(self):
# Check the python version
try:
version_info = sys.version_info
assert version_info > (3, 0)
except:
print('ERROR: mikidown needs python >= 3.0', file=sys.stderr)
sys.exit(1)
build.run(self)
class miki_install_scripts(install_scripts):
def run(self):
import shutil
install_scripts.run(self)
for file in self.get_outputs():
log.info('renaming %s to %s', file, file[:-3])
shutil.move(file, file[:-3])
setup(
name='mikidown',
version=__version__,
license = 'MIT',
description = 'A note taking application, featuring markdown syntax',
author = 'rnons',
author_email = 'remotenonsense@gmail.com',
url = 'https://github.com/rnons/mikidown',
scripts=['mikidown.py'],
packages=['mikidown'],
data_files=[('share/mikidown', ['README.mkd']),
('share/mikidown', ['mikidown/notebook.css']),
('share/mikidown', ['Changelog.md']),
('share/mikidown/css', glob.glob("mikidown/css/*")),
('share/icons/hicolor/scalable/apps', ['mikidown/icons/mikidown.svg']),
('share/applications', ['mikidown.desktop'])
],
requires=['PyQt', 'markdown', 'whoosh'],
install_requires=['Markdown >= 2.3.1', 'Whoosh >= 2.5.2'],
cmdclass={
'build': miki_build,
'install_scripts': miki_install_scripts
},
classifiers=[
"Topic :: Text Editors :: Documentation",
"Development Status :: 3 - Alpha",
"Environment :: X11 Applications",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3"
]
)
|
EmreAtes/spack
|
lib/spack/spack/test/svn_fetch.py
|
Python
|
lgpl-2.1
| 3,319
| 0
|
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
import pytest
import spack
from llnl.util.filesystem import join_path, touch, working_dir
from spack.spec import Spec
from spack.version import ver
from spack.util.executable import which
pytestmark = pytest.mark.skipif(
not which('svn'), reason='requires subversion to be installed')
@pytest.mark.parametrize("type_of_test", ['default', 'rev0'])
@pytest.mark.parametrize("secure", [True, False])
def test_fetch(
type_of_test,
secure,
mock_svn_repository,
config,
refresh_builtin_mock
):
"""Tries to:
1. Fetch the repo using a fetch strategy constructed with
supplied args (they depend on type_of_test).
2. Check if the test_file is in the checked out repository.
3. Assert that the repository is at the revision supplied.
4. Add and remove some files, then reset the repo, and
ensure it's all there again.
"""
# Retrieve the right test parameters
t = mock_svn_repository.checks[type_of_test]
h = mock_svn_repository.hash
# Construct the package under test
spec = Spec('svn-test')
spec.concretize()
pkg = spack.repo.get(spec)
pkg.versions[ver('svn')] = t.args
# Enter the stage directory and check some properties
with pkg.stage:
try:
spack.insecure = secure
pkg.do_stage()
finally:
spack.insecure = False
with working_dir(pkg.stage.source_path):
assert h() == t.revision
file_path = join_path(pkg.stage.source_path, t.file)
assert os.path.isdir(pkg.stage.source_path)
            assert os.path.isfile(file_path)
os.unlink(file_path)
assert not os.path.isfile(file_path)
untracked_file = 'foobarbaz'
touch(untracked_file)
            assert os.path.isfile(untracked_file)
pkg.do_restage()
assert not os.path.isfile(untracked_file)
assert os.path.isdir(pkg.stage.source_path)
assert os.path.isfile(file_path)
assert h() == t.revision
|
django-fluent/django-fluent-contents
|
fluent_contents/plugins/formdesignerlink/migrations/0001_initial.py
|
Python
|
apache-2.0
| 1,286
| 0.000778
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("form_designer", "__first__"), ("fluent_contents", "0001_initial")]
operations = [
migrations.CreateModel(
name="FormDesignerLink",
fields=[
(
"contentitem_ptr",
models.OneToOneField(
parent_link=True,
on_delete=models.CASCADE,
auto_created=True,
primary_key=True,
serialize=False,
to="fluent_contents.ContentItem",
),
),
(
"form_definition",
models.ForeignKey(
verbose_name="Form",
on_delete=models.PROTECT,
to="form_designer.FormDefinitio
|
n",
),
),
],
options={
"db_table": "contentitem_formdesignerlink_formdesignerlink",
"verbose_name": "
|
Form link",
"verbose_name_plural": "Form links",
},
bases=("fluent_contents.contentitem",),
)
]
|
HugoMMRabson/fonsa
|
src/test/old/backend/svrtools/crypto/__init__.py
|
Python
|
gpl-3.0
| 2,949
| 0.00373
|
'''
test.backend.svrtools.crypto.__init__
'''
import unittest
from my.backend import Backend
# from my.backend.crypto import is_this_the_correct_dollhouse_password
from my.globals.exceptions import WrongDollhousePasswordError
from my.miscellany import random_alphanum_string
class Test_is_this_the_correct_dollhouse_password(unittest.TestCase):
'''
Test the supplied password. If it's the right one to decrypt the dollhouse key, return True; else, False.
Inputs:
        pw        string; if we are using DMCRYPT, use 'pw' to try to decrypt the key
                  and check to see if the password is the right one. If we are using
ECRYPTFS, this function is not used because ECRYPTFS (in our usage)
does not use an external keyfile but uses a password instead.
Outputs:
True/False is password valid?
'''
def setUp(self):
Backend.dollhouse.close()
def tearDown(self):
Backend.dollhouse.close()
# def test_is_this_the_correct_dollhouse_password(self):
# '''
# Test is_this_the_correct_dollhouse_password() with a known-good password
# and then with a known-bad password
# '''
# for pw in (random_alphanum_string(20), '12345678', random_alphanum_string(10), '12345678'):
# Backend.dollhouse.open(pw=pw, wipe=True)
# self.assertTrue(Backend.dollhouse.is_open)
# Backend.dollhouse.close()
# self.assertTrue(is_this_the_correct_dollhouse_password(pw))
# self.assertFalse(Backend.dollhouse.is_open)
# self.assertFalse(is_this_the_correct_dollhouse_password('nahh' + pw + 'unhunh'))
def test_data_persistence(self):
'''
Test the mounted dollhouse's ability to retain information in between dismounts/mounts.
'''
for pw in (random_alphanum_string(20), '12345678', random_alphanum_string(10), '12345678'):
Backend.dollhouse.open(pw=pw, wipe=True)
teststring = random_alphanum_string(30)
with open('/.home_rw/hi.txt', 'w') as f:
f.write(teststring)
Backend.dollhouse.close()
Backend.dollhouse.open(pw=pw)
with open('/.home_rw/hi.txt', 'r') as f:
whatwassaved = f.read()
self.assertEqual(teststring, whatwassaved)
Backend.dollhouse.close()
try:
Backend.dollhouse.open(teststring)
raise Exception('We should not be able to unlock with wrong password')
except WrongDollhousePasswordError:
pass
self.assertTrue(Backend.dollhouse.is_open)
Backend.dollhouse.close()
self.assertFalse(Backend.dollhouse.is_open)
if __name__ == "__main__":
# import sys;sys.argv = ['', 'Test.testName']
unittest.main()
|
pallets/click
|
examples/aliases/aliases.py
|
Python
|
bsd-3-clause
| 4,061
| 0.000492
|
import configparser
import os
import click
class Config:
"""The config in this example only holds aliases."""
def __init__(self):
self.path = os.getcwd()
self.aliases = {}
def add_alias(self, alias, cmd):
self.aliases.update({alias: cmd})
def read_config(self, filename):
parser = configparser.RawConfigParser()
parser.read([filename])
try:
self.aliases.update(parser.items("aliases"))
except configparser.NoSectionError:
pass
    def write_config(self, filename):
parser = configparser.RawConfigParser()
parser.add_section("aliases")
for key, value in self.aliases.items():
parser.set("aliases", key, value)
with open(filename, "wb") as file:
parser.write(file)
pass_config = click.make_pass_decorator(Config, ensure=True)
class AliasedGroup(click.Group):
"""This subclass of a group supports looking up aliases in a config
file and with a bit of magic.
"""
def get_command(self, ctx, cmd_name):
        # Step one: builtin commands as normal
rv = click.Group.get_command(self, ctx, cmd_name)
if rv is not None:
return rv
        # Step two: find the config object and ensure it's there. This
        # will create the config object if it is missing.
cfg = ctx.ensure_object(Config)
# Step three: look up an explicit command alias in the config
if cmd_name in cfg.aliases:
actual_cmd = cfg.aliases[cmd_name]
return click.Group.get_command(self, ctx, actual_cmd)
# Alternative option: if we did not find an explicit alias we
# allow automatic abbreviation of the command. "status" for
# instance will match "st". We only allow that however if
# there is only one command.
matches = [
x for x in self.list_commands(ctx) if x.lower().startswith(cmd_name.lower())
]
if not matches:
return None
elif len(matches) == 1:
return click.Group.get_command(self, ctx, matches[0])
ctx.fail(f"Too many matches: {', '.join(sorted(matches))}")
def resolve_command(self, ctx, args):
# always return the command's name, not the alias
_, cmd, args = super().resolve_command(ctx, args)
return cmd.name, cmd, args
def read_config(ctx, param, value):
"""Callback that is used whenever --config is passed. We use this to
always load the correct config. This means that the config is loaded
even if the group itself never executes so our aliases stay always
available.
"""
cfg = ctx.ensure_object(Config)
if value is None:
value = os.path.join(os.path.dirname(__file__), "aliases.ini")
cfg.read_config(value)
return value
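# A hypothetical aliases.ini consumed by read_config above:
#
#   [aliases]
#   ci = commit
#   st = status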
@click.command(cls=AliasedGroup)
@click.option(
"--config",
type=click.Path(exists=True, dir_okay=False),
callback=read_config,
expose_value=False,
help="The config file to use instead of the default.",
)
def cli():
"""An example application that supports aliases."""
@cli.command()
def push():
"""Pushes changes."""
click.echo("Push")
@cli.command()
def pull():
"""Pulls changes."""
click.echo("Pull")
@cli.command()
def clone():
"""Clones a repository."""
click.echo("Clone")
@cli.command()
def commit():
"""Commits pending changes."""
click.echo("Commit")
@cli.command()
@pass_config
def status(config):
"""Shows the status."""
click.echo(f"Status for {config.path}")
@cli.command()
@pass_config
@click.argument("alias_", metavar="ALIAS", type=click.STRING)
@click.argument("cmd", type=click.STRING)
@click.option(
"--config_file", type=click.Path(exists=True, dir_okay=False), default="aliases.ini"
)
def alias(config, alias_, cmd, config_file):
"""Adds an alias to the specified configuration file."""
config.add_alias(alias_, cmd)
config.write_config(config_file)
click.echo(f"Added '{alias_}' as alias for '{cmd}'")
|
miyazaki-tm/aoj
|
Volume0/0030.py
|
Python
|
mit
| 334
| 0
|
"""
|
Sum of Integers
"""
import itertools
if __name__ == '__main__':
while True:
n, s = map(int, raw_input().split())
if n == 0 and s == 0:
break
result = 0
for a in itertools.combinations(xrange(10), n):
if sum(a) == s:
result += 1
        print result
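# e.g. for n=3, s=6 the program prints 3: (0,1,5), (0,2,4) and (1,2,3)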
|
shymonk/django-datatable
|
example/app/views.py
|
Python
|
mit
| 2,085
| 0.00048
|
#!/usr/bin/env python
# coding: utf-8
from django.shortcuts import render
from table.views import FeedDataView
from app.tables import (
ModelTable, AjaxTable, AjaxSourceTable,
CalendarColumnTable, SequenceColumnTable,
LinkColumnTable, CheckboxColumnTable,
ButtonsExtensionTable
)
def base(request):
table = ModelTable()
return render(request, "index.html", {'people': table})
def ajax(request):
table = AjaxTable()
return render(request, "index.html", {'people': table})
def ajax_source(request):
table = AjaxSourceTable()
return render(request, "index.html", {'people': table})
class Foo(object):
def __init__(self, id, name, calendar):
self.id = id
self.name = name
self.calendar = calendar
def sequence_column(request):
data = [
Foo(1, 'A', [1, 2, 3, 4, 5]),
Foo(2, 'B', [1, 2, 3, 4, 5]),
Foo(3, 'C', [1, 2, 3, 4, 5])
]
table = SequenceColumnTable(data)
return render(request, "index.html", {'people': table})
def calendar_column(request):
data = [
Foo(1, 'A', range(1, 14)),
Foo(2, 'B', range(1, 14)),
Foo(3, 'C', range(1, 14))
]
table = CalendarColumnTable(data)
return render(request, "i
|
ndex.html", {'people': table})
def link_column(request):
table = LinkColumnTable()
return render(request, "index.html", {'people': table})
def checkbox_column(request):
table = CheckboxColumnTable()
return render(request, "index.html", {'people': table})
def buttons_extension(request):
table = ButtonsExtensionTable()
return render(request, "index.ht
|
ml", {'people': table})
def user_profile(request, uid):
from app.models import Person
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
person = get_object_or_404(Person, pk=uid)
return HttpResponse("User %s" % person.name)
class MyDataView(FeedDataView):
token = AjaxSourceTable.token
def get_queryset(self):
return super(MyDataView, self).get_queryset().filter(id__gt=5)
|
jocelynj/weboob
|
weboob/backends/aum/pages/base.py
|
Python
|
gpl-3.0
| 4,096
| 0.002442
|
# -*- coding: utf-8 -*-
# Copyright(C) 2008-2010 Romain Bignon
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import re
from weboob.tools.browser import BasePage, BrowserUnavailable
from weboob.backends.aum.exceptions import AdopteBanned
class PageBase(BasePage):
def __init__(self, *args, **kwargs):
BasePage.__init__(self, *args, **kwargs)
# Check the 'oops' error message when adopteunmec guys are gay.
b = self.document.getElementsByTagName('body')[0]
for div in b.getElementsByTagName('div'):
if div.getAttribute('id') == 'oops':
raise BrowserUnavailable('Oops')
# Check when the account is temporarily blocked.
for img in self.document.getElementsByTagName('img'):
if img.getAttribute('src') == 'http://s.adopteunmec.com/img/exemple.jpg':
raise AdopteBanned('Your account is blocked. You have to unblock by yourself but we can\'t help you.')
def open_contact_list_page(self):
self.browser.follow_link(url_regex=r"/mails.php$")
def open_thread_page(self, id, all_messages=False):
if all_messages:
self.browser.location('/thread.php?id=%d&see=all' % int(id))
else:
self.browser.location('/thread.php?id=%d' % int(id))
def score(self):
"""
<table width="220">
<tr>
<td align=left class=header>popularité</td>
<td align=right class=header><big style="color:#ff0198;" id=popScore>7.230</big> pts</td>
</tr>
</table>
"""
        l = self.document.getElementsByTagName('table')
for tag in l:
if tag.getAttribute('width') == '220':
# <table><tbody(implicit)><tr><td>
child = tag.childNodes[0].childNodes[0].childNodes[3]
return int(child.childNodes[0].childNodes[1].data.replace(' ', '').strip())
self.logger.error("Error: I can't find the score :(")
return '0'
def __get_indicator(self, elementName):
""" <span id=mailsCounter><blink>1</blink></span> """
l = self.document.getElementsByTagName('span')
for tag in l:
if tag.getAttribute('id') == elementName:
child = tag.childNodes[0]
if not hasattr(child, 'data'):
if child.tagName != u'blink':
self.logger.warning("Warning: %s counter isn't a blink and hasn't data" % elementName)
child = child.childNodes[0]
if not hasattr(child, 'data'):
break
return int(child.data)
self.logger.error("Error: I can't find the %s counter :(" % elementName)
return 0
MYNAME_REGEXP = re.compile("Bonjour (.*)")
def get_my_name(self):
""" <span class=header2>Bonjour Romain</span> """
tags = self.document.getElementsByTagName('span')
for tag in tags:
if hasattr(tag.firstChild, 'data'):
m = self.MYNAME_REGEXP.match(tag.firstChild.data)
if m:
return m.group(1)
self.logger.warning('Warning: Unable to fetch name')
return '?'
def nb_new_mails(self):
return self.__get_indicator(u'mailsCounter')
def nb_new_baskets(self):
return self.__get_indicator(u'flashsCounter')
def nb_new_visites(self):
return self.__get_indicator(u'visitesCounter')
|
kaidokert/cookiecutter-django
|
{{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}/taskapp/celery.py
|
Python
|
bsd-3-clause
| 1,017
| 0.012783
|
{% if cookiecutter.use_celery == "y" %}
from __future__ import absolute_import
import os
from celery import Celery
from django.apps import AppConfig
from django.conf import settings
if not settings.configured:
# set the default Django settings module for the 'celery' program.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
app = Celery('{{cookiecutter.repo_name}}')
class CeleryConfig(AppConfig):
name = '{{cookiecutter.repo_name}}.taskapp'
verbose_name = 'Celery Config'
def ready(self):
# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS, force=True)
@app.task(bind=True)
def debug_task(self):
print('Request: {0!r}'.format(self.request))
{% else %}
# Use this as a starting point for your project with celery.
# If you are not using celery, you can remove this app
{% endif %}
|
forkbong/qutebrowser
|
qutebrowser/misc/sql.py
|
Python
|
gpl-3.0
| 16,139
| 0.000806
|
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2016-2021 Ryan Roden-Corrent (rcorre) <ryan@rcorre.net>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Provides access to an in-memory sqlite database."""
import collections
import dataclasses
from PyQt5.QtCore import QObject, pyqtSignal
from PyQt5.QtSql import QSqlDatabase, QSqlQuery, QSqlError
from qutebrowser.utils import log, debug
@dataclasses.dataclass
class UserVersion:
"""The version of data stored in the history database.
When we originally started using user_version, we only used it to signify that the
completion database should be regenerated. However, sometimes there are
backwards-incompatible changes.
Instead, we now (ab)use the fact that the user_version in sqlite is a 32-bit integer
to store both a major and a minor part. If only the minor part changed, we can deal
with it (there are only new URLs to clean up or somesuch). If the major part
changed, there are backwards-incompatible changes in how the database works, so
newer databases are not compatible with older qutebrowser versions.
"""
major: int
minor: int
@classmethod
def from_int(cls, num):
"""Parse a number from sqlite into a major/minor user version."""
assert 0 <= num <= 0x7FFF_FFFF, num # signed integer, but shouldn't be negative
major = (num & 0x7FFF_0000) >> 16
minor = num & 0x0000_FFFF
return cls(major, minor)
def to_int(self):
"""Get a sqlite integer from a major/minor user version."""
assert 0 <= self.major <= 0x7FFF # signed integer
assert 0 <= self.minor <= 0xFFFF
return self.major << 16 | self.minor
def __str__(self):
return f'{self.major}.{self.minor}'
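# Round-trip sketch: UserVersion.from_int(UserVersion(0, 3).to_int()) == UserVersion(0, 3)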
_db_user_version = None # The user version we got from the database
_USER_VERSION = UserVersion(0, 3) # The current / newest user version
def user_version_changed():
"""Whether the version stored in the database is different from the current one."""
return _db_user_version != _USER_VERSION
class SqliteErrorCode:
"""Error codes as used by sqlite.
See https://sqlite.org/rescode.html - note we only define the codes we use
in qutebrowser here.
"""
ERROR = '1' # generic error code
BUSY = '5' # database is locked
READONLY = '8' # attempt to write a readonly database
IOERR = '10' # disk I/O error
CORRUPT = '11' # database disk image is malformed
FULL = '13' # database or disk is full
CANTOPEN = '14' # unable to open database file
PROTOCOL = '15' # locking protocol error
CONSTRAINT = '19' # UNIQUE constraint failed
NOTADB = '26' # file is not a database
class Error(Exception):
"""Base class for all SQL related errors."""
def __init__(self, msg, error=None):
super().__init__(msg)
self.error = error
def text(self):
"""Get a short text description of the error.
This is a string suitable to show to the user as error message.
"""
if self.error is None:
return str(self)
else:
return self.error.databaseText()
class KnownError(Error):
"""Raised on an error interacting with the SQL database.
This is raised in conditions resulting from the environment (like a full
disk or I/O errors), where qutebrowser isn't to blame.
"""
class BugError(Error):
"""Raised on an error interacting with the SQL database.
This is raised for errors resulting from a qutebrowser bug.
"""
def raise_sqlite_error(msg, error):
"""Raise either a BugError or KnownError."""
error_code = error.nativeErrorCode()
database_text = error.databaseText()
driver_text = error.driverText()
log.sql.debug("SQL error:")
log.sql.debug("type: {}".format(
debug.qenum_key(QSqlError, error.type())))
log.sql.debug("database text: {}".format(database_text))
log.sql.debug("driver text: {}".format(driver_text))
log.sql.debug("error code: {}".format(error_code))
known_errors = [
SqliteErrorCode.BUSY,
SqliteErrorCode.READONLY,
SqliteErrorCode.IOERR,
SqliteErrorCode.CORRUPT,
SqliteErrorCode.FULL,
SqliteErrorCode.CANTOPEN,
SqliteErrorCode.PROTOCOL,
SqliteErrorCode.NOTADB,
]
# https://github.com/qutebrowser/qutebrowser/issues/4681
# If the query we built was too long
too_long_err = (
error_code == SqliteErrorCode.ERROR and
(database_text.startswith("Expression tree is too large") or
database_text in ["too many SQL variables",
"LIKE or GLOB pattern too complex"]))
if error_code in known_errors or too_long_err:
raise KnownError(msg, error)
raise BugError(msg, error)
def init(db_path):
"""Initialize the SQL database connection."""
database = QSqlDatabase.addDatabase('QSQLITE')
if not database.isValid():
raise KnownError('Failed to add database. Are sqlite and Qt sqlite '
                         'support installed?')
database.setDatabaseName(db_path)
if not database.open():
error = database.lastError()
msg = "Failed to open sqlite database at {}: {}".format(db_path,
error.text())
raise_sqlite_error(msg, error)
global _db_user_version
    version_int = Query('pragma user_version').run().value()
_db_user_version = UserVersion.from_int(version_int)
if _db_user_version.major > _USER_VERSION.major:
raise KnownError(
"Database is too new for this qutebrowser version (database version "
f"{_db_user_version}, but {_USER_VERSION.major}.x is supported)")
if user_version_changed():
log.sql.debug(f"Migrating from version {_db_user_version} to {_USER_VERSION}")
# Note we're *not* updating the _db_user_version global here. We still want
# user_version_changed() to return True, as other modules (such as history.py)
# use it to create the initial table structure.
Query(f'PRAGMA user_version = {_USER_VERSION.to_int()}').run()
# Enable write-ahead-logging and reduce disk write frequency
# see https://sqlite.org/pragma.html and issues #2930 and #3507
#
# We might already have done this (without a migration) in earlier versions, but
# as those are idempotent, let's make sure we run them once again.
Query("PRAGMA journal_mode=WAL").run()
Query("PRAGMA synchronous=NORMAL").run()
def close():
"""Close the SQL connection."""
QSqlDatabase.removeDatabase(QSqlDatabase.database().connectionName())
def version():
"""Return the sqlite version string."""
try:
if not QSqlDatabase.database().isOpen():
init(':memory:')
ver = Query("select sqlite_version()").run().value()
close()
return ver
return Query("select sqlite_version()").run().value()
except KnownError as e:
return 'UNAVAILABLE ({})'.format(e)
class Query:
"""A prepared SQL query."""
def __init__(self, querystr, forward_only=True):
"""Prepare a new SQL query.
Args:
querystr: String to prepare query from.
forward_only: Optimization for queries that will only step forward.
Must be false for completion queries.
"""
self.query = QSqlQuery(QSqlDatabase.d
|