| repo_name | path | language | license | size | score | prefix | middle | suffix |
|---|---|---|---|---|---|---|---|---|
haandol/assemblyapi | main/views.py | Python | apache-2.0 | 215 | 0.013953 | #coding: utf-8
from django.http import HttpResponse
from django.shortcuts import render
def index(request):
return HttpResponse('UnderConstruction')
def help(request):
return render(request, 'help.html')
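# Hedged sketch (assumption, not part of this file): a URLconf that would
# expose the two views above; regex paths and names are illustrative only.
#
# from django.conf.urls import url
# from main import views
#
# urlpatterns = [
#     url(r'^$', views.index, name='index'),
#     url(r'^help/$', views.help, name='help'),
# ]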
|
singingwolfboy/webhookdb | webhookdb/tasks/label.py | Python | agpl-3.0 | 4,696 | 0.000426 | # coding=utf-8
from __future__ import unicode_literals, print_function
from datetime import datetime
from celery import group
from urlobject import URLObject
from webhookdb import db, celery
from webhookdb.process import process_label
from webhookdb.models import IssueLabel, Repository, Mutex
from webhookdb.exceptions import NotFound, StaleData, MissingData, DatabaseError
from sqlalchemy.exc import IntegrityError
from webhookdb.tasks import logger
from webhookdb.tasks.fetch import fetch_url_from_github
LOCK_TEMPLATE = "Repository|{owner}/{repo}|labels"
@celery.task(bind=True)
def sync_label(self, owner, repo, name, children=False, requestor_id=None):
label_url = "/repos/{owner}/{repo}/labels/{name}".format(
owner=owner, repo=repo, name=name,
)
try:
resp = fetch_url_from_github(label_url, requestor_id=requestor_id)
except NotFound:
# add more context
msg = "Label {name} on {owner}/{repo} not found".format(
name=name, owner=owner, repo=repo,
)
raise NotFound(msg, {
"type": "label",
"name": name,
"owner": owner,
"repo": repo,
})
label_data = resp.json()
try:
label = process_label(
label_data, via="api", fetched_at=datetime.now(), commit=True,
)
except IntegrityError as exc:
        # multiple workers tried to insert the same label simultaneously. Retry!
self.retry(exc=exc)
return label.name
@celery.task(bind=True)
def sync_page_of_labels(self, owner, repo, children=False, requestor_id=None,
per_page=100, page=1):
label_page_url = (
"/repos/{owner}/{repo}/labels?"
"per_page={per_page}&page={page}"
).format(
owner=owner, repo=repo,
per_page=per_page, page=page
)
resp = fetch_url_from_github(label_page_url, requestor_id=requestor_id)
fetched_at = datetime.now()
label_data_list = resp.json()
results = []
repo_id = None
for label_data in label_data_list:
try:
label = process_label(
label_data, via="api", fetched_at=fetched_at, commit=True,
repo_id=repo_id,
)
repo_id = repo_id or label.repo_id
results.append(label.name)
except IntegrityError as exc:
self.retry(exc=exc)
return results
@celery.task()
def labels_scanned(owner, repo, requestor_id=None):
"""
Update the timestamp on the repository object,
and delete old labels that weren't updated.
"""
repo_name = repo
repo = Repository.get(owner, repo_name)
prev_scan_at = repo.labels_last_scanned_at
repo.labels_last_scanned_at = datetime.now()
db.session.add(repo)
if prev_scan_at:
# delete any labels that were not updated since the previous scan --
# they have been removed from Github
query = (
            IssueLabel.query.filter_by(repo_id=repo.id)
            .filter(IssueLabel.last_replicated_at < prev_scan_at)
)
query.delete()
# delete the mutex
lock_name = LOCK_TEMPLATE.format(owner=owner, repo=repo_name)
Mutex.query.filter_by(name=lock_name).delete()
logger.info("Lock {name} deleted".format(name=lock_name))
db.session.commit()
@celery.task()
def spawn_page_tasks_for_labels(owner, repo, children=False,
requestor_id=None, per_page=100):
# acquire lock or fail (we're already in a transaction)
lock_name = LOCK_TEMPLATE.format(owner=owner, repo=repo)
existing = Mutex.query.get(lock_name)
if existing:
return False
lock = Mutex(name=lock_name, user_id=requestor_id)
db.session.add(lock)
try:
db.session.commit()
except IntegrityError:
return False
else:
logger.info("Lock {name} set by {requestor_id}".format(
name=lock_name, requestor_id=requestor_id,
))
label_list_url = (
"/repos/{owner}/{repo}/labels?per_page={per_page}"
).format(
owner=owner, repo=repo, per_page=per_page,
)
resp = fetch_url_from_github(
label_list_url, method="HEAD", requestor_id=requestor_id,
)
last_page_url = URLObject(resp.links.get('last', {}).get('url', ""))
last_page_num = int(last_page_url.query.dict.get('page', 1))
    g = group(
sync_page_of_labels.s(
owner=owner, repo=repo, requestor_id=requestor_id,
            per_page=per_page, page=page
) for page in xrange(1, last_page_num+1)
)
finisher = labels_scanned.si(
owner=owner, repo=repo, requestor_id=requestor_id,
)
return (g | finisher).delay()
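# Hedged usage sketch (assumption; owner/repo/requestor values are
# placeholders): a caller, e.g. a webhook handler, would enqueue the scan
# asynchronously and let the group/chord above fan out per-page tasks:
#
# spawn_page_tasks_for_labels.delay("octocat", "hello-world", requestor_id=1)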
|
jiadaizhao/LeetCode | 0301-0400/0394-Decode String/0394-Decode String.py | Python | mit | 1,173 | 0.002558 | class Solution:
def decodeString(self, s: str) -> str:
St = []
num = 0
curr = ''
for c in s:
if c.isdigit():
num = num*10 + int(c)
elif c == '[':
                St.append([num, curr])
num = 0
curr = ''
elif c == ']':
count, prev = St.pop()
curr = prev + count*curr
else:
curr += c
return curr
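# Illustrative check of the stack-based decoder above (classic examples):
#   Solution().decodeString("3[a]2[bc]")  -> "aaabcbc"
#   Solution().decodeString("3[a2[c]]")   -> "accaccacc"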
class Solution2:
def decodeString(self, s: str) -> str:
i = 0
def decode(s):
            nonlocal i
result = []
while i < len(s) and s[i] != ']':
if s[i].isdigit():
num = 0
while i < len(s) and s[i].isdigit():
num = num*10 + int(s[i])
i += 1
i += 1
temp = decode(s)
i += 1
result += temp*num
else:
result.append(s[i])
i += 1
return result
return ''.join(decode(s))
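# Both implementations should agree; e.g. (illustrative):
#   Solution2().decodeString("2[abc]3[cd]ef") -> "abcabccdcdcdef"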
|
SamiHiltunen/invenio-upgrader | invenio_upgrader/upgrades/invenio_2013_06_24_new_bibsched_status_table.py | Python | gpl-2.0 | 1,112 | 0 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2012 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
from invenio.legacy.dbquery import run_sql
depends_on = ['invenio_release_1_1_0']
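# Recipe contract (as used by the invenio upgrader engine, judging from this
# file): `depends_on` lists prerequisite recipes, `info()` returns a short
# human-readable summary, `do_upgrade()` applies the change, and `estimate()`
# returns the expected runtime in seconds.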
def info():
return "New bibsched status (schSTATUS) table"
def do_upgrade():
run_sql("""CREATE TABLE IF NOT EXISTS schSTATUS (
name varchar(50),
value mediumblob,
PRIMARY KEY (name)
) ENGINE=MyISAM
""")
def estimate():
return 1
|
gares/coq | doc/tools/coqrst/coqdomain.py | Python | lgpl-2.1 | 54,478 | 0.003572 | ##########################################################################
## # The Coq Proof Assistant / The Coq Development Team ##
## v # Copyright INRIA, CNRS and contributors ##
## <O___,, # (see version control and CREDITS file for authors & dates) ##
## \VV/ ###############################################################
## // # This file is distributed under the terms of the ##
## # GNU Lesser General Public License Version 2.1 ##
## # (see LICENSE file for the text of the license) ##
##########################################################################
"""A Coq domain for Sphinx.
Currently geared towards Coq's manual, rather than Coq source files, but one
could imagine extending it.
"""
# pylint: disable=missing-type-doc, missing-param-doc
# pylint: disable=missing-return-type-doc, missing-return-doc
# pylint: disable=too-few-public-methods, too-many-ancestors, arguments-differ
# pylint: disable=import-outside-toplevel, abstract-method, too-many-lines
import os
import re
from itertools import chain
from collections import defaultdict
from docutils import nodes, utils
from docutils.transforms import Transform
from docutils.parsers.rst import Directive, directives
from docutils.parsers.rst.roles import code_role #, set_classes
from docutils.parsers.rst.directives.admonitions import BaseAdmonition
from sphinx import addnodes
from sphinx.directives import ObjectDescription
from sphinx.domains import Domain, ObjType, Index
from sphinx.errors import ExtensionError
from sphinx.roles import XRefRole
from sphinx.util.docutils import ReferenceRole
from sphinx.util.logging import getLogger, get_node_location
from sphinx.util.nodes import set_source_info, set_role_source_info, make_refnode
from sphinx.writers.latex import LaTeXTranslator
from . import coqdoc
from .repl import ansicolors
from .repl.coqtop import CoqTop, CoqTopError
from .notations.parsing import ParseError
from .notations.sphinx import sphinxify
from .notations.plain import stringify_with_ellipses
# FIXME: Patch this in Sphinx
# https://github.com/coq/coq/issues/12361
def visit_desc_signature(self, node):
hyper = ''
if node.parent['objtype'] != 'describe' and node['ids']:
for id in node['ids']:
hyper += self.hypertarget(id)
self.body.append(hyper)
if not node.get('is_multiline'):
self._visit_signature_line(node)
else:
self.body.append('%\n\\pysigstartmultiline\n')
LaTeXTranslator.visit_desc_signature = visit_desc_signature
PARSE_ERROR = """{}:{} Parse error in notation!
Offending notation: {}
Error message: {}"""
def notation_to_sphinx(notation, source, line, rawtext=None):
"""Parse notation and wrap it in an inline node"""
try:
node = nodes.inline(rawtext or notation, '', *sphinxify(notation), classes=['notation'])
node.source, node.line = source, line
return node
except ParseError as e:
raise ExtensionError(PARSE_ERROR.format(os.path.basename(source), line, notation, e.msg)) from e
def notation_to_string(notation):
"""Parse notation and format it as a string with ellipses."""
try:
return stringify_with_ellipses(notation)
except ParseError as e:
# FIXME source and line aren't defined below — see cc93f419e0
raise ExtensionError(PARSE_ERROR.format(os.path.basename(source), line, notation, e.msg)) from e
def highlight_using_coqdoc(sentence):
"""Lex sentence using coqdoc, and yield inline nodes for each token"""
tokens = coqdoc.lex(utils.unescape(sentence, 1))
for classes, value in tokens:
yield nodes.inline(value, value, classes=classes)
def make_target(objtype, targetid):
"""Create a target to an object of type objtype and id targetid"""
return "coq:{}.{}".format(objtype, targetid)
def make_math_node(latex, docname, nowrap):
node = nodes.math_block(latex, latex)
    node['label'] = None  # Otherwise equations are numbered
node['nowrap'] = nowrap
    node['docname'] = docname
node['number'] = None
return node
class CoqObject(ObjectDescription):
"""A generic Coq object for Sphinx; all Coq objects are subclasses of this.
The fields and methods to override are listed at the top of this class'
implementation. Each object supports the :name: option, which gives an
explicit name to link to.
See the comments and docstrings in CoqObject for more information.
"""
# The semantic domain in which this object lives (eg. “tac”, “cmd”, “chm”…).
# It matches exactly one of the roles used for cross-referencing.
subdomain = None # type: str
# The suffix to use in indices for objects of this type (eg. “(tac)”)
index_suffix = None # type: str
# The annotation to add to headers of objects of this type
# (eg. “Command”, “Theorem”)
annotation = None # type: str
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._sig_names = None
def _name_from_signature(self, signature): # pylint: disable=no-self-use, unused-argument
"""Convert a signature into a name to link to.
‘Signature’ is Sphinx parlance for an object's header (think “type
signature”); for example, the signature of the simplest form of the
``exact`` tactic is ``exact @id``.
Generates a name for the directive. Override this method to return None
to avoid generating a name automatically. This is a convenient way
to automatically generate names (link targets) without having to write
explicit names everywhere.
"""
m = re.match(r"[a-zA-Z0-9_ ]+", signature)
if m:
return m.group(0).strip()
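    # Illustrative example (derived from the regex above): the signature
    # "exact @id" yields the name "exact" -- only the leading run of word
    # characters and spaces is kept, then stripped.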
def _render_signature(self, signature, signode):
"""Render a signature, placing resulting nodes into signode."""
raise NotImplementedError(self)
option_spec = {
# Explicit object naming
'name': directives.unchanged,
# Silence warnings produced by report_undocumented_coq_objects
'undocumented': directives.flag,
# noindex omits this object from its index
'noindex': directives.flag
}
def subdomain_data(self):
if self.subdomain is None:
raise ValueError()
return self.env.domaindata['coq']['objects'][self.subdomain]
def _render_annotation(self, signode):
if self.annotation:
annot_node = nodes.inline(self.annotation, self.annotation, classes=['sigannot'])
signode += addnodes.desc_annotation(self.annotation, '', annot_node)
signode += nodes.Text(' ')
def handle_signature(self, signature, signode):
"""Prefix signature with the proper annotation, then render it using
``_render_signature`` (for example, add “Command” in front of commands).
:returns: the names given to the resulting node.
"""
self._render_annotation(signode)
self._render_signature(signature, signode)
names = self._sig_names.get(signature)
if names is None:
name = self._name_from_signature(signature) # pylint: disable=assignment-from-none
# remove trailing ‘.’ found in commands, but not ‘...’ (ellipsis)
if name is not None and name.endswith(".") and not name.endswith("..."):
name = name[:-1]
names = [name] if name else None
return names
def _warn_if_duplicate_name(self, objects, name, signode):
"""Check that two objects in the same domain don't have the same name."""
if name in objects:
MSG = 'Duplicate name {} (other is in {}) attached to {}'
msg = MSG.format(name, self.env.doc2path(objects[name][0]), signode)
self.state_machine.reporter.warning(msg, line=self.lineno)
def _record_name(self, name, target_id, signode):
"""Record a `name` in the current subdomain, mapping it to `target_id`.
Warns if another object of the same name already exists; `signode` is
used in the warning.
"""
names_in_ |
agustinhenze/nikola.debian | tests/test_rst_compiler.py | Python | mit | 10,799 | 0.000556 | # coding: utf8
# Author: Rodrigo Bistolfi
# Date: 03/2013
""" Test cases for Nikola ReST extensions.
A base class ReSTExtensionTestCase provides the tests' basic behavior.
Subclasses must override the "sample" class attribute with the ReST markup.
The sample will be rendered as HTML using publish_parts() by setUp().
One method is provided for checking the resulting HTML:
* assertHTMLContains(element, attributes=None, text=None)
The HTML is parsed with lxml for checking against the data you provide. The
method takes an element argument, a string representing the *name* of an HTML
tag, like "script" or "iframe". We will try to find this tag in the document
and perform the tests on it. You can pass a dictionary to the attributes kwarg
representing the name and the value of the tag attributes. The text kwarg takes
a string argument, which will be tested against the contents of the HTML
element.
One last caveat: you need to url unquote your urls if you are going to test
attributes like "src" or "link", since the HTML rendered by docutils will be
always unquoted.
"""
from __future__ import unicode_literals, absolute_import
import os
import sys
import io
try:
from io import StringIO
except ImportError:
from StringIO import StringIO # NOQA
import tempfile
import docutils
from lxml import html
import pytest
import unittest
import nikola.plugins.compile.rest
from nikola.plugins.compile.rest import gist
from nikola.plugins.compile.rest import vimeo
import nikola.plugins.compile.rest.listing
from nikola.plugins.compile.rest.doc import Plugin as DocPlugin
from nikola.utils import _reload
from .base import BaseTestCase, FakeSite
class ReSTExtensionTestCase(BaseTestCase):
""" Base class for testing ReST extensions """
sample = 'foo'
deps = None
def setUp(self):
self.compiler = nikola.plugins.compile.rest.CompileRest()
self.compiler.set_site(FakeSite())
return super(ReSTExtensionTestCase, self).setUp()
def basic_test(self):
""" Parse cls.sample into a HTML document tree """
self.setHtmlFromRst(self.sample)
def setHtmlFromRst(self, rst):
""" Create html output from rst string """
tmpdir = tempfile.mkdtemp()
inf = os.path.join(tmpdir, 'inf')
outf = os.path.join(tmpdir, 'outf')
depf = os.path.join(tmpdir, 'outf.dep')
with io.open(inf, 'w+', encoding='utf8') as f:
f.write(rst)
self.html = self.compiler.compile_html(inf, outf)
with io.open(outf, 'r', encoding='utf8') as f:
self.html = f.read()
os.unlink(inf)
os.unlink(outf)
if os.path.isfile(depf):
with io.open(depf, 'r', encoding='utf8') as f:
self.assertEqual(self.deps, f.read())
os.unlink(depf)
else:
self.assertEqual(self.deps, None)
os.rmdir(tmpdir)
self.html_doc = html.parse(StringIO(self.html))
def assertHTMLContains(self, element, attributes=None, text=None):
""" Test if HTML document includes an element with the given
attributes and text content
"""
try:
tag = next(self.html_doc.iter(element))
except StopIteration:
raise Exception("<{0}> not in {1}".format(element, self.html))
else:
if attributes:
arg_attrs = set(attributes.items())
tag_attrs = set(tag.items())
self.assertTrue(arg_attrs.issubset(tag_attrs))
if text:
self.assertIn(text, tag.text)
class ReSTExtensionTestCaseTestCase(ReSTExtensionTestCase):
""" Simple test for our base class :) """
sample = '.. raw:: html\n\n <iframe src="foo" height="bar">spam</iframe>'
def test_test(self):
self.basic_test()
self.assertHTMLContains("iframe", attributes={"src": "foo"},
text="spam")
self.assertRaises(Exception, self.assertHTMLContains, "eggs", {})
class MathTestCase(ReSTExtensionTestCase):
    sample = r':math:`e^{ix} = \cos x + i\sin x`'
def test_math(self):
""" Test that math is outputting TeX code."""
self.basic_test()
self.assertHTMLContains("span", attributes={"class": "math"},
text="\(e^{ix} = \cos x + i\sin x\)")
class GistTestCase(ReSTExtensionTestCase):
""" Test GitHubGist.
We will replace get_raw_gist() and get_raw_gist_with_filename()
monkeypatching the GitHubGist class for avoiding network dependency
"""
gist_type = gist.GitHubGist
sample = '.. gist:: fake_id\n :file: spam.py'
sample_without_filename = '.. gist:: fake_id2'
def setUp(self):
""" Patch GitHubGist for avoiding network dependency """
super(GistTestCase, self).setUp()
self.gist_type.get_raw_gist_with_filename = lambda *_: 'raw_gist_file'
self.gist_type.get_raw_gist = lambda *_: "raw_gist"
_reload(nikola.plugins.compile.rest)
@pytest.mark.skipif(True, reason="This test indefinitely skipped.")
def test_gist(self):
""" Test the gist directive with filename """
self.setHtmlFromRst(self.sample)
output = 'https://gist.github.com/fake_id.js?file=spam.py'
self.assertHTMLContains("script", attributes={"src": output})
self.assertHTMLContains("pre", text="raw_gist_file")
@pytest.mark.skipif(True, reason="This test indefinitely skipped.")
def test_gist_without_filename(self):
""" Test the gist directive without filename """
self.setHtmlFromRst(self.sample_without_filename)
output = 'https://gist.github.com/fake_id2.js'
self.assertHTMLContains("script", attributes={"src": output})
self.assertHTMLContains("pre", text="raw_gist")
class GistIntegrationTestCase(ReSTExtensionTestCase):
""" Test requests integration. The gist plugin uses requests to fetch gist
contents and place it in a noscript tag.
"""
sample = '.. gist:: 1812835'
def test_gist_integration(self):
""" Fetch contents of the gist from GH and render in a noscript tag """
self.basic_test()
text = ('Be alone, that is the secret of invention: be alone, that is'
' when ideas are born. -- Nikola Tesla')
self.assertHTMLContains('pre', text=text)
class SlidesTestCase(ReSTExtensionTestCase):
""" Slides test case """
sample = '.. slides:: IMG.jpg\n'
def test_slides(self):
""" Test the slides js generation and img tag creation """
self.basic_test()
self.assertHTMLContains("img", attributes={"src": "IMG.jpg"})
class SoundCloudTestCase(ReSTExtensionTestCase):
""" SoundCloud test case """
sample = '.. soundcloud:: SID\n :height: 400\n :width: 600'
def test_soundcloud(self):
""" Test SoundCloud iframe tag generation """
self.basic_test()
self.assertHTMLContains("iframe",
attributes={"src": ("https://w.s | oundcloud.com"
"/player/?url=http://"
"api.soundcloud | .com/"
"tracks/SID"),
"height": "400", "width": "600"})
class VimeoTestCase(ReSTExtensionTestCase):
"""Vimeo test.
Set Vimeo.request_size to False for avoiding querying the Vimeo api
over the network
"""
sample = '.. vimeo:: VID\n :height: 400\n :width: 600'
def setUp(self):
""" Disable query of the vimeo api over the wire """
vimeo.Vimeo.request_size = False
super(VimeoTestCase, self).setUp()
_reload(nikola.plugins.compile.rest)
def test_vimeo(self):
""" Test Vimeo iframe tag generation """
self.basic_test()
self.assertHTMLContains("iframe",
attributes={"src": ("//player.vimeo.com/"
"video/VID"),
"height": "400", "width": "600"})
class Youtu |
agoravoting/authapi | authapi/captcha/urls.py | Python | agpl-3.0 | 874 | 0.001144 | # This file is part of authapi.
# Copyright (C) 2014-2020 Agora Voting SL <contact@nvotes.com>
# authapi is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License.
# authapi is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with authapi. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls import url
from .decorators import captcha_required
from captcha import views
urlpatterns = [
url(r'^new/', views.new_captcha, name='new_captcha'),
]
|
SamHames/scikit-image | skimage/feature/texture.py | Python | bsd-3-clause | 10,468 | 0.000287 | """
Methods to characterize image textures.
"""
import numpy as np
from ._texture import _glcm_loop, _local_binary_pattern
def greycomatrix(image, distances, angles, levels=256, symmetric=False,
normed=False):
"""Calculate the grey-level co-occurrence matrix.
    A grey level co-occurrence matrix is a histogram of co-occurring
greyscale values at a given offset over an image.
Parameters
----------
image : array_like of uint8
Integer typed input image. The image will be cast to uint8, so
the maximum value must be less than 256.
distances : array_like
List of pixel pair distance offsets.
angles : array_like
List of pixel pair angles in radians.
levels : int, optional
The input image should contain integers in [0, levels-1],
where levels indicate the number of grey-levels counted
(typically 256 for an 8-bit image). The maximum value is
256.
symmetric : bool, optional
If True, the output matrix `P[:, :, d, theta]` is symmetric. This
is accomplished by ignoring the order of value pairs, so both
(i, j) and (j, i) are accumulated when (i, j) is encountered
for a given offset. The default is False.
normed : bool, optional
If True, normalize each matrix `P[:, :, d, theta]` by dividing
by the total number of accumulated co-occurrences for the given
offset. The elements of the resulting matrix sum to 1. The
default is False.
Returns
-------
P : 4-D ndarray
The grey-level co-occurrence histogram. The value
`P[i,j,d,theta]` is the number of times that grey-level `j`
occurs at a distance `d` and at an angle `theta` from
grey-level `i`. If `normed` is `False`, the output is of
type uint32, otherwise it is float64.
References
----------
.. [1] The GLCM Tutorial Home Page,
http://www.fp.ucalgary.ca/mhallbey/tutorial.htm
.. [2] Pattern Recognition Engineering, Morton Nadler & Eric P.
Smith
.. [3] Wikipedia, http://en.wikipedia.org/wiki/Co-occurrence_matrix
Examples
--------
Compute 2 GLCMs: One for a 1-pixel offset to the right, and one
for a 1-pixel offset upwards.
>>> image = np.array([[0, 0, 1, 1],
... [0, 0, 1, 1],
... [0, 2, 2, 2],
... [2, 2, 3, 3]], dtype=np.uint8)
>>> result = greycomatrix(image, [1], [0, np.pi/4, np.pi/2, 3*np.pi/4], levels=4)
>>> result[:, :, 0, 0]
array([[2, 2, 1, 0],
[0, 2, 0, 0],
[0, 0, 3, 1],
[0, 0, 0, 1]], dtype=uint32)
>>> result[:, :, 0, 1]
array([[1, 1, 3, 0],
[0, 1, 1, 0],
[0, 0, 0, 2],
[0, 0, 0, 0]], dtype=uint32)
>>> result[:, :, 0, 2]
array([[3, 0, 2, 0],
[0, 2, 2, 0],
[0, 0, 1, 2],
[0, 0, 0, 0]], dtype=uint32)
>>> | result[:, :, 0, 3]
array([[2, 0, 0, 0],
[1, 1, 2, 0],
[0, 0, 2, 1],
[0, 0, 0, 0]], dtype=uint32)
""" |
assert levels <= 256
image = np.ascontiguousarray(image)
assert image.ndim == 2
assert image.min() >= 0
assert image.max() < levels
image = image.astype(np.uint8)
distances = np.ascontiguousarray(distances, dtype=np.float64)
angles = np.ascontiguousarray(angles, dtype=np.float64)
assert distances.ndim == 1
assert angles.ndim == 1
P = np.zeros((levels, levels, len(distances), len(angles)),
dtype=np.uint32, order='C')
# count co-occurences
_glcm_loop(image, distances, angles, levels, P)
    # make each GLCM symmetric
if symmetric:
Pt = np.transpose(P, (1, 0, 2, 3))
P = P + Pt
    # normalize each GLCM
if normed:
P = P.astype(np.float64)
glcm_sums = np.apply_over_axes(np.sum, P, axes=(0, 1))
glcm_sums[glcm_sums == 0] = 1
P /= glcm_sums
return P
def greycoprops(P, prop='contrast'):
"""Calculate texture properties of a GLCM.
Compute a feature of a grey level co-occurrence matrix to serve as
a compact summary of the matrix. The properties are computed as
follows:
- 'contrast': :math:`\\sum_{i,j=0}^{levels-1} P_{i,j}(i-j)^2`
- 'dissimilarity': :math:`\\sum_{i,j=0}^{levels-1}P_{i,j}|i-j|`
- 'homogeneity': :math:`\\sum_{i,j=0}^{levels-1}\\frac{P_{i,j}}{1+(i-j)^2}`
- 'ASM': :math:`\\sum_{i,j=0}^{levels-1} P_{i,j}^2`
- 'energy': :math:`\\sqrt{ASM}`
- 'correlation':
.. math:: \\sum_{i,j=0}^{levels-1} P_{i,j}\\left[\\frac{(i-\\mu_i) \\
(j-\\mu_j)}{\\sqrt{(\\sigma_i^2)(\\sigma_j^2)}}\\right]
Parameters
----------
P : ndarray
Input array. `P` is the grey-level co-occurrence histogram
for which to compute the specified property. The value
`P[i,j,d,theta]` is the number of times that grey-level j
occurs at a distance d and at an angle theta from
grey-level i.
prop : {'contrast', 'dissimilarity', 'homogeneity', 'energy', \
'correlation', 'ASM'}, optional
The property of the GLCM to compute. The default is 'contrast'.
Returns
-------
results : 2-D ndarray
2-dimensional array. `results[d, a]` is the property 'prop' for
the d'th distance and the a'th angle.
References
----------
.. [1] The GLCM Tutorial Home Page,
http://www.fp.ucalgary.ca/mhallbey/tutorial.htm
Examples
--------
Compute the contrast for GLCMs with distances [1, 2] and angles
[0 degrees, 90 degrees]
>>> image = np.array([[0, 0, 1, 1],
... [0, 0, 1, 1],
... [0, 2, 2, 2],
... [2, 2, 3, 3]], dtype=np.uint8)
>>> g = greycomatrix(image, [1, 2], [0, np.pi/2], levels=4,
... normed=True, symmetric=True)
>>> contrast = greycoprops(g, 'contrast')
>>> contrast
array([[ 0.58333333, 1. ],
[ 1.25 , 2.75 ]])
"""
assert P.ndim == 4
(num_level, num_level2, num_dist, num_angle) = P.shape
assert num_level == num_level2
assert num_dist > 0
assert num_angle > 0
# create weights for specified property
I, J = np.ogrid[0:num_level, 0:num_level]
if prop == 'contrast':
weights = (I - J) ** 2
elif prop == 'dissimilarity':
weights = np.abs(I - J)
elif prop == 'homogeneity':
weights = 1. / (1. + (I - J) ** 2)
elif prop in ['ASM', 'energy', 'correlation']:
pass
else:
raise ValueError('%s is an invalid property' % (prop))
# compute property for each GLCM
if prop == 'energy':
asm = np.apply_over_axes(np.sum, (P ** 2), axes=(0, 1))[0, 0]
results = np.sqrt(asm)
elif prop == 'ASM':
results = np.apply_over_axes(np.sum, (P ** 2), axes=(0, 1))[0, 0]
elif prop == 'correlation':
results = np.zeros((num_dist, num_angle), dtype=np.float64)
I = np.array(range(num_level)).reshape((num_level, 1, 1, 1))
J = np.array(range(num_level)).reshape((1, num_level, 1, 1))
diff_i = I - np.apply_over_axes(np.sum, (I * P), axes=(0, 1))[0, 0]
diff_j = J - np.apply_over_axes(np.sum, (J * P), axes=(0, 1))[0, 0]
std_i = np.sqrt(np.apply_over_axes(np.sum, (P * (diff_i) ** 2),
axes=(0, 1))[0, 0])
std_j = np.sqrt(np.apply_over_axes(np.sum, (P * (diff_j) ** 2),
axes=(0, 1))[0, 0])
cov = np.apply_over_axes(np.sum, (P * (diff_i * diff_j)),
axes=(0, 1))[0, 0]
# handle the special case of standard deviations near zero
mask_0 = std_i < 1e-15
mask_0[std_j < 1e-15] = True
results[mask_0] = 1
# handle the standard case
        mask_1 = ~mask_0
results[mask_1] = cov[mask_1] / (std_i[mask_1] * std_j[mask_1])
elif prop in ['contrast', 'dissimilarity', 'homogeneity']:
weights = weights.reshape((num_level, num_level, 1, 1))
|
Yelp/paasta | paasta_tools/setup_kubernetes_job.py | Python | apache-2.0 | 7,313 | 0.00082 | #!/usr/bin/env python
# Copyright 2015-2018 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Usage: ./setup_kubernetes_job.py <service.instance> [options]
Command line options:
- -d <SOA_DIR>, --soa-dir <SOA_DIR>: Specify a SOA config dir to read from
- -v, --verbose: Verbose output
"""
import argparse
import logging
import sys
from typing import Optional
from typing import Sequence
from typing import Tuple
from paasta_tools.kubernetes.application.controller_wrappers import Application
from paasta_tools.kubernetes.application.controller_wrappers import (
get_application_wrapper,
)
from paasta_tools.kubernetes_tools import ensure_namespace
from paasta_tools.kubernetes_tools import InvalidKubernetesConfig
from paasta_tools.kubernetes_tools import KubeClient
from paasta_tools.kubernetes_tools import list_all_deployments
from paasta_tools.kubernetes_tools import load_kubernetes_service_config_no_cache
from paasta_tools.utils import decompose_job_id
from paasta_tools.utils import DEFAULT_SOA_DIR
from paasta_tools.utils import InvalidJobNameError
from paasta_tools.utils import load_system_paasta_config
from paasta_tools.utils import NoConfigurationForServiceError
from paasta_tools.utils import NoDeploymentsAvailable
from paasta_tools.utils import SPACER
log = logging.getLogger(__name__)
def parse_args() -> argparse.Namespace:
parser = argparse.ArgumentParser(description="Creates Kubernetes jobs.")
parser.add_argument(
"service_instance_list",
nargs="+",
help="The list of Kubernetes service instances to create or update",
metavar="SERVICE%sINSTANCE" % SPACER,
)
parser.add_argument(
"-d",
"--soa-dir",
dest="soa_dir",
metavar="SOA_DIR",
default=DEFAULT_SOA_DIR,
help="define a different soa config directory",
)
parser.add_argument(
"-c", "--cluster", dest="cluster", help="paasta cluster",
)
parser.add_argument(
"-v", "--verbose", action="store_true", dest="verbose", default=False,
)
parser.add_argument(
"-l",
"--rate-limit",
dest="rate_limit",
default=0,
metavar="LIMIT",
type=int,
help="Update or create up to this number of service instances. Default is 0 (no limit).",
)
args = parser.parse_args()
return args
def main() -> None:
args = parse_args()
soa_dir = args.soa_dir
if args.verbose:
logging.basicConfig(level=logging.DEBUG)
else:
# filter out unwanted zookeeper messages in the log
logging.getLogger("kazoo").setLevel(logging.WARN)
logging.basicConfig(level=logging.INFO)
# system_paasta_config = load_system_paasta_config()
kube_client = KubeClient()
ensure_namespace(kube_client, namespace="paasta")
setup_kube_succeeded = setup_kube_deployments(
kube_client=kube_client,
service_instances=args.service_instance_list,
soa_dir=soa_dir,
cluster=args.cluster or load_system_paasta_config().get_cluster(),
rate_limit=args.rate_limit,
)
sys.exit(0 if setup_kube_succeeded else 1)
def validate_job_name(service_instance: str) -> bool:
try:
        service, instance, _, __ = decompose_job_id(service_instance)
except InvalidJobNameError:
log.error(
"Invalid service instance specified. Format is service%sinstance." % SPACER
)
        return False
return True
def setup_kube_deployments(
kube_client: KubeClient,
service_instances: Sequence[str],
cluster: str,
rate_limit: int = 0,
soa_dir: str = DEFAULT_SOA_DIR,
) -> bool:
if service_instances:
existing_kube_deployments = set(list_all_deployments(kube_client))
existing_apps = {
(deployment.service, deployment.instance)
for deployment in existing_kube_deployments
}
service_instances_with_valid_names = [
decompose_job_id(service_instance)
for service_instance in service_instances
if validate_job_name(service_instance)
]
applications = [
create_application_object(
kube_client=kube_client,
service=service_instance[0],
instance=service_instance[1],
cluster=cluster,
soa_dir=soa_dir,
)
for service_instance in service_instances_with_valid_names
]
api_updates = 0
for _, app in applications:
if app:
try:
if (
app.kube_deployment.service,
app.kube_deployment.instance,
) not in existing_apps:
log.info(f"Creating {app} because it does not exist yet.")
app.create(kube_client)
api_updates += 1
elif app.kube_deployment not in existing_kube_deployments:
log.info(f"Updating {app} because configs have changed.")
app.update(kube_client)
api_updates += 1
else:
log.info(f"{app} is up-to-date!")
log.info(f"Ensuring related API objects for {app} are in sync")
app.update_related_api_objects(kube_client)
except Exception:
log.exception(f"Error while processing: {app}")
if rate_limit > 0 and api_updates >= rate_limit:
log.info(
f"Not doing any further updates as we reached the limit ({api_updates})"
)
break
return (False, None) not in applications and len(
service_instances_with_valid_names
) == len(service_instances)
def create_application_object(
kube_client: KubeClient, service: str, instance: str, cluster: str, soa_dir: str,
) -> Tuple[bool, Optional[Application]]:
try:
service_instance_config = load_kubernetes_service_config_no_cache(
service, instance, cluster, soa_dir=soa_dir,
)
except NoDeploymentsAvailable:
log.debug(
"No deployments found for %s.%s in cluster %s. Skipping."
% (service, instance, cluster)
)
return True, None
except NoConfigurationForServiceError:
error_msg = (
f"Could not read kubernetes configuration file for %s.%s in cluster %s"
% (service, instance, cluster)
)
log.error(error_msg)
return False, None
try:
formatted_application = service_instance_config.format_kubernetes_app()
except InvalidKubernetesConfig as e:
log.error(str(e))
return False, None
app = get_application_wrapper(formatted_application)
app.load_local_config(soa_dir, cluster)
return True, app
if __name__ == "__main__":
main()
|
sonya/eea | py/run_usa.py | Python | apache-2.0 | 111 | 0.027027 | #!/usr/bin/python3
#import usa.matrices
#import usa.total_energy
import usa.emissions
#import usa.food_sector
|
os2webscanner/os2webscanner | django-os2webscanner/os2webscanner/__init__.py | Python | mpl-2.0 | 53 | 0 | """Django module for the OS2datascanner project."""
|
junhuac/MQUIC | src/tools/android/loading/user_satisfied_lens.py | Python | mit | 5,080 | 0.006496 | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Identifies key events related to user satisfaction.
Several lenses are defined, for example FirstTextPaintLens and
FirstSignificantPaintLens.
"""
import logging
import operator
class _UserSatisfiedLens(object):
"""A base class for all user satisfaction metrics.
All of these work by identifying a user satisfaction event from the trace, and
then building a set of request ids whose loading is needed to achieve that
event. Subclasses need only provide the time computation. The base class will
use that to construct the request ids.
"""
def __init__(self, trace):
"""Initialize the lens.
Args:
trace: (LoadingTrace) the trace to use in the analysis.
"""
self._satisfied_msec = None
self._event_msec = None
self._CalculateTimes(trace.tracing_track)
critical_requests = self._RequestsBefore(
trace.request_track, self._satisfied_msec)
self._critical_request_ids = set(rq.request_id for rq in critical_requests)
if critical_requests:
last_load = max(rq.end_msec for rq in critical_requests)
else:
last_load = float('inf')
self._postload_msec = self._event_msec - last_load
def CriticalRequests(self):
"""Request ids of critical requests.
Returns:
A set of request ids (as strings) of an estimate of all requests that are
necessary for the user satisfaction defined by this class.
"""
return self._critical_request_ids
def PostloadTimeMsec(self):
"""Return postload time.
The postload time is an estimate of the amount of time needed by chrome to
transform the critical results into the satisfying event.
Returns:
Postload time in milliseconds.
"""
return self._postload_msec
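  # Hedged usage sketch (assumes a loaded LoadingTrace named `trace`):
  #   lens = FirstTextPaintLens(trace)
  #   critical_ids = lens.CriticalRequests()
  #   postload_msec = lens.PostloadTimeMsec()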
def _CalculateTimes(self, tracing_track):
"""Subclasses should implement to set _satisfied_msec and _event_msec."""
raise NotImplementedError
@classmethod
def _RequestsBefore(cls, request_track, time_ms):
return [rq for rq in request_track.GetEvents()
if rq.end_msec <= time_ms]
class _FirstEventLens(_UserSatisfiedLens):
"""Helper abstract subclass that defines users first event manipulations."""
# pylint can't handle abstract subclasses.
# pylint: disable=abstract-method
@classmethod
def _ExtractFirstTiming(cls, times):
if not times:
return float('inf')
if len(times) != 1:
# TODO(mattcary): in some cases a trace has two first paint events. Why?
logging.error('%d %s with spread of %s', len(times),
str(cls), max(times) - min(times))
return float(min(times))
class FirstTextPaintLens(_FirstEventLens):
"""Define satisfaction by the first text paint.
This event is taken directly from a trace.
"""
def _CalculateTimes(self, tracing_track):
first_paints = [e.start_msec for e in tracing_track.GetEvents()
if e.Matches('blink.user_timing', 'firstPaint')]
self._satisfied_msec = self._event_msec = \
self._ExtractFirstTiming(first_paints)
class FirstContentfulPaintLens(_FirstEventLens):
"""Define satisfaction by the first contentful paint.
This event is taken directly from a trace. Internally to chrome it's computed
by filtering out things like background paint from firstPaint.
"""
def _CalculateTimes(self, tracing_track):
    first_paints = [e.start_msec for e in tracing_track.GetEvents()
if e.Matches('blink.user_timing', 'firstContentfulPaint')]
self._satisfied_msec = self._event_msec = \
self._ExtractFirstTiming(first_paints)
class FirstSignificantPaintLens(_FirstEventLens):
"""Define satisfaction by the first paint after a big layout change.
Our satisfaction time is that of the layout change, as all resources must have
  been loaded to compute the layout. Our event time is that of the next paint as
that is the observable event.
"""
FIRST_LAYOUT_COUNTER = 'LayoutObjectsThatHadNeverHadLayout'
def _CalculateTimes(self, tracing_track):
sync_paint_times = []
layouts = [] # (layout item count, msec).
for e in tracing_track.GetEvents():
# TODO(mattcary): is this the right paint event? Check if synchronized
# paints appear at the same time as the first*Paint events, above.
if e.Matches('blink', 'FrameView::SynchronizedPaint'):
sync_paint_times.append(e.start_msec)
if ('counters' in e.args and
self.FIRST_LAYOUT_COUNTER in e.args['counters']):
layouts.append((e.args['counters'][self.FIRST_LAYOUT_COUNTER],
e.start_msec))
assert layouts, ('No layout events, was the disabled-by-default-blink'
'.debug.layout category enabled?')
layouts.sort(key=operator.itemgetter(0), reverse=True)
self._satisfied_msec = layouts[0][1]
self._event_msec = self._ExtractFirstTiming([
min(t for t in sync_paint_times if t > self._satisfied_msec)])
|
Fokko/incubator-airflow | airflow/operators/pig_operator.py | Python | apache-2.0 | 2,775 | 0 | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import re
from typing import Optional
from airflow.hooks.pig_hook import PigCliHook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class PigOperator(BaseOperator):
"""
Executes pig script.
:param pig: the pig latin script to be executed. (templated)
:type pig: str
:param pig_cli_conn_id: reference to the Hive database
:type pig_cli_conn_id: str
    :param pigparams_jinja_translate: when True, pig params-type templating
        ${var} gets translated into jinja-type templating {{ var }}. Note that
you may want to use this along with the
``DAG(user_defined_macros=myargs)`` parameter. View the DAG
object documentation for more details.
:type pigparams_jinja_translate: bool
:param pig_opts: pig options, such as: -x tez, -useHCatalog, ...
:type pig_opts: str
"""
template_fields = ('pig',)
template_ext = ('.pig', '.piglatin',)
ui_color = '#f0e4ec'
@apply_defaults
def __init__(
self,
pig: str,
pig_cli_conn_id: str = 'pig_cli_default',
pigparams_jinja_translate: bool = False,
pig_opts: Optional[str] = None,
*args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self.pigparams_jinja_translate = pigparams_jinja_translate
self.pig = pig
self.pig_cli_conn_id = pig_cli_conn_id
self.pig_opts = pig_opts
def get_hook(self):
return PigCliHook(pig_cli_conn_id=self.pig_cli_conn_id)
def prepare_template(self):
if self.pigparams_jinja_translate:
self.pig = re.sub(
r"(\$([a-zA-Z_][a-zA-Z0-9_]*))", r"{{ \g<2> }}", self.pig)
def execute(self, context):
self.log.info('Executing: %s', self.pig)
self.hook = self.get_hook()
self.hook.run_cli(pig=self.pig, pig_opts=self.pig_opts)
def on_kill(self):
self.hook.kill()
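# Hedged usage sketch (assumed DAG context; all values are placeholders):
#
# task = PigOperator(
#     task_id="run_pig",
#     pig="data = LOAD '$input' AS (line:chararray); DUMP data;",
#     pig_opts="-x local",
#     pigparams_jinja_translate=True,
#     dag=dag,
# )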
|
ProjexSoftware/projex | setup.py | Python | lgpl-3.0 | 2,436 | 0.004516 | import os
import re
import subprocess
from setuptools import setup, find_packages, Command
try:
with open('projex/_version.py', 'r') as f:
content = f.read()
major = re.search('__major__ = (\d+)', content).group(1)
minor = re.search('__minor__ = (\d+)', content).group(1)
rev = re.search('__revision__ = (\d+)', content).group(1)
version = '.'.join((major, minor, rev))
except StandardError:
version = '0.0.0'
class tag(Command):
description = 'Command used to release new versions of the website to the internal pypi server.'
user_options = [
('no-tag', None, 'Do not tag the repo before releasing')
]
def initialize_options(self):
self.no_tag = False
def finalize_options(self):
pass
def run(self):
# generate the version information from the current git commit
cmd = ['git', 'describe', '--match', 'v[0-9]*.[0-9]*.0']
desc = subprocess.check_output(cmd).strip()
result = re.match('v([0-9]+)\.([0-9]+)\.0-([0-9]+)-(.*)', desc)
print 'generating version information from:', desc
with open('./projex/_version.py', 'w') as f:
f.write('__major__ = {0}\n'.format(result.group(1)))
f.write('__minor__ = {0}\n'.format(result.group(2)))
f.write('__revision__ = {0}\n'.format(result.group(3)))
f.write('__hash__ = "{0}"'.format(result.group(4)))
# tag this new release version
if not self.no_tag:
version = '.'.join([result.group(1), result.group(2), result.group(3)])
print 'creating git tag:', 'v' + version
os.system('git tag -a v{0} -m "releasing {0}"'.format(version))
os.system('git push --tags')
else:
print 'warning: tagging ignored...'
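# Usage note (inferred from the command above): run `python setup.py tag`
# (or `python setup.py tag --no-tag`) to regenerate projex/_version.py from
# `git describe` and, unless --no-tag is given, push an annotated release tag.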
setup(
    name='projex',
version=version,
author='Eric Hulser',
author_email='eric.hulser@gmail.com',
maintainer='Eric Hulser',
maintainer_email='eric.hulser@gmail.com',
description='Library of useful utilities for Python.',
license='MIT',
keywords='',
url='https://github.com/ProjexSoftware/projex',
include_package_data=True,
scripts=[os.path.join('projex', 'scripts', 'xbuild.py')],
packages=find_packages(),
cmdclass={
'tag': tag
},
tests_require=[],
    long_description='Library of useful utilities for Python.',
classifiers=[],
)
|
paninetworks/neutron | neutron/services/l3_router/brocade/mlx/l3_router_plugin.py | Python | apache-2.0 | 2,212 | 0 | # Copyright 2015 Brocade Communications Systems, Inc.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Implementation of Brocade L3RouterPlugin for MLX switches."""
from networking_brocade.mlx.services.l3_router.brocade import (
l3_router_plugin as plugin)
from oslo_config import cfg
SWITCHES = [
cfg.StrOpt(
'switch_names',
default='',
help=('Switches connected to the compute nodes'))]
L3_BROCADE = [cfg.StrOpt('address', default='',
                         help=('The IP address of the MLX switch')),
cfg.StrOpt('username', default='admin',
help=('The SSH username of the switch')),
cfg.StrOpt('password', default='password', secret=True,
help=('The SSH password of the switch')),
cfg.StrOpt('physical_networks', default='',
help=('Allowed physical networks where VLAN can '
                                 'be configured on this switch')),
cfg.StrOpt('ports', default='',
help=('Ports to be tagged in the VLAN being '
'configured on the switch')),
]
cfg.CONF.register_opts(SWITCHES, 'l3_brocade_mlx')
cfg.CONF.register_opts(L3_BROCADE, 'L3_BROCADE_MLX_EXAMPLE')
class BrocadeRouterPlugin(plugin.BrocadeRouterPlugin):
def __init__(self):
self._switch_names = cfg.CONF.l3_brocade_mlx.switch_names
switches = [x.strip() for x in self._switch_names.split(',')]
for switch in switches:
cfg.CONF.register_opts(L3_BROCADE, switch)
super(BrocadeRouterPlugin, self).__init__()
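# Hedged configuration sketch (INI layout assumed from the options registered
# above; all values are placeholders):
#
# [l3_brocade_mlx]
# switch_names = mlx_switch_1, mlx_switch_2
#
# [mlx_switch_1]
# address = 10.0.0.1
# username = admin
# password = secret
# physical_networks = physnet1
# ports = ethernet 1/1, ethernet 1/2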
|
wujuguang/scrapy | tests/test_pipeline_files.py | Python | bsd-3-clause | 16,352 | 0.00263 | import os
import random
import time
import hashlib
import warnings
from tempfile import mkdtemp
from shutil import rmtree
from six.moves.urllib.parse import urlparse
from six import BytesIO
from twisted.trial import unittest
from twisted.internet import defer
from scrapy.pipelines.files import FilesPipeline, FSFilesStore, S3FilesStore, GCSFilesStore
from scrapy.item import Item, Field
from scrapy.http import Request, Response
from scrapy.settings import Settings
from scrapy.utils.python import to_bytes
from scrapy.utils.test import assert_aws_environ, get_s3_content_and_delete
from scrapy.utils.test import assert_gcs_environ, get_gcs_content_and_delete
from scrapy.utils.boto import is_botocore
from tests import mock
def _mocked_download_func(request, info):
response = request.meta.get('response')
return response() if callable(response) else response
class FilesPipelineTestCase(unittest.TestCase):
def setUp(self):
self.tempdir = mkdtemp()
self.pipeline = FilesPipeline.from_settings(Settings({'FILES_STORE': self.tempdir}))
self.pipeline.download_func = _mocked_download_func
self.pipeline.open_spider(None)
def tearDown(self):
rmtree(self.tempdir)
def test_file_path(self):
file_path = self.pipeline.file_path
        self.assertEqual(file_path(Request("https://dev.mydeco.com/mydeco.pdf")),
                         'full/c9b564df929f4bc635bdd19fde4f3d4847c757c5.pdf')
self.assertEqual(file_path(Request("http://www.maddiebrown.co.uk///catalogue-items//image_54642_12175_95307.txt")),
'full/4ce274dd83db0368bafd7e406f382ae088e39219.txt')
self.assertEqual(file_path(Request("https://dev.mydeco.com/two/dirs/with%20spaces%2Bsigns.doc")),
'full/94ccc495a17b9ac5d40e3eabf3afcb8c2c9b9e1a.doc')
self.assertEqual(file_path(Request("http://www.dfsonline.co.uk/get_prod_image.php?img=status_0907_mdm.jpg")),
'full/4507be485f38b0da8a0be9eb2e1dfab8a19223f2.jpg')
self.assertEqual(file_path(Request("http://www.dorma.co.uk/images/product_details/2532/")),
'full/97ee6f8a46cbbb418ea91502fd24176865cf39b2')
self.assertEqual(file_path(Request("http://www.dorma.co.uk/images/product_details/2532")),
'full/244e0dd7d96a3b7b01f54eded250c9e272577aa1')
self.assertEqual(file_path(Request("http://www.dorma.co.uk/images/product_details/2532"),
response=Response("http://www.dorma.co.uk/images/product_details/2532"),
info=object()),
'full/244e0dd7d96a3b7b01f54eded250c9e272577aa1')
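        # The expected values above appear to follow FilesPipeline's default
        # scheme: 'full/<SHA-1 hex digest of the request URL>', keeping the
        # file extension when one can be inferred from the URL.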
def test_fs_store(self):
assert isinstance(self.pipeline.store, FSFilesStore)
self.assertEqual(self.pipeline.store.basedir, self.tempdir)
path = 'some/image/key.jpg'
fullpath = os.path.join(self.tempdir, 'some', 'image', 'key.jpg')
self.assertEqual(self.pipeline.store._get_filesystem_path(path), fullpath)
@defer.inlineCallbacks
def test_file_not_expired(self):
item_url = "http://example.com/file.pdf"
item = _create_item_with_files(item_url)
patchers = [
mock.patch.object(FilesPipeline, 'inc_stats', return_value=True),
mock.patch.object(FSFilesStore, 'stat_file', return_value={
'checksum': 'abc', 'last_modified': time.time()}),
mock.patch.object(FilesPipeline, 'get_media_requests',
return_value=[_prepare_request_object(item_url)])
]
for p in patchers:
p.start()
result = yield self.pipeline.process_item(item, None)
self.assertEqual(result['files'][0]['checksum'], 'abc')
for p in patchers:
p.stop()
@defer.inlineCallbacks
def test_file_expired(self):
item_url = "http://example.com/file2.pdf"
item = _create_item_with_files(item_url)
patchers = [
mock.patch.object(FSFilesStore, 'stat_file', return_value={
'checksum': 'abc',
'last_modified': time.time() - (self.pipeline.expires * 60 * 60 * 24 * 2)}),
mock.patch.object(FilesPipeline, 'get_media_requests',
return_value=[_prepare_request_object(item_url)]),
mock.patch.object(FilesPipeline, 'inc_stats', return_value=True)
]
for p in patchers:
p.start()
result = yield self.pipeline.process_item(item, None)
self.assertNotEqual(result['files'][0]['checksum'], 'abc')
for p in patchers:
p.stop()
class FilesPipelineTestCaseFields(unittest.TestCase):
def test_item_fields_default(self):
class TestItem(Item):
name = Field()
file_urls = Field()
files = Field()
for cls in TestItem, dict:
url = 'http://www.example.com/files/1.txt'
item = cls({'name': 'item1', 'file_urls': [url]})
pipeline = FilesPipeline.from_settings(Settings({'FILES_STORE': 's3://example/files/'}))
requests = list(pipeline.get_media_requests(item, None))
self.assertEqual(requests[0].url, url)
results = [(True, {'url': url})]
pipeline.item_completed(results, item, None)
self.assertEqual(item['files'], [results[0][1]])
def test_item_fields_override_settings(self):
class TestItem(Item):
name = Field()
files = Field()
stored_file = Field()
for cls in TestItem, dict:
url = 'http://www.example.com/files/1.txt'
item = cls({'name': 'item1', 'files': [url]})
pipeline = FilesPipeline.from_settings(Settings({
'FILES_STORE': 's3://example/files/',
'FILES_URLS_FIELD': 'files',
'FILES_RESULT_FIELD': 'stored_file'
}))
requests = list(pipeline.get_media_requests(item, None))
self.assertEqual(requests[0].url, url)
results = [(True, {'url': url})]
pipeline.item_completed(results, item, None)
self.assertEqual(item['stored_file'], [results[0][1]])
class FilesPipelineTestCaseCustomSettings(unittest.TestCase):
default_cls_settings = {
"EXPIRES": 90,
"FILES_URLS_FIELD": "file_urls",
"FILES_RESULT_FIELD": "files"
}
file_cls_attr_settings_map = {
("EXPIRES", "FILES_EXPIRES", "expires"),
("FILES_URLS_FIELD", "FILES_URLS_FIELD", "files_urls_field"),
("FILES_RESULT_FIELD", "FILES_RESULT_FIELD", "files_result_field")
}
def setUp(self):
self.tempdir = mkdtemp()
def tearDown(self):
rmtree(self.tempdir)
def _generate_fake_settings(self, prefix=None):
def random_string():
return "".join([chr(random.randint(97, 123)) for _ in range(10)])
settings = {
"FILES_EXPIRES": random.randint(100, 1000),
"FILES_URLS_FIELD": random_string(),
"FILES_RESULT_FIELD": random_string(),
"FILES_STORE": self.tempdir
}
if not prefix:
return settings
return {prefix.upper() + "_" + k if k != "FILES_STORE" else k: v for k, v in settings.items()}
def _generate_fake_pipeline(self):
class UserDefinedFilePipeline(FilesPipeline):
EXPIRES = 1001
FILES_URLS_FIELD = "alfa"
FILES_RESULT_FIELD = "beta"
return UserDefinedFilePipeline
def test_different_settings_for_different_instances(self):
"""
If there are different instances with different settings they should keep
different settings.
"""
custom_settings = self._generate_fake_settings()
another_pipeline = FilesPipeline.from_settings(Settings(custom_settings))
one_pipeline = FilesPipeline(self.tempdir)
for pipe_attr, settings_attr, pipe_ins_attr in self.file_cls_attr_settings_map:
default_value = self.default_cls_settings[pipe_attr]
self.ass |
vitay/ANNarchy | ANNarchy/generator/Sanity.py | Python | gpl-2.0 | 13,045 | 0.006439 | #===============================================================================
#
# Sanity.py
#
# This file is part of ANNarchy.
#
# Copyright (C) 2013-2016 Julien Vitay <julien.vitay@gmail.com>,
# Helge Uelo Dinkelbach <helge.dinkelbach@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ANNarchy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#===============================================================================
import re
from ANNarchy.core import Global
from ANNarchy.core.PopulationView import PopulationView
from ANNarchy.models.Synapses import DefaultSpikingSynapse, DefaultRateCodedSynapse
# No variable can have these names
reserved_variables = [
't',
'dt',
't_pre',
't_post',
't_last',
'last_spike',
'rk_post',
'rk_pre',
'i',
'j',
'active',
'refractory',
'size',
]
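# Illustrative consequence (derived from the checks below): declaring a neuron
# or synapse variable named e.g. "t", "dt" or "size" aborts compilation with
# "<name> is a reserved variable name".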
def check_structure(populations, projections):
"""
Checks the structure before compilation to display more useful error messages.
"""
from ANNarchy.extensions.convolution.Transpose import Transpose
# Check variable names
_check_reserved_names(populations, projections)
# Check that projections are created before compile
for proj in projections:
if isinstance(proj, Transpose):
continue
if not proj._connection_method:
Global._error('The projection between populations', proj.pre.id, 'and', proj.post.id, 'has not been connected.',
' Call a connector method before compiling the network.')
# Check if the storage formats are valid for the selected paradigm
_check_storage_formats(projections)
# Check that synapses access existing variables in the pre or post neurons
_check_prepost(populations, projections)
# Check locality of variable is respected
_check_locality(populations, projections)
def check_experimental_features(populations, projections):
"""
    The idea behind this method is to check whether new experimental features
    are used. This should also help make the user aware of changes.
"""
# CPU-related formats
if Global.config['paradigm'] == "openmp":
for proj in projections:
if proj._storage_format == "csr" and proj._storage_order == "pre_to_post":
Global._warning("Compressed sparse row (CSR) and pre_to_post ordering representation is an experimental feature, we greatly appreciate bug reports.")
break
for proj in projections:
if proj._storage_format == "bsr":
Global._warning("Blocked sparse row (BSR) representation is an experimental feature, we greatly appreciate bug reports.")
break
for proj in projections:
if proj._storage_format == "coo":
Global._warning("Coordinate (COO) representation is an experimental feature, we greatly appreciate bug reports.")
break
for proj in projections:
if proj._storage_format == "ellr":
Global._warning("ELLPACK-R (ELLR) representation is an experimental feature, we greatly appreciate bug reports.")
break
for proj in projections:
if proj._storage_format == "ell":
Global._warning("ELLPACK (ELL) representation is an experimental feature, we greatly appreciate bug reports.")
break
for proj in projections:
if proj._storage_format == "hyb":
Global._warning("Hybrid (ELL + COO) representation is an experimental feature, we greatly appreciate bug reports.")
break
# GPU-related formats
elif Global.config['paradigm'] == "cuda":
for pop in populations:
if pop.neuron_type.description['type'] == "spike":
Global._warning('Spiking neurons on GPUs is an experimental feature. We greatly appreciate bug reports.')
break
for proj in projections:
if proj._storage_format == "ellr":
Global._warning("ELLPACK-R (ELLR) representation is an experimental feature, we greatly appreciate bug reports.")
break
for proj in projections:
if proj._storage_format == "bsr":
Global._warning("Blocked sparse row (BSR) representation is an experimental feature, we greatly appreciate bug reports.")
break
for proj in projections:
if proj._storage_format == "coo":
Global._warning("Coordinate (COO) representation is an experimental feature, we greatly appreciate bug reports.")
break
for proj in projections:
if proj._storage_format == "hyb":
Global._warning("Hybrid (ELL + COO) representation is an experimental feature, we greatly appreciate bug reports.")
break
else:
pass
def _check_reserved_names(populations, projections):
"""
    Checks that no reserved variable names are redefined
"""
# Check populations
for pop in populations:
# Reserved variable names
for term in reserved_variables:
if term in pop.attributes:
Global._print(pop.neuron_type.parameters)
Global._print(pop.neuron_type.equations)
Global._error(term + ' is a reserved variable name')
# Check projections
for proj in projections:
# Reserved variable names
for term in reserved_variables:
if term in proj.attributes:
Global._print(proj.synapse_type.parameters)
Global._print(proj.synapse_type.equations)
Global._error(term + ' is a reserved variable name')
def _check_storage_formats(projections):
"""
ANNarchy 4.7 introduced a set of sparse matrix formats. Some of them are not implemented for
all paradigms or might not support specific optimizations.
"""
for proj in projections:
        # Most of the sparse matrix formats are not trivially invertible and therefore we cannot implement
        # spiking models with them
if proj.synapse_type.type == "spike" and proj._storage_format in ["ell", "ellr", "coo", "hyb"]:
raise Global.ANNarchyException("Using 'storage_format="+ proj._storage_format + "' is not allowed for spiking synapses.", True)
        # For some of the sparse matrix formats we haven't implemented plasticity yet.
if proj.synapse_type.type == "spike" and proj._storage_format in ["dense"] and not isinstance(proj.synapse_type, DefaultSpikingSynapse):
raise Global.ANNarchyException("Using 'storage_format="+ proj._storage_format + "' is only allowed for default spiking synapses yet.", True)
        # For some of the sparse matrix formats we haven't implemented plasticity yet.
if proj.synapse_type.type == "rate" and proj._storage_format in ["coo", "hyb"] and not isinstance(proj.synapse_type, DefaultRateCodedSynapse):
raise Global.ANNarchyException("Using 'storage_format="+ proj._storage_format + "' is only allowed for default rate-coded synapses yet.", True)
# OpenMP disabled?
if proj._storage_format in ["bsr"] and Global.config["num_threads"]>1:
raise Global.ANNarchyException("Using 'storage_format="+ proj._storage_format + "' is not available for OpenMP yet.", True)
# Single weight optimization available?
if proj._has_single_weight() and proj._storage_format in ["dense"]:
raise Global.ANNarchyException("Using 'storage_format= |
umitproject/network-admin | netadmin/events/search_indexes.py | Python | agpl-3.0 | 1,285 | 0 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2011 Adriano Monteiro Marques
#
# Author: Piotrek Wasilewski <wasilewski.piotrek@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of t | he GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even | the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from models import Event
try:
import search
from search.core import startswith
search.register(Event, ('short_message', 'message'), indexer=startswith)
except ImportError:
from haystack import indexes
from haystack import site
class EventIndex(indexes.SearchIndex):
text = indexes.CharField(document=True, use_template=True)
def index_queryset(self):
return Event.objects.all()
site.register(Event, EventIndex)
|
cmouse/buildbot | master/buildbot/reporters/words.py | Python | gpl-2.0 | 50,996 | 0.001452 | # coding: utf-8
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import random
import re
import shlex
from twisted.internet import defer
from twisted.internet import protocol
from twisted.internet import reactor
from twisted.python import log
from twisted.python import usage
from twisted.web import resource
from twisted.web | import server
from buildbot import util
from buildbot import version
from buildbot.data import resultspec
from buildbot.plugins.db import get_plugins
from buildbot.process.properties impo | rt Properties
from buildbot.process.results import CANCELLED
from buildbot.process.results import EXCEPTION
from buildbot.process.results import FAILURE
from buildbot.process.results import RETRY
from buildbot.process.results import SKIPPED
from buildbot.process.results import SUCCESS
from buildbot.process.results import WARNINGS
from buildbot.process.results import statusToString
from buildbot.reporters import utils
from buildbot.util import epoch2datetime
from buildbot.util import service
from buildbot.util import unicode2bytes
# Used in command_HELLO and its test. 'Hi' in 100 languages.
GREETINGS = [
"ږغ كول ، هركلى كول ږغ، هركلى", "Goeie dag", "Tungjatjeta",
"Yatasay", "Ahlan bik", "Voghdzuyin", "hola", "kaixo", "Horas",
"Pryvitańnie", "Nomoskar", "Oki", "Selam", "Dez-mat", "Zdrávejte",
"Mingala ba", "Hola", "Hafa dai", "Oh-see-YOH", "Nín hao", "Bonjou",
"Zdravo", "Nazdar", "Hallo", "Hallo", "Iiti", "Kotáka", "Saluton", "Tere",
"Hallo", "Hallo", "Bula", "Helo", "Hei", "Goede morgen", "Bonjour", "Hoi",
"Ola", "Gamardžoba", "Guten Tag", "Mauri", "Geia!", "Inuugujoq", "Kem cho",
"Sannu", "Aloha", "Shalóm", "Namasté", "Szia", "Halló", "Hai", "Kiana",
"Dia is muire dhuit", "Buongiorno", "Kónnichi wa", "Salam",
"Annyeonghaseyo", "Na", "Sabai dii", "Ave", "Es mīlu tevi", "Labas.",
"Selamat petang", "Ni hao", "Kia ora", "Yokwe", "Kwe", "sain baina uu",
"niltze", "Yá'át'ééh", "Namaste", "Hallo.", "Salâm", "Witajcie", "Olá",
"Kâils", "Aroha", "Salut", "Privét", "Talofa", "Namo namah", "ćao",
"Nazdar", "Zdravo", "Hola", "Jambo", "Hej", "Sälü", "Halo", "Selam",
"Sàwàtdee kráp", "Dumela", "Merhaba", "Pryvít", "Adaab arz hai", "Chào",
"Glidis", "Helo", "Sawubona", "Hoi"]
class UsageError(ValueError):
# pylint: disable=useless-super-delegation
def __init__(self, string="Invalid usage", *more):
# This is not useless as we change the default value of an argument.
# This bug is reported as "fixed" but apparently, it is not.
# https://github.com/PyCQA/pylint/issues/1085
# (Maybe there is a problem with builtin exceptions).
super().__init__(string, *more)
class ForceOptions(usage.Options):
optParameters = [
["builder", None, None, "which Builder to start"],
["codebase", None, "", "which codebase to build"],
["branch", None, "master", "which branch to build"],
["revision", None, "HEAD", "which revision to build"],
["project", None, "", "which project to build"],
["reason", None, None, "the reason for starting the build"],
["props", None, None,
"A set of properties made available in the build environment, "
"format is --properties=prop1=value1,prop2=value2,.. "
"option can be specified multiple times."],
]
def parseArgs(self, *args):
args = list(args)
if args:
if self['builder'] is not None:
raise UsageError("--builder provided in two ways")
self['builder'] = args.pop(0)
if args: # args might be modified above
if self['reason'] is not None:
raise UsageError("--reason provided in two ways")
self['reason'] = " ".join(args)
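# Usage sketch (hypothetical input): with twisted's usage.Options machinery,
# ForceOptions().parseOptions(shlex.split("b1 fix the build")) leaves
# opts['builder'] == 'b1' and opts['reason'] == 'fix the build'.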
dangerous_commands = []
def dangerousCommand(method):
command = method.__name__
if not command.startswith('command_'):
raise ValueError('@dangerousCommand can be used only for commands')
dangerous_commands.append(command[8:])
return method
class Channel(service.AsyncService):
"""
This class holds what should be shared between users on a single channel.
In particular it is responsible for maintaining notification states and
    sending notifications.
"""
def __init__(self, bot, channel):
self.name = "Channel({})".format(channel)
self.id = channel
self.bot = bot
self.notify_events = set()
self.subscribed = []
self.build_subscriptions = []
self.reported_builds = [] # tuples (when, buildername, buildnum)
self.missing_workers = set()
self.useRevisions = bot.useRevisions
def send(self, message, **kwargs):
return self.bot.send_message(self.id, message, **kwargs)
def stopService(self):
if self.subscribed:
self.unsubscribe_from_build_events()
def validate_notification_event(self, event):
if not re.compile("^(started|finished|success|warnings|failure|exception|"
"cancelled|problem|recovery|worse|better|worker|"
# this is deprecated list
"(success|warnings|failure|exception)To"
"(Success|Warnings|Failure|Exception))$").match(event):
raise UsageError("Try '" + self.bot.commandPrefix + "notify on|off _EVENT_'.")
@defer.inlineCallbacks
def list_notified_events(self):
if self.notify_events:
yield self.send("The following events are being notified: {}."
.format(", ".join(sorted(self.notify_events))))
else:
yield self.send("No events are being notified.")
def notify_for(self, *events):
for event in events:
if event in self.notify_events:
return True
return False
@defer.inlineCallbacks
def subscribe_to_build_events(self):
startConsuming = self.master.mq.startConsuming
def buildStarted(key, msg):
return self.buildStarted(msg)
def buildFinished(key, msg):
return self.buildFinished(msg)
def workerEvent(key, msg):
if key[2] == 'missing':
return self.workerMissing(msg)
if key[2] == 'connected':
return self.workerConnected(msg)
return None
for e, f in (("new", buildStarted), # BuilderStarted
("finished", buildFinished)): # BuilderFinished
handle = yield startConsuming(f, ('builders', None, 'builds', None, e))
self.subscribed.append(handle)
handle = yield startConsuming(workerEvent, ('workers', None, None))
self.subscribed.append(handle)
def unsubscribe_from_build_events(self):
# Cancel all the subscriptions we have
old_list, self.subscribed = self.subscribed, []
for handle in old_list:
handle.stopConsuming()
def add_notification_events(self, events):
for event in events:
self.validate_notification_event(event)
self.notify_events.add(event)
if not self.subscribed:
self.subscribe_to_build_events()
def remove_notification_events(self, events):
for event in events:
self.validate_notification_event(event)
self.notify_events.remove(event)
if not self.notify_events:
self.unsubscribe_from_build_events()
|
dmlc/tvm | tests/python/relay/aot/aot_test_utils.py | Python | apache-2.0 | 30,132 | 0.002323 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import datetime
import itertools
import json
import logging
import os
import pathlib
import platform
import re
import shutil
import subprocess
import tarfile
from typing import Any, NamedTuple, Union, Optional, List, Dict
import pytest
import numpy as np
pytest.importorskip("tvm.micro")
import tvm
from tvm import relay
from tvm import te
from tvm.contrib import utils, graph_executor
from tvm.relay.backend import te_compiler, Executor, Runtime
from tvm.relay.backend.te_compiler import TECompiler
from tvm.relay.backend.utils import mangle_module_name
from tvm.micro import export_model_library_format
from tvm.micro.testing import mlf_extract_workspace_size_bytes
_LOG = logging.getLogger(__name__)
AOT_SUCCESS_TOKEN = "AOT_TEST_SUCCESS"
AOT_FAILURE_TOKEN = "AOT_TEST_FAILURE"
class AOTTestModel(NamedTuple):
"""Class to describe a model under test
Parameters
----------
module: tvm.IRModule
IRModule to generate AOT executor for
inputs: Dict[str, np.array]
Dict of input names to value arrays
    outputs: Dict[str, np.array]
Dict of output names to value arrays
output_tolerance: Optional[Union[int, float]]
Allowed tolerance of the output
name: str
Name to use for this model
params: Optional[Dict[str, np.array]]
Dict of parameter names to value arrays
extra_memory_in_bytes: int
Extra memory to allocate after planned memory
"""
module: tvm.IRModule
inputs: Dict[str, np.array]
outputs: Dict[str, np.array]
output_tolerance: Optional[Union[int, float]] = None
name: str = "default"
params: Optional[Dict[str, np.array]] = None
extra_memory_in_bytes: int = 0
class AOTCompiledTestModel(NamedTuple):
"""A compiled AOTTestModel with associated module
Parameters
----------
model: AOTTestModel
Input model to be compiled
    executor_factory: tvm.relay.backend.executor_factory.AOTExecutorFactoryModule
        The compiled factory module for the associated AOTTestModel
"""
model: AOTTestModel
executor_factory: tvm.relay.backend.executor_factory.AOTExecutorFactoryModule
class AOTDataLinkage(NamedTuple):
"""A compiled AOTTestModel with associated module
Parameters
----------
section: str
Named section to place data into
alignment: int
Section alignment
"""
section: str
alignment: int
class AOTTestRunner(NamedTuple):
"""Class to describe a test runner for AOT code
Parameters
----------
makefile: str
Premade Makefile to use from the AOT test folder
prologue: str
Code to prepend to the main function
epilogue: str
Code to append to the main function
includes: List[str]
Additional includes required to run the AOT test runner
parameters: Dict[str, str]
Additional parameters to pass to the make command
pass_config: Dict[str, Any]
Additional pass configuration when building the model
"""
makefile: str = "default"
prologue: str = ""
epilogue: str = ""
includes: List[str] = []
parameters: Dict[str, str] = {}
pass_config: Dict[str, Any] = {}
AOT_DEFAULT_RUNNER = AOTTestRunner()
# AOT Test Runner using the Arm® Corstone™-300 Reference Systems
# see: https://developer.arm.com/ip-products/subsystem/corstone/corstone-300
AOT_CORSTONE300_RUNNER = AOTTestRunner(
makefile="corstone300",
prologue="""
uart_init();
""",
includes=["uart.h"],
pass_config={
"relay.ext.cmsisnn.options": {
"mcpu": "cortex-m55",
}
},
)
def mangle_name(mod_name, name):
mod_name = mangle_module_name(mod_name)
return mod_name + "_" + name
def convert_to_relay(
tflite_model_buf,
):
"""Convert a tflite model buffer in a Relay module"""
# TFLite.Model.Model has changed to TFLite.Model from 1.14 to 2.1
try:
import tflite.Model
tflite_model = tflite.Model.Model.GetRootAsModel(tflite_model_buf, 0)
except AttributeError:
import tflite
tflite_model = tflite.Model.GetRootAsModel(tflite_model_buf, 0)
except ImportError:
raise ImportError("The tflite package must be installed")
mod, params = relay.frontend.from_tflite(tflite_model)
mod["main"] = relay.build_module.bind_params_by_name(mod["main"], params)
return mod, params
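# Usage sketch (hypothetical file path): lower a .tflite flatbuffer to Relay.
# with open("model.tflite", "rb") as f:
#     mod, params = convert_to_relay(f.read())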
def parametrize_aot_options(test):
"""Parametrize over valid option combinations"""
skip_i386 = pytest.mark.skipif(
platform.machine() == "i686", reason="Reference system unavailable in i386 container"
)
requires_arm_eabi = pytest.mark.skipif(
shutil.which("arm-none-eabi-gcc") is None, reason="ARM embedded toolchain unavailable"
)
interface_api = ["packed", "c"]
use_unpacked_api = [True, False]
test_runner = [AOT_DEFAULT_RUNNER, AOT_CORSTONE300_RUNNER]
all_combinations = itertools.product(interface_api, use_unpacked_api, test_runner)
# Filter out packed operators with c int | erface
valid_combinations = filter(
lambda parameters: not (parameters[0] == "c" and not parameters[1]),
all_combinations,
)
# Only use reference system for C interface and unpacked API calls
valid_combinations = filter(
lambda parameters: not (
parameters[2] == AOT_CORSTONE300_RUNNER
and (parameters[0] == "packed" or not parameters[1])
),
| valid_combinations,
)
# Skip reference system tests if running in i386 container
marked_combinations = map(
lambda parameters: pytest.param(*parameters, marks=[skip_i386, requires_arm_eabi])
if parameters[2] == AOT_CORSTONE300_RUNNER
else parameters,
valid_combinations,
)
return pytest.mark.parametrize(
["interface_api", "use_unpacked_api", "test_runner"],
marked_combinations,
)(test)
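# Usage sketch (hypothetical test): applying the decorator expands the test
# into one pytest case per valid combination.
# @parametrize_aot_options
# def test_conv2d(interface_api, use_unpacked_api, test_runner):
#     ...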
def subprocess_log_output(cmd, cwd, logfile):
"""
This method runs a process and logs the output to both a log file and stdout
"""
_LOG.info("Execute (%s): %s", cwd, cmd)
cmd_base = cmd[0] if isinstance(cmd, (list, tuple)) else cmd.split(" ", 1)[0]
proc = subprocess.Popen(
cmd, cwd=cwd, shell=True, bufsize=0, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
)
with open(logfile, "ab") as f:
f.write(
bytes(
"\n"
+ "-" * 80
+ f"{datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')}: Execute ({cwd}): {cmd}\n"
+ "-" * 80,
"utf-8",
)
)
while True:
data = proc.stdout.readline()
_LOG.debug("%s: %s", cmd_base, str(data, "utf-8", "replace").rstrip("\n"))
f.write(data)
            # the process is done once readline returns no more data (EOF)
if not data: # EOF
break
return proc.wait()
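# Usage sketch (hypothetical paths): run a build command, teeing its output
# into a persistent log file.
# retcode = subprocess_log_output("make -j4", cwd="/tmp/build", logfile="/tmp/build/test.log")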
# TODO: Move to linker script with list of symbols rather than coding into source
def emit_data_linkage(output_file, data_linkage):
if data_linkage is not None:
output_file.write(
f'__attribute__((section("{data_linkage.section}"), aligned({data_linkage.alignment}))) '
)
def emit_main_prologue(
main_file, custom_prologue, workspace_bytes, data_linkage, compiled_models, interface_api
):
# Add TVM_RUNTIME_ALLOC_ALIGNMENT_BYTES because of memory alignment.
|
dmnfarrell/smallrnaseq | smallrnaseq/config.py | Python | gpl-3.0 | 4,475 | 0.01676 | #!/usr/bin/env python
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRA | NTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundatio | n, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
Module for smallrnaseq configuration file. Used with command line app.
Created Jan 2017
Copyright (C) Damien Farrell
"""
from __future__ import absolute_import, print_function
import sys, os, string, time
import types, re, subprocess, glob, shutil
import pandas as pd
try:
import configparser
except:
import ConfigParser as configparser
path = os.path.dirname(os.path.abspath(__file__))
datadir = os.path.join(path, 'data')
from . import aligners
baseoptions = {'base': [('filenames',''),('path',''),('overwrite',0),
('adapter',''),
('index_path','indexes'),
('libraries',''),
('ref_fasta',''),('features',''),
('output','results'),('add_labels',0),
('aligner','bowtie'),
('mirna',0),('species','hsa'),('pad5',3),('pad3',5),
('verbose', 1),
('cpus',1)],
'aligner': [('default_params','-v 1 --best'),
('mirna_params',aligners.BOWTIE_MIRBASE_PARAMS)],
'novel': [('score_cutoff',.7), ('read_cutoff',100),
('strict',0)],
'de': [('count_file',''),('sample_labels',''),('sep',','),
('sample_col',''),('factors_col',''),
('conditions',''),('logfc_cutoff',1.5),
('de_plot','point')]
}
def write_default_config(conffile='default.conf', defaults={}):
"""Write a default config file"""
if not os.path.exists(conffile):
cp = create_config_parser_from_dict(defaults, ['base','novel','aligner','de'])
cp.write(open(conffile,'w'))
print ('wrote config file %s' %conffile)
return conffile
def create_config_parser_from_dict(data, sections, **kwargs):
"""Helper method to create a ConfigParser from a dict and/or keywords"""
cp = configparser.ConfigParser()
for s in sections:
cp.add_section(s)
        if s not in data:
continue
for i in data[s]:
name,val = i
cp.set(s, name, str(val))
#use kwargs to create specific settings in the appropriate section
for s in cp.sections():
opts = cp.options(s)
for k in kwargs:
if k in opts:
cp.set(s, k, kwargs[k])
return cp
def parse_config(conffile=None):
"""Parse a configparser file"""
f = open(conffile,'r')
cp = configparser.ConfigParser()
try:
cp.read(conffile)
except Exception as e:
print ('failed to read config file! check format')
print ('Error returned:', e)
return
f.close()
return cp
def get_options(cp):
"""Makes sure boolean opts are parsed"""
from collections import OrderedDict
options = OrderedDict()
#options = cp._sections['base']
for section in cp.sections():
options.update( (cp._sections[section]) )
for o in options:
for section in cp.sections():
try:
options[o] = cp.getboolean(section, o)
except:
pass
try:
options[o] = cp.getint(section, o)
except:
pass
return options
def print_options(options):
"""Print option key/value pairs"""
for key in options:
print (key, ':', options[key])
print ()
def check_options(opts):
"""Check for missing default options in dict. Meant to handle
incomplete config files"""
sections = baseoptions.keys()
for s in sections:
defaults = dict(baseoptions[s])
for i in defaults:
if i not in opts:
opts[i] = defaults[i]
return opts
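# A minimal end-to-end sketch of the configuration workflow defined above:
# conffile = write_default_config('default.conf', defaults=baseoptions)
# opts = check_options(get_options(parse_config(conffile)))
# print_options(opts)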
|
jvrsantacruz/XlsxWriter | xlsxwriter/test/comparison/test_hyperlink14.py | Python | bsd-2-clause | 1,222 | 0.000818 | ###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), | 2013-2015, John McN | amara, jmcnamara@cpan.org
#
from ..excel_comparsion_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
"""
Test file created by XlsxWriter against a file created by Excel.
"""
def setUp(self):
self.maxDiff = None
filename = 'hyperlink14.xlsx'
test_dir = 'xlsxwriter/test/comparison/'
self.got_filename = test_dir + '_test_' + filename
self.exp_filename = test_dir + 'xlsx_files/' + filename
self.ignore_files = []
self.ignore_elements = {}
def test_create_file(self):
"""Test the creation of a simple XlsxWriter file with hyperlinks. This example has writes a url in a range."""
workbook = Workbook(self.got_filename)
worksheet = workbook.add_worksheet()
format = workbook.add_format({'align': 'center'})
worksheet.merge_range('C4:E5', '', format,)
worksheet.write_url('C4', 'http://www.perl.org/', format, 'Perl Home')
workbook.close()
self.assertExcelEqual()
|
adhoc-dev/odoo-logistic | addons/logistic_x/__init__.py | Python | agpl-3.0 | 1,161 | 0 | # -*- coding: utf-8 -*-
##############################################################################
#
# Ingenieria ADHOC - ADHOC SA
# https://launchpad.net/~ingenieria-adhoc
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import waybill
import wizard
import travel
import vehicle
import requirement
import r | es_partner
import waybill_expense
import account_invoice
|
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
wolfstein9119/django-russian-fields | russian_fields/gender.py | Python | bsd-2-clause | 975 | 0 | from django.db import models
from django.core.validators import MinLengthValidator
from django.utils.translation import ugettext_lazy as _
class GENDERField(models.CharField):
description = 'Gender'
DEFAULT_MAX_LENGTH = DEFAULT_MIN_LENGTH = | 1
GENDER_MALE = 'M'
GENDER_FEMALE = 'F'
GENDER_CHOICES = (
(GENDER_MALE, _('Male')),
(GENDER_FEMALE, _('Female')),
)
def __init__(self, *args, **kwargs):
max_length = self.DEFAULT_MAX_LENGTH
min_length = self.DEFAULT_MIN_LENGTH
kwargs['max_length'] = max_length
kwargs['choices'] = s | elf.GENDER_CHOICES
super(GENDERField, self).__init__(*args, **kwargs)
self.validators.extend([
MinLengthValidator(min_length),
])
def deconstruct(self):
name, path, args, kwargs = super(GENDERField, self).deconstruct()
del kwargs['max_length']
del kwargs['choices']
return name, path, args, kwargs
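# Example usage (a sketch; 'Person' is a hypothetical model):
# class Person(models.Model):
#     gender = GENDERField()  # stores a single character, 'M' or 'F'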
|
ForestClaw/forestclaw | applications/clawpack/advection/2d/swirl/p_00002.py | Python | bsd-2-clause | 454 | 0.028634 | # comment = "Torus example : eff. resolution = 2048 x 2048"
import sys
import os
import subprocess
import random
np = 2
exec = " | swirl"
arg_list = ["mpirun","-n",str(np),exec,"--inifile=timing.ini"]
jobid = random.randint(1000,9999)
outfile = "{:s}_0000{:d}.o{:d}".format(exec,np,jobid)
f = open(outfile,'w')
po = subprocess.Popen(arg_list,stdout=f)
print("Starting process {:d} with jobid {:d} on {:d} processor(s).".format(po.pid,jobid,np))
#po.wait()
| |
WhiteMagic/JoystickGremlin | container_plugins/double_tap/__init__.py | Python | gpl-3.0 | 13,349 | 0.000075 | # -*- coding: utf-8; -*-
# Copyright (C) 2015 - 2019 Lionel Ott
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import copy
import logging
import threading
import time
from xml.etree import ElementTree
from PyQt5 import QtWidgets
import gremlin
import gremlin.ui.common
import gremlin.ui.input_item
class DoubleTapContainerWidget(gremlin.ui.input_item.AbstractContainerWidget):
"""DoubleTap container for actions for double or single taps."""
def __init__(self, profile_data, parent=None):
"""Creates a new instance.
:param profile_data the profile data represented by this widget
:param parent the parent of this widget
"""
super().__init__(profile_data, parent)
def _create_action_ui(self):
"""Creates the UI components."""
self.profile_data.create_or_delete_virtual_button()
self.options_layout = QtWidgets.QHBoxLayout()
# Activation delay
self.options_layout.addWidget(
QtWidgets.QLabel("<b>Dou | ble-tap delay: </b>")
)
self.delay_input = gremlin.ui.common.DynamicDoubleSpinBox()
self.delay_input.setRange(0.1, 2.0)
| self.delay_input.setSingleStep(0.1)
self.delay_input.setValue(0.5)
self.delay_input.setValue(self.profile_data.delay)
self.delay_input.valueChanged.connect(self._delay_changed_cb)
self.options_layout.addWidget(self.delay_input)
self.options_layout.addStretch()
# Activation moment
self.options_layout.addWidget(QtWidgets.QLabel("<b>Single/Double Tap: </b>"))
self.activate_exclusive = QtWidgets.QRadioButton("exclusive")
self.activate_combined = QtWidgets.QRadioButton("combined")
if self.profile_data.activate_on == "combined":
self.activate_combined.setChecked(True)
else:
self.activate_exclusive.setChecked(True)
self.activate_combined.toggled.connect(self._activation_changed_cb)
self.activate_exclusive.toggled.connect(self._activation_changed_cb)
self.options_layout.addWidget(self.activate_exclusive)
self.options_layout.addWidget(self.activate_combined)
self.action_layout.addLayout(self.options_layout)
if self.profile_data.action_sets[0] is None:
self._add_action_selector(
lambda x: self._add_action(0, x),
"Single Tap"
)
else:
self._create_action_widget(
0,
"Single Tap",
self.action_layout,
gremlin.ui.common.ContainerViewTypes.Action
)
if self.profile_data.action_sets[1] is None:
self._add_action_selector(
lambda x: self._add_action(1, x),
"Double Tap"
)
else:
self._create_action_widget(
1,
"Double Tap",
self.action_layout,
gremlin.ui.common.ContainerViewTypes.Action
)
def _create_condition_ui(self):
if self.profile_data.activation_condition_type == "action":
if self.profile_data.action_sets[0] is not None:
self._create_action_widget(
0,
"Single Tap",
self.activation_condition_layout,
gremlin.ui.common.ContainerViewTypes.Condition
)
if self.profile_data.action_sets[1] is not None:
self._create_action_widget(
1,
"Double Tap",
self.activation_condition_layout,
gremlin.ui.common.ContainerViewTypes.Condition
)
def _add_action_selector(self, add_action_cb, label):
"""Adds an action selection UI widget.
:param add_action_cb function to call when an action is added
:param label the description of the action selector
"""
action_selector = gremlin.ui.common.ActionSelector(
self.profile_data.get_input_type()
)
action_selector.action_added.connect(add_action_cb)
group_layout = QtWidgets.QVBoxLayout()
group_layout.addWidget(action_selector)
group_layout.addStretch(1)
group_box = QtWidgets.QGroupBox(label)
group_box.setLayout(group_layout)
self.action_layout.addWidget(group_box)
def _create_action_widget(self, index, label, layout, view_type):
"""Creates a new action widget.
:param index the index at which to store the created action
:param label the name of the action to create
"""
widget = self._create_action_set_widget(
self.profile_data.action_sets[index],
label,
view_type
)
layout.addWidget(widget)
widget.redraw()
widget.model.data_changed.connect(self.container_modified.emit)
def _add_action(self, index, action_name):
"""Adds a new action to the container.
:param action_name the name of the action to add
"""
plugin_manager = gremlin.plugin_manager.ActionPlugins()
action_item = plugin_manager.get_class(action_name)(self.profile_data)
if self.profile_data.action_sets[index] is None:
self.profile_data.action_sets[index] = []
self.profile_data.action_sets[index].append(action_item)
self.profile_data.create_or_delete_virtual_button()
self.container_modified.emit()
def _delay_changed_cb(self, value):
"""Updates the activation delay value.
:param value the value after which the double-tap action activates
"""
self.profile_data.delay = value
def _activation_changed_cb(self, value):
"""Updates the activation condition state.
:param value whether or not the selection was toggled - ignored
"""
if self.activate_combined.isChecked():
self.profile_data.activate_on = "combined"
else:
self.profile_data.activate_on = "exclusive"
def _handle_interaction(self, widget, action):
"""Handles interaction icons being pressed on the individual actions.
:param widget the action widget on which an action was invoked
:param action the type of action being invoked
"""
index = self._get_widget_index(widget)
if index != -1:
if index == 0 and self.profile_data.action_sets[0] is None:
index = 1
self.profile_data.action_sets[index] = None
self.container_modified.emit()
def _get_window_title(self):
"""Returns the title to use for this container.
:return title to use for the container
"""
if self.profile_data.is_valid():
return "Double Tap: ({}) / ({})".format(
", ".join([a.name for a in self.profile_data.action_sets[0]]),
", ".join([a.name for a in self.profile_data.action_sets[1]])
)
else:
return "DoubleTap"
class DoubleTapContainerFunctor(gremlin.base_classes.AbstractFunctor):
"""Executes the contents of the associated DoubleTap container."""
def __init__(self, container):
super().__init__(container)
self.single_tap = gremlin.execution_graph.ActionSetExecutionGraph(
container.action_sets[0]
)
self.double_tap = gremlin.execution_graph.ActionSetExecutionGraph(
container.action_sets[1]
)
self.delay = co |
Aravinthu/odoo | addons/mail/controllers/bus.py | Python | agpl-3.0 | 3,026 | 0.003966 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import SUPERUSER_ID
from odoo.http import request, route
from odoo.addons.bus.controllers.main import BusController
class MailChatController(BusController):
def _default_request_uid(self):
""" For Anonymous people, they receive the access right of SUPERUSER_ID since they have NO access (auth=none)
!!! Each time a method from this controller is call, there is a check if the user (who can be anonymous and Sudo access)
can access to the resource.
"""
return request.session.uid and request.session.uid or SUPERUSER_ID
# --------------------------
# Extends BUS Controller Poll
# --------------------------
def _poll(self, dbname, channels, last, options):
if request.session.uid:
partner_id = request.env.user.partner_id.id
if partner_id:
channels = list(channels) # do not alter original list
for mail_channel in request.env['mail.channel'].search([('channel_partner_ids', 'in', [partner_ | id])]):
channels.append((request.db, 'mail.channel', mail_channel.id))
# personal and needaction channel
channels.append((request.db, 'res.partner', partner_id))
channels.append((request.db, 'ir.needaction', partner_id))
return super(MailChatController, self)._poll(dbname, channels, last, options)
# --------------- | -----------
# Anonymous routes (Common Methods)
# --------------------------
@route('/mail/chat_post', type="json", auth="none")
def mail_chat_post(self, uuid, message_content, **kwargs):
request_uid = self._default_request_uid()
# find the author from the user session, which can be None
author_id = False # message_post accept 'False' author_id, but not 'None'
if request.session.uid:
author_id = request.env['res.users'].sudo().browse(request.session.uid).partner_id.id
# post a message without adding followers to the channel. email_from=False avoid to get author from email data
mail_channel = request.env["mail.channel"].sudo(request_uid).search([('uuid', '=', uuid)], limit=1)
message = mail_channel.sudo(request_uid).with_context(mail_create_nosubscribe=True).message_post(author_id=author_id, email_from=False, body=message_content, message_type='comment', subtype='mail.mt_comment', content_subtype='plaintext', **kwargs)
return message and message.id or False
@route(['/mail/chat_history'], type="json", auth="none")
def mail_chat_history(self, uuid, last_id=False, limit=20):
request_uid = self._default_request_uid()
channel = request.env["mail.channel"].sudo(request_uid).search([('uuid', '=', uuid)], limit=1)
if not channel:
return []
else:
return channel.sudo(request_uid).channel_fetch_message(last_id, limit)
|
IllusionRom-deprecated/android_platform_tools_idea | python/testData/formatter/continuationIndentForCallInStatementPart_after.py | Python | apache-2.0 | 108 | 0.009259 | for item in really_long_name_of_the_function_wit | h_a_lot_of_pat | ams(
param1, param2, param3):
pass |
toomoresuch/pysonengine | parts/gaeunit/test/HtmlTestCaseTest.py | Python | mit | 3,299 | 0.003637 | '''
Created on May 5, 2009
@author: george
'''
import unittest
import gaeunit
class Test(unittest.TestCase):
tc = gaeunit.GAETestCase("run")
def test_html_compare_ignorable_blank(self):
html1 = """ <div> test text
</div> """
html2 = """<div>test text</div>"""
self.tc.assertHtmlEqual(html1, html2)
def test_html_compare_unignorable_blank(self):
html1 = """<div>test text</div>"""
html2 = """<div>testtext</div>"""
self.assertRaises(AssertionError, self.tc.assertHtmlEqual, html1, html2)
def test_kill_extra_blank(self):
html1 = """ < div class="aaa"> test test\t </div > """
html2 = """<div class="aaa">test test</div>"""
self.assertEqual(self.tc._formalize(html1), html2)
def test_replace_return_sign(self):
html1 = """test
test\r\n"""
html2 = """test test """
self.assertEqual(self.tc._formalize(html1), html2)
def test_unescape(self):
html1 = "< & >"
html2 = "< & >"
self.assertEqual(self.tc._formalize(html1), html2)
def test_findHtmlDifference(self):
html1 = "abcdef"
html2 = "abccef"
result_expected = "\nabcdef\nabccef\n___^"
| result = self.tc._findHtmlDifference(html | 1, html2)
self.assertEqual(result, result_expected)
def test_findHtmlDifference_long(self):
html1 = "aaaaabbbbbcccccdddddeeeeefffffggggghhhhhiiiiijjjjjkkkkk"
html2 = "aaaaabbbbbcccccdddddeeeeeeffffggggghhhhhiiiiijjjjjkkkkk"
result_expected = "\n...bbbbbcccccdddddeeeeefffffggggghhhhhiiiiij...\n...bbbbbcccccdddddeeeeeeffffggggghhhhhiiiiij...\n_______________________^"
result = self.tc._findHtmlDifference(html1, html2)
self.assertEqual(result, result_expected)
def test_findHtmlDifference_long_leftmost(self):
html1 = "aaaaabbbbbcccccdddddeeeeefffffggggghhhhhiiiiijjjjjkkkkk"
html2 = "aaaabbbbbbcccccdddddeeeeefffffggggghhhhhiiiiijjjjjkkkkk"
result_expected = "\naaaaabbbbbcccccdddddeeeeefffffggggghhhhhi...\naaaabbbbbbcccccdddddeeeeefffffggggghhhhhi...\n____^"
result = self.tc._findHtmlDifference(html1, html2)
self.assertEqual(result, result_expected)
def test_findHtmlDifference_long_rightmost(self):
html1 = "aaaaabbbbbcccccdddddeeeeefffffggggghhhhhiiiiijjjjjkkkkk"
html2 = "aaaaabbbbbcccccdddddeeeeefffffggggghhhhhiiiiijjjjkkkkkk"
result_expected = "\n...cdddddeeeeefffffggggghhhhhiiiiijjjjjkkkkk\n...cdddddeeeeefffffggggghhhhhiiiiijjjjkkkkkk\n______________________________________^"
result = self.tc._findHtmlDifference(html1, html2)
self.assertEqual(result, result_expected)
class SystemTest(gaeunit.GAETestCase):
def test_html_compare_ignorable_blank(self):
html1 = """ <div> test text
</div> """
html2 = """<div>test text</div>"""
self.assertHtmlEqual(html1, html2)
def test_html_compare_unignorable_blank(self):
html1 = """<div>test text</div>"""
html2 = """<div>testtext</div>"""
self.assertRaises(AssertionError, self.assertHtmlEqual, html1, html2)
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main() |
uiureo/demae | tests/test_util.py | Python | mit | 245 | 0 | from demae.util import split_size
def test_split_size():
assert split_size(list(range(6)), 3) == [[0, 1], [2, 3], [4, 5]]
assert spli | t_size(list(range(7)), 3) == [[0, 1], [2, 3], [4, 5, | 6]]
assert split_size([], 3) == [[], [], []]
|
cernbox/entf | IO_testing/payload_gen.py | Python | agpl-3.0 | 4,528 | 0.006405 | #-------------------------------------------------------------------------------
# Produce random files
# Support file to generate content
#-------------------------------------------------------------------------------
#from PIL import Image
import numpy
import uuid
import array
import os
import random, tempfile
word_list = []
#-------------------------------------------------------------------------------
# Reads the word list to use for the 'readable' text tests
#-------------------------------------------------------------------------------
def load_word_list():
# Word list from FreeBSD Dict
f = open("wordlist", "r")
for line in f:
word_list.append(line.strip())
return
'''
# TODO:
# - This is by now not supported as PIL (pillow) package
# is required to produce output and may not be available locally
#-------------------------------------------------------------------------------
# Create a valid but random image (jpeg or png)
#-------------------------------------------------------------------------------
def get_image(size, extension):
fsize = 0
fname = os.path.join(tempfile.gettempdir(), \
'%s.%s' % (str(uuid.uuid4()), extension))
# This will give a max JPEG of few mega
x_max = 3500
x_min = 0
while x_max >= x_min:
# Mid of the interval
x = 1.0 * (x_max + x_min) / 2
# save the image
a = numpy.random.rand(x,x,3) * 255
image_out = Image.fromarray(a.astype('uint8')).convert('RGBA')
image_out.save(fname)
fsize = os.path.getsize(fname)
# lets go on binary search for the size
diff = (1.0 * (fsize - size)/ size)
if abs(diff) > 0.01 and diff > 0:
x_max = x
elif abs(diff) > 0.01 and diff < 0:
x_min = x
elif x_max - x_min <= | 2.0:
break
| else:
break
# got it!
return fname
'''
#-------------------------------------------------------------------------------
# Produces a random text from the word_list given the size in bytes.
# Returns exactly this amount of bytes, eventually truncating a word.
#-------------------------------------------------------------------------------
def get_text(size):
t = ""
while len(t) < size:
t += random.choice(word_list)
# We want to read the text :)
if random.random() > 0.1:
t += " "
else:
t += "\r\n"
t = t[0:size]
return t
#-------------------------------------------------------------------------------
# Produces a random bunch of bytes given the size in bytes
# Returns exactly this amount of bytes.
#-------------------------------------------------------------------------------
def get_binary(size):
return bytearray(random.getrandbits(8) for i in range(size))
#-------------------------------------------------------------------------------
# Generic wrapper for file generation
# Prepares the file for the test given size and the file type
#
# File types:
# 1. Random bytes
# 2. Random text
# 3. JPEG image (extension, magic numbers, random pixels)
# 4. Fake JPEG image (extension, magic numbers, plain text)
#-------------------------------------------------------------------------------
def make_file(size, file_type):
if file_type == 1:
# put random data in the file -- call it a gzip
rand_bytes = bytearray(random.getrandbits(8) for i in range(size-2))
tfile = tempfile.NamedTemporaryFile(delete=False, suffix=".gz")
fname = tfile.name
tfile.write(array.array('B', "1f8b".decode("hex")))
tfile.write(rand_bytes)
tfile.close()
return fname
elif file_type == 2:
# start-up the dictionary
load_word_list()
# put a readable text in the file
tfile = tempfile.NamedTemporaryFile(delete=False, suffix=".txt")
fname = tfile.name
tfile.write(get_text(size))
tfile.close()
return fname
elif file_type == 3:
# put a valid jpeg image with random pixels
return get_image(size, "jpeg")
elif file_type == 4:
# create a file with magic number of jpeg, jpeg extension,
# but only normal text afterward
tfile = tempfile.NamedTemporaryFile(delete=False, suffix=".jpeg")
fname = tfile.name
tfile.write(array.array('B', "ffd8ffe0".decode("hex")))
tfile.write(get_text(size-4))
tfile.close()
return fname
|
alviano/wasp | tests/asp/cautious/count.example5.cautious.asp.test.py | Python | apache-2.0 | 418 | 0 | input = """
1 2 0 0
1 3 0 0 |
1 4 0 0
1 5 0 0
1 6 2 1 7 8
1 7 2 1 6 8
1 8 0 0
1 9 2 1 10 11
1 10 2 1 9 11
1 11 0 0
1 12 2 1 13 14
1 13 2 1 12 14
1 14 0 0
1 15 2 1 16 17
1 16 2 1 15 17
1 17 0 0
2 18 2 0 2 12 6
1 1 1 0 18
2 19 2 0 2 15 9
1 1 1 0 19
0
6 a(b,2)
9 a(b,1)
12 a(a,2)
15 a(a,1)
4 c(a)
5 c(b)
7 na(b,2)
10 na(b,1)
13 na(a,2)
16 na(a,1)
2 b(1)
3 b(2)
0
B+
0
B-
1
0
1
"""
output = """
{b(1), b(2), c(a), c(b | )}
"""
|
Ruben0001/Mango | setup.py | Python | mit | 1,060 | 0 | import codecs
import os
from distutils.core import setup
HERE = os.path.abspath(os.path.dirname(__file__))
def read(*parts):
"""
Build an absolute path from *parts* and and ret | urn the contents of the
resulting file. Assume UTF-8 encoding.
"""
with codecs.open(os.path.join(HERE, *parts), "rb", "utf-8") as f:
return f.read()
setup(
name='kik',
version='1.2.0',
packages=['kik', 'kik.messages'],
package_dir={
'kik': 'kik',
| 'kik.messages': 'kik/messages'
},
author='kik',
author_email='bots@kik.com',
url='https://dev.kik.com',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4'
],
long_description=read("README.rst"),
install_requires=[
'requests>=2.3.0',
'six==1.10.0'
]
)
|
aweisberg/cassandra-dtest | largecolumn_test.py | Python | apache-2.0 | 3,367 | 0.003861 | import pytest
import re
import logging
from dtest import Tester
since = pytest.mark.since
logg | er = logging.getLogger(__name__)
@since('2.2')
class TestLargeColumn(Tester):
"""
    Check that inserting large columns into the database and reading them back doesn't cause
    off-heap memory usage proportional to the size of the data read/written.
"""
def stress_with_col_size(self, cluster, node, size):
size = str(size)
node.stress(['write', 'n=5', "no-warmup", "cl=ALL", "-pop", "seq=1...5", "-sc | hema", "replication(factor=2)", "-col", "n=fixed(1)", "size=fixed(" + size + ")", "-rate", "threads=1"])
node.stress(['read', 'n=5', "no-warmup", "cl=ALL", "-pop", "seq=1...5", "-schema", "replication(factor=2)", "-col", "n=fixed(1)", "size=fixed(" + size + ")", "-rate", "threads=1"])
def directbytes(self, node):
def is_number(s):
try:
float(s)
return True
except ValueError:
return False
output, err, _ = node.nodetool("gcstats")
logger.debug(output)
output = output.split("\n")
assert re.search('Interval', output[0].strip())
fields = output[1].split()
        assert len(fields) >= 6, "Expected output from nodetool gcstats to have at least six fields. However, fields is: {}".format(fields)
for field in fields:
            assert is_number(field.strip()) or field == 'NaN', "Expected numeric fields from nodetool gcstats. However, field.strip() is: {}".format(field.strip())
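        # fields[6] is assumed to be the direct (off-heap) memory bytes column
        # of the gcstats output, matching this method's name.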
return fields[6]
def test_cleanup(self):
"""
@jira_ticket CASSANDRA-8670
"""
cluster = self.cluster
# Commit log segment size needs to increase for the database to be willing to accept columns that large
# internode compression is disabled because the regression being tested occurs in NIO buffer pooling without compression
cluster.set_configuration_options({'commitlog_segment_size_in_mb': 128, 'internode_compression': 'none'})
# Have Netty allocate memory on heap so it is clear if memory used for large columns is related to intracluster messaging
cluster.populate(2).start(jvm_args=[" -Dcassandra.netty_use_heap_allocator=true "])
node1, node2 = cluster.nodelist()
session = self.patient_cql_connection(node1)
logger.debug("Before stress {0}".format(self.directbytes(node1)))
logger.debug("Running stress")
# Run the full stack to see how much memory is utilized for "small" columns
self.stress_with_col_size(cluster, node1, 1)
beforeStress = self.directbytes(node1)
logger.debug("Ran stress once {0}".format(beforeStress))
# Now run the full stack to see how much memory is utilized for "large" columns
LARGE_COLUMN_SIZE = 1024 * 1024 * 63
self.stress_with_col_size(cluster, node1, LARGE_COLUMN_SIZE)
output, err, _ = node1.nodetool("gcstats")
afterStress = self.directbytes(node1)
logger.debug("After stress {0}".format(afterStress))
# Any growth in memory usage should not be proportional column size. Really almost no memory should be used
# since Netty was instructed to use a heap allocator
diff = int(afterStress) - int(beforeStress)
assert diff < LARGE_COLUMN_SIZE
|
mandli/surge-examples | michael/setplot.py | Python | mit | 11,195 | 0.002144 |
"""
Set up the plot figures, axes, and items to be done for each frame.
This module is imported by the plotting routines and then the
function setplot is called to set the plot parameters.
"""
import os
import numpy as np
import matplotlib.pyplot as plt
import datetime
from clawpack.geoclaw.util import fetch_noaa_tide_data
from clawpack.visclaw import colormaps, gaugetools
import clawpack.clawutil.data
import clawpack.amrclaw.data
import clawpack.geoclaw.data
import clawpack.geoclaw.surge.plot as surge
try:
from setplotfg import setplotfg
except:
setplotfg = None
def setplot(plotdata):
r"""Setplot function for surge plotting"""
plotdata.clearfigures() # clear any old figures,axes,items data
plotdata.format = 'binary'
# Load data from output
clawdata = clawpack.clawutil.data.ClawInputData(2)
clawdata.read(os.path.join(plotdata.outdir,'claw.data'))
physics = clawpack.geoclaw.data.GeoClawData()
physics.read(os.path.join(plotdata.outdir,'geoclaw.data'))
surge_data = clawpack.geoclaw.data.SurgeData()
surge_data.read(os.path.join(plotdata.outdir,'surge.data'))
friction_data = clawpack.geoclaw.data.FrictionData()
friction_data.read(os.path.join(plotdata.outdir,'friction.data'))
    # Load storm track
track = surge.track_data(os.path.join(plotdata.outdir,'fort.track'))
# Set afteraxes function
def surge_afteraxes(cd):
return surge.surge_afteraxes(cd, track, plot_direction=False)
# Limits for plots
dx = 0.5
dy = 0.5
regions = [{"name": "Full Domain",
"limits": [[clawdata.lower[0], clawdata.upper[0]],
[clawdata.lower[1], clawdata.upper[1]]]},
{"name": "New Orleans",
"limits": [[-92.5, -87.5], [27.5, 31.5]]},
{"name": "Grand Isle",
"limits": [[-89.41 - dx, -89.41 + dx],
[29.26 - dx, 29.26 + dx]]},
{"name": "Pilots Station East",
"limits": [[-89.41 - dx, -89.41 + dx],
[28.93 - dy, 28.93 + dy]]},
{"name": "Dauphin Island",
"limits": [[-88.08 - dx, -88.08 + dx],
[30.25 - dy, 30.25 + dy]]}]
# Color limits
surface_limits = [physics.sea_level - 5.0, physics.sea_level + 5.0]
surface_ticks = [-5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5]
surface_labels = [str(value) for value in surface_ticks]
speed_limits = [0.0, 3.0]
speed_ticks = [0, 1, 2, 3]
speed_labels = [str(value) for value in speed_ticks]
wind_limits = [15, 40]
pressure_limits = [966,1013]
friction_bounds = [0.01,0.04]
# ==========================================================================
# ==========================================================================
# Plot specifications
# ============================ | ==============================================
# ==========================================================================
# Loop over region specifications ploting both surface and speeds
for re | gion in regions:
name = region['name']
xlimits = region['limits'][0]
ylimits = region['limits'][1]
# ======================================================================
# Surface Elevations
# ======================================================================
plotfigure = plotdata.new_plotfigure(name='Surface - %s' % name)
plotfigure.show = True
# Set up for axes in this figure:
plotaxes = plotfigure.new_plotaxes()
plotaxes.title = 'Surface'
plotaxes.scaled = True
plotaxes.xlimits = xlimits
plotaxes.ylimits = ylimits
plotaxes.afteraxes = surge_afteraxes
surge.add_surface_elevation(plotaxes, bounds=surface_limits)
plotaxes.plotitem_dict['surface'].amr_patchedges_show = [0] * 10
surge.add_land(plotaxes)
plotaxes.plotitem_dict['land'].amr_patchedges_show = [0] * 10
# ======================================================================
# Water Speed
# ======================================================================
plotfigure = plotdata.new_plotfigure(name='Currents - %s' % name)
plotfigure.show = True
# Set up for axes in this figure:
plotaxes = plotfigure.new_plotaxes()
plotaxes.title = 'Currents'
plotaxes.scaled = True
plotaxes.xlimits = xlimits
plotaxes.ylimits = ylimits
plotaxes.afteraxes = surge_afteraxes
surge.add_speed(plotaxes, bounds=speed_limits)
plotaxes.plotitem_dict['speed'].amr_patchedges_show = [0] * 10
surge.add_land(plotaxes)
plotaxes.plotitem_dict['land'].amr_patchedges_show = [0] * 10
# ======================================================================
# Wind Field
# ======================================================================
plotfigure = plotdata.new_plotfigure(name='Wind Speed - %s' % name)
plotfigure.show = surge_data.wind_forcing
plotaxes = plotfigure.new_plotaxes()
plotaxes.title = "Wind Field"
plotaxes.scaled = True
plotaxes.xlimits = xlimits
plotaxes.ylimits = ylimits
plotaxes.afteraxes = surge_afteraxes
surge.add_wind(plotaxes, bounds=wind_limits)
plotaxes.plotitem_dict['wind'].amr_patchedges_show = [0] * 10
surge.add_land(plotaxes)
plotaxes.plotitem_dict['land'].amr_patchedges_show = [0] * 10
# ========================================================================
# Hurricane forcing
# ========================================================================
# Friction field
plotfigure = plotdata.new_plotfigure(name='Friction - %s' % name)
plotfigure.show = friction_data.variable_friction
plotaxes = plotfigure.new_plotaxes()
plotaxes.title = "Manning's N Coefficients"
plotaxes.scaled = True
plotaxes.xlimits = xlimits
plotaxes.ylimits = ylimits
plotaxes.afteraxes = surge_afteraxes
surge.add_friction(plotaxes, bounds=friction_bounds)
# Pressure field
plotfigure = plotdata.new_plotfigure(name='Pressure - %s' % name)
plotfigure.show = surge_data.pressure_forcing
plotaxes = plotfigure.new_plotaxes()
plotaxes.title = "Pressure Field"
plotaxes.scaled = True
plotaxes.xlimits = xlimits
plotaxes.ylimits = ylimits
plotaxes.afteraxes = surge_afteraxes
surge.add_pressure(plotaxes, bounds=pressure_limits)
plotaxes.plotitem_dict['pressure'].amr_patchedges_show = [0] * 10
surge.add_land(plotaxes)
plotaxes.plotitem_dict['land'].amr_patchedges_show = [0] * 10
# ========================================================================
# Figures for gauges
# ========================================================================
plotfigure = plotdata.new_plotfigure(name='Surface & Topo', figno=300, \
type='each_gauge')
plotfigure.show = True
plotfigure.clf_each_gauge = True
stations = [('8761724', 'Grand Isle, LA'),
('8760922', 'Pilots Station East, SW Pass, LA'),
('8735180', 'Dauphin Island, AL')]
landfall_time = np.datetime64('2005-08-29T11:10')
begin_date = datetime.datetime(2005, 8, 26)
end_date = datetime.datetime(2005, 8, 31)
def get_actual_water_levels(station_id):
# Fetch water levels and tide predictions for given station
date_time, water_level, tide = fetch_noaa_tide_data(station_id,
begin_date, end_date)
# Calculate times relative to landfall
secs_rel_landfall = (date_time - landfall_time) / np.timedelta64(1, 's')
# Subtract tide predictions from measured water levels
water_level -= tide
return secs_rel_landfall, water_level
def gauge_afteraxes(cd):
station_id, station_name = stations[cd.gaugeno - 1]
|
Gaulois94/python-sfml | examples/spacial_music/spacial_music.py | Python | lgpl-3.0 | 3,151 | 0.025071 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# pySFML - Python bindings for SFML
# Copyright 2012-2013, Jonathan De Wachter <dewachter.jonathan@gmail.com>
#
# This software is released under the LGPLv3 li | cense.
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sfml as sf
def main(song):
window = sf.RenderWindow(sf.VideoMode(600, 600), "pySFML - Spacial Music")
window.framerate_limit = 60
# load one font, one song and two textures |
try:
font = sf.Font.from_file("data/sansation.ttf")
music = sf.Music.from_file(song)
texture = sf.Texture.from_file("data/speaker.gif")
speaker = sf.Sprite(texture)
speaker.position = -texture.size // 2
texture = sf.Texture.from_file("data/head_kid.png")
hears = sf.Sprite(texture)
hears.origin = texture.size // 2
except IOError:
exit(1)
# create a text that display instructions
instructions = "Up/Down Move hears along Y axis\n"
instructions += "Left/Right Move hears along X axis\n"
instructions += "Plus/Minus Move hears along Z axis"
instructions = sf.Text(instructions, font, 12)
instructions.position = (70, 250)
instructions.color = sf.Color.BLACK
    # make sure the song is mono (single channel) so it can be spatialized
if music.channel_count != 1:
print("Only sounds with one channel (mono sounds) can be spatialized.")
print("This song ({0}) has {1} channels.".format(SONG, music.channels_count))
exit(1)
# setup the music properties
music.relative_to_listener = False
music.min_distance = 200
music.attenuation = 1
# initialize some values before entering the main loop
position = sf.Vector3(-250, -250, 0)
sf.Listener.set_position(position)
x, y, _ = position
hears.position = (x, y)
running = True
# move the view to make coord (0, 0) appears on the center of the screen
window.default_view.move(-300, -300)
# start the music before entering the main loop
music.loop = True
music.play()
# start the main loop
while running:
for event in window.events:
if type(event) is sf.CloseEvent:
running = False
elif type(event) is sf.KeyEvent and event.pressed:
if event.code is sf.Keyboard.UP:
position.y -= 5
elif event.code is sf.Keyboard.DOWN:
position.y += 5
elif event.code is sf.Keyboard.LEFT:
position.x -= 5
elif event.code is sf.Keyboard.RIGHT:
position.x += 5
elif event.code is sf.Keyboard.ADD:
if position.z < 400:
position.z += 5
elif event.code is sf.Keyboard.SUBTRACT:
if position.z > -400:
position.z -= 5
# update the listener and the hears position
sf.Listener.set_position(position)
x, y, z = position
hears.position = (x, y)
hears.ratio = (1, 1) + sf.Vector2(z, z)/400.
# clear screen, draw images and text and display them
window.clear(sf.Color.WHITE)
if position.z >= 0:
window.draw(speaker)
window.draw(hears)
else:
window.draw(hears)
window.draw(speaker)
window.draw(instructions)
window.display()
window.close()
if __name__ == "__main__":
main("data/mario.flac")
|
antepsis/anteplahmacun | sympy/core/numbers.py | Python | bsd-3-clause | 106,958 | 0.000524 | from __future__ import print_function, division
import decimal
import fractions
import math
import re as regex
from collections import defaultdict
from .containers import Tuple
from .sympify import converter, sympify, _sympify, SympifyError
from .singleton import S, Singleton
from .expr import Expr, AtomicExpr
from .decorators import _sympifyit
from .cache import cacheit, clear_cache
from .logic import fuzzy_not
from sympy.core.compatibility import (
as_int, integer_types, long, string_types, with_metaclass, HAS_GMPY,
SYMPY_INTS)
import mpmath
import mpmath.libmp as mlib
from mpmath.libmp import mpf_pow, mpf_pi, mpf_e, phi_fixed
from mpmath.ctx_mp import mpnumeric
from mpmath.libmp.libmpf import (
finf as _mpf_inf, fninf as _mpf_ninf,
fnan as _mpf_nan, fzero as _mpf_zero, _normalize as mpf_normalize,
prec_to_dps)
from sympy.utilities.misc import debug, filldedent
from .evaluate import global_evaluate
rnd = mlib.round_nearest
_LOG2 = math.log(2)
def comp(z1, z2, tol=None):
"""Return a bool indicating whether the error between z1 and z2 is <= tol.
    If ``tol`` is None then True will be returned if there is no significant
    difference between the numbers: ``abs(z1 - z2)*10**p <= 1/2`` where ``p``
is the lower of the precisions of the values. A comparison of strings will
be made if ``z1`` is a Number and a) ``z2`` is a string or b) ``tol`` is ''
and ``z2`` is a Number.
When ``tol`` is a nonzero value, if z2 is non-zero and ``|z1| > 1``
the error is normalized by ``|z1|``, so if you want to see if the
absolute error between ``z1`` and ``z2`` is <= ``tol`` then call this
as ``comp(z1 - z2, 0, tol)``.
"""
if type(z2) is str:
if not isinstance(z1, Number):
raise ValueError('when z2 is a str z1 must be a Number')
return str(z1) == z2
if not z1:
z1, z2 = z2, z1
if not z1:
return True
if not tol:
if tol is None:
if type(z2) is str and getattr(z1, 'is_Number', False):
return str(z1) == z2
a, b = Float(z1), Float(z2)
return int(abs(a - b)*10**prec_to_dps(
min(a._prec, b._prec)))*2 <= 1
elif all(getattr(i, 'is_Number', False) for i in (z1, z2)):
return z1._prec == z2._prec and str(z1) == str(z2)
raise ValueError('exact comparison requires two Numbers')
diff = abs(z1 - z2)
az1 = abs(z1)
if z2 and az1 > 1:
return diff/az1 <= tol
else:
return diff <= tol
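# Illustrative calls (a sketch; exact results depend on Float precision):
#     comp(1.0001, 1.0, tol=0.01)  -> True, since |z1 - z2|/|z1| <= tol
#     comp(1.0001, 1.0, tol=1e-6)  -> False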
def mpf_norm(mpf, prec):
"""Return the mpf tuple normalized appropriately for the indicated
precision after doing a check to see if zero should be returned or
    not when the mantissa is 0. ``mpf_normalize`` always assumes that this
    is zero, but it may not be, since the mpf values "+inf", "-inf" and
    "nan" also have a mantissa of zero.
Note: this is not intended to validate a given mpf tuple, so sending
mpf tuples that were not created by mpmath may produce bad results. This
is only a wrapper to ``mpf_normalize`` which provides the check for non-
zero mpfs that have a 0 for the mantissa.
"""
sign, man, expt, bc = mpf
if not man:
# hack for mpf_normalize which does not do this;
# it assumes that if man is zero the result is 0
# (see issue 6639)
if not bc:
return _mpf_zero
else:
# don't change anything; this should already
# be a well formed mpf tuple
return mpf
rv = mpf_normalize(sign, man, expt, bc, prec, rnd)
return rv
# TODO: we should use the warnings module
_errdict = {"divide": False}
def seterr(divide=False):
"""
Should sympy raise an exception on 0/0 or return a nan?
divide == True .... raise an exception
divide == False ... return nan
"""
if _errdict["divide"] != divide:
clear_cache()
_errdict["divide"] = divide
def _decimal_to_Rational_prec(dec):
"""Convert an ordinary decimal instance to a Rational."""
if not dec.is_finite():
raise TypeError("dec must be finite, got %s." % dec)
s, d, e = dec.as_tuple()
prec = len(d)
if e >= 0: # it's an integer
rv = Integer(int(dec))
else:
s = (-1)**s
d = sum([di*10**i for i, di in enumerate(reversed(d))])
rv = Rational(s*d, 10**-e)
return rv, prec
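# For example, Decimal("0.25") has digits (2, 5) and exponent -2, so this
# returns (Rational(25, 100), 2), i.e. (1/4, 2).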
def _literal_float(f):
"""Return True if n can be interpreted as a floating point number."""
pat = r"[-+]?((\d*\.\d+)|(\d+\.?))(eE[-+]?\d+)?"
return bool(regex.match(pat, f))
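# e.g. _literal_float("1.5e-3") and _literal_float("-.5") return True,
# while _literal_float("abc") returns False.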
# (a,b) -> gcd(a,b)
_gcdcache = {}
# TODO caching with decorator, but not to degrade performance
def igcd(*args):
"""Computes nonnegative integer greatest common divisor.
The algorithm is based on the well known Euclid's algorithm. To
improve speed, igcd() has its own c | aching mechanism impleme | nted.
Examples
========
>>> from sympy.core.numbers import igcd
>>> igcd(2, 4)
2
>>> igcd(5, 10, 15)
5
"""
if len(args) < 2:
raise TypeError(
'igcd() takes at least 2 arguments (%s given)' % len(args))
if 1 in args:
a = 1
k = 0
else:
a = abs(as_int(args[0]))
k = 1
if a != 1:
while k < len(args):
b = args[k]
k += 1
try:
a = _gcdcache[(a, b)]
except KeyError:
b = as_int(b)
if not b:
continue
if b == 1:
a = 1
break
if b < 0:
b = -b
t = a, b
while b:
a, b = b, a % b
_gcdcache[t] = _gcdcache[t[1], t[0]] = a
while k < len(args):
        ok = as_int(args[k])  # validate the remaining arguments are integers
k += 1
return a
def ilcm(*args):
"""Computes integer least common multiple.
Examples
========
>>> from sympy.core.numbers import ilcm
>>> ilcm(5, 10)
10
>>> ilcm(7, 3)
21
>>> ilcm(5, 10, 15)
30
"""
if len(args) < 2:
raise TypeError(
'ilcm() takes at least 2 arguments (%s given)' % len(args))
if 0 in args:
return 0
a = args[0]
for b in args[1:]:
a = a*b // igcd(a, b)
return a
def igcdex(a, b):
"""Returns x, y, g such that g = x*a + y*b = gcd(a, b).
>>> from sympy.core.numbers import igcdex
>>> igcdex(2, 3)
(-1, 1, 1)
>>> igcdex(10, 12)
(-1, 1, 2)
>>> x, y, g = igcdex(100, 2004)
>>> x, y, g
(-20, 1, 4)
>>> x*100 + y*2004
4
"""
if (not a) and (not b):
return (0, 1, 0)
if not a:
return (0, b//abs(b), abs(b))
if not b:
return (a//abs(a), 0, abs(a))
if a < 0:
a, x_sign = -a, -1
else:
x_sign = 1
if b < 0:
b, y_sign = -b, -1
else:
y_sign = 1
x, y, r, s = 1, 0, 0, 1
while b:
(c, q) = (a % b, a // b)
(a, b, r, s, x, y) = (b, c, x - q*r, y - q*s, r, s)
return (x*x_sign, y*y_sign, a)
def mod_inverse(a, m):
"""
Return the number c such that, ( a * c ) % m == 1 where
c has the same sign as a. If no such value exists, a
ValueError is raised.
Examples
========
>>> from sympy import S
>>> from sympy.core.numbers import mod_inverse
Suppose we wish to find multiplicative inverse x of
3 modulo 11. This is the same as finding x such
that 3 * x = 1 (mod 11). One value of x that satisfies
    this congruence is 4, because 3 * 4 = 12 and 12 = 1 (mod 11).
    This is the value returned by mod_inverse:
>>> mod_inverse(3, 11)
4
>>> mod_inverse(-3, 11)
-4
    When there is a common factor between the numerators of
``a`` and ``m`` the inverse does not exist:
>>> mod_inverse(2, 4)
Traceback (most recent call last):
...
ValueError: inverse of 2 mod 4 does not exist
>>> mod_inverse(S(2)/7, S(5)/2)
7/2
References
==========
- https://en.wikipedia.org/wiki/Modular_multiplicat |
FRC830/opencv-tools | camera.py | Python | mit | 3,853 | 0.001557 | from __future__ import division, print_function, unicode_literals
import cv2
import os
import threading
import time
import script
try:
# Python 3
import queue
except ImportError:
# Python 2
import Queue as queue
class Camera(object):
def __init__(self, id, fps=20, width=640, height=480):
self._id = id
self.cap_thread = CaptureThread(self)
self.fps = fps
self.width = width
self.height = height
self.cap_thread.start()
@property
def id(self):
return self._id
class capture_property(property):
""" A wrapper for OpenCV capture properties
Any changes to these attributes are queued to be processed by the
capture thread on its next tick.
"""
def __init__(self, name, cv_property_id):
self.name = name
self.cv_property_id = cv_property_id
def __get__(self, camera, owner):
return getattr(camera, '_' + self.name)
def __set__(self, camera, value):
setattr(camera, '_' + self.name, value)
camera.cap_thread.opt_queue.put((self.cv_property_id, value))
fps = capture_property('fps', cv2.cv.CV_CAP_PROP_FPS)
width = capture_property('width', c | v2.cv.CV_CAP_PROP_FRAME_WIDTH)
height = capture_property('height', cv2.cv.CV_CAP_PROP_FRAME_HEIGHT)
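    # For example, assigning camera.fps = 30 stores self._fps and enqueues
    # (CV_CAP_PROP_FPS, 30); the capture thread applies it on its next tick.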
def __getattr__(self, attr):
""" Fall back to CaptureThread attributes """
return getattr(self.cap_thread, attr)
class CaptureThread(threading.Thread):
_read_lock = threading.Lock()
if 'STATIC_IMAGE' in os.environ:
static_image = cv2.imread(os.environ['STATIC_IMA | GE'])
def __init__(self, camera):
super(CaptureThread, self).__init__()
self.parent_thread = threading.current_thread()
self.camera = camera
self._running = False
self.cap = cv2.VideoCapture(camera.id)
self.opt_queue = queue.Queue()
self.image_lock = threading.Lock()
self._image = None
self.image_ok = False
self.script_error = False
def run(self):
self._running = True
while self._running:
while True:
try:
self.cap.set(*self.opt_queue.get(block=False))
except queue.Empty:
break
with self._read_lock:
if getattr(self, 'static_image', None) is not None:
image = cv2.resize(self.static_image, (self.camera.width, self.camera.height))
self.image_ok = True
else:
self.image_ok, image = self.cap.read()
if self.image_ok:
try:
if script.current_script:
image_out = script.current_script.trigger('frame', image, self.camera)
if isinstance(image_out, type(image)):
image = image_out
except script.ScriptError as e:
self.script_error = True
import traceback
print(e)
print(e.traceback)
else:
self.script_error = False
self.image = image
time.sleep(1 / self.camera.fps)
# Stop if the parent thread died
if not self.parent_thread.is_alive():
self.stop()
def stop(self):
self._running = False
self.image_ok = False
@property
def image(self):
with self.image_lock:
return self._image
@image.setter
def image(self, image):
with self.image_lock:
self._image = image
class _FakeCamera:
image = None
image_ok = False
fps = 0
width = 640
height = 480
fake_camera = _FakeCamera()
|
fxstein/SentientHome | feed/feed.home.zillow.py | Python | apache-2.0 | 2,755 | 0.000363 | #!/usr/local/bin/python3 -u
"""
Author: Oliver Ratzesberger <https://github.com/fxstein>
Copyright: Copyright (C) 2016 Oliver Ratzesberger
License: Apache License, Version 2.0
"""
# Make sure we have access to SentientHome commons
import os
import sys
try:
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + '/..')
except:
exit(1)
# Sentient Home Application
from common.shapp import shApp
from common.shutil import xml_to_dict
from common.sheventhandler import shEventHandler
# Default settings
from cement.utils.misc import init_defaults
defaults = init_defaults('zillow', 'zillow')
defaults['zillow']['poll_interval'] = 3600.0
with shApp('zillow', config_defaults=defaults) as app:
app.run()
handler = shEventHandler(app)
while True:
r = handler.get(app.config.get('zillow', 'zillow_addr') + ":" +
app.config.get('zillow', 'zillow_port') +
app.config.get('zillow', 'zillow_path') + "?zws-id=" +
app.config.get('zillow', 'zillow_zws_id') + "&zpid=" +
app.config.get('zillow', 'zillow_zpid'))
# app.log.debug('Fetch data: %s' % r.text)
data = xml_to_dict(r.text)
app.log.debug('Raw data: %s' % data)
# Data Structure Documentation:
# http://www.zillow.com/howto/api/APIOverview.htm
request_data = data[
'{http://www.zillow.com/static/xsd/Zestimate.xsd}zestimate'][
'response']
property_data = data[
'{http://www.zillow.com/static/xsd/Zestimate.x | sd}zestimate'][
'response']['zestimate']
local_data = data[
'{http://www.zillow.com/static/xsd/Zestimate.xsd}zestimate'][
| 'response']['localRealEstate']
event = [{
'measurement': 'zillow',
'tags': {
'zpid': request_data['zpid'],
'region': local_data['region']['@name'],
'region_type': local_data['region']['@type'],
},
'fields': {
'valuation': float(property_data['amount']),
'30daychange': float(property_data['valueChange']),
'range_high': float(property_data['valuationRange']['high']),
'range_low': float(property_data['valuationRange']['low']),
'percentile': int(property_data['percentile']),
'zindexValue': float(local_data['region']
['zindexValue'].replace(',', '')),
'last_updated': property_data['last-updated'],
}
}]
app.log.debug('Event data: %s' % event)
handler.postEvent(event)
handler.sleep()
|
jimmynl/hadafuna | tests/core/test_game.py | Python | mit | 174 | 0 | from unit | test2 import TestCase, main
from hadafuna.core.game import KoikoiGame
class KoikoiGameTest(TestCase):
pass
if __name__ == '__main__':
main | (verbosity=2)
|
ovresko/erpnext | erpnext/setup/doctype/setup_progress/test_setup_progress.py | Python | gpl-3.0 | 219 | 0.009132 | # -*- coding: utf-8 -*-
# Copyright (c) 2017, Fra | ppe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import unittest
class TestSetupProgres | s(unittest.TestCase):
pass
|
kamal-gade/rockstor-core | manage.py | Python | gpl-3.0 | 251 | 0 | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "RockStor.settings")
from | django.core.management import execute_fr | om_command_line
execute_from_command_line(sys.argv)
|
goptavares/aDDM-Toolbox | addm_toolbox/ddm_mla_test.py | Python | gpl-3.0 | 5,299 | 0.000189 | #!/usr/bin/env python
"""
Copyright (C) 2017, California Institute of Technology
This file is part of addm_toolbox.
addm_toolbox is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
addm_toolbox is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with addm_toolbox. If not, see <http://www.gnu.org/licenses/>.
---
Module: ddm_mla_test.py
Author: Gabriela Tavares, gtavares@caltech.edu
Performs a test to check the validity of the maximum likelihood algorithm (MLA)
for the drift-diffusion model (DDM). Artificial data is generated using
specific parameters for the model. These parameters are then recovered through
a maximum likelihood estimation procedure, using a grid search over the 2 free
parameters of the model.
"""
from __future__ import absolute_import
import numpy as np
import pkg_resources
from builtins import range, str
from multiprocessing import Pool
from .ddm_mla import DDM
from .util import load_trial_conditions_from_csv
def wrap_ddm_get_model_log_likelihood(args):
"""
Wrapper for DDM.get_model_log_likelihood(), intended for parallel
computation using a threadpool.
Args:
args: a tuple where the first item is a DDM object, and the remaining
item are the same arguments required by
DDM.get_model_log_likelihood().
Returns:
The output of DDM.get_model_log_likelihood().
"""
model = args[0]
return model.get_model_log_likelihood(*args[1:])
def main(d, sigma, rangeD, rangeSigma, trialsFileName=None, numTrials=10,
numSimulations=10, binStep=100, maxRT=8000, numThreads=9,
verbose=False):
"""
Args:
d: float, DDM parameter for generating artificial data.
sigma: float, DDM parameter for generating artificial data.
rangeD: list of floats, search range for parameter d.
rangeSigma: list of floats, search range | for parameter sigma.
trialsFileName: string, path of trial conditions file.
numTrials: int, number of artificial data trials | to be generated per
trial condition.
numSimulations: int, number of simulations to be generated per trial
condition, to be used in the RT histograms.
binStep: int, size of the bin step to be used in the RT histograms.
maxRT: int, maximum RT to be used in the RT histograms.
numThreads: int, size of the thread pool.
verbose: boolean, whether or not to increase output verbosity.
"""
pool = Pool(numThreads)
histBins = list(range(0, maxRT + binStep, binStep))
# Load trial conditions.
if not trialsFileName:
trialsFileName = pkg_resources.resource_filename(
u"addm_toolbox", u"test_data/test_trial_conditions.csv")
trialConditions = load_trial_conditions_from_csv(trialsFileName)
# Generate artificial data.
dataRTLeft = dict()
dataRTRight = dict()
for trialCondition in trialConditions:
dataRTLeft[trialCondition] = list()
dataRTRight[trialCondition] = list()
model = DDM(d, sigma)
for trialCondition in trialConditions:
t = 0
while t < numTrials:
try:
trial = model.simulate_trial(
trialCondition[0], trialCondition[1])
except:
print(u"An exception occurred while generating artificial "
"trial " + str(t) + u" for condition " +
str(trialCondition[0]) + u", " + str(trialCondition[1]) +
u".")
raise
if trial.choice == -1:
dataRTLeft[trialCondition].append(trial.RT)
elif trial.choice == 1:
dataRTRight[trialCondition].append(trial.RT)
t += 1
# Generate histograms for artificial data.
dataHistLeft = dict()
dataHistRight = dict()
for trialCondition in trialConditions:
dataHistLeft[trialCondition] = np.histogram(
dataRTLeft[trialCondition], bins=histBins)[0]
dataHistRight[trialCondition] = np.histogram(
dataRTRight[trialCondition], bins=histBins)[0]
# Grid search on the parameters of the model.
if verbose:
print(u"Performing grid search over the model parameters...")
listParams = list()
models = list()
for d in rangeD:
for sigma in rangeSigma:
model = DDM(d, sigma)
models.append(model)
listParams.append((model, trialConditions, numSimulations,
histBins, dataHistLeft, dataHistRight))
logLikelihoods = pool.map(wrap_ddm_get_model_log_likelihood, listParams)
pool.close()
if verbose:
for i, model in enumerate(models):
print(u"L" + str(model.params) + u" = " + str(logLikelihoods[i]))
bestIndex = logLikelihoods.index(max(logLikelihoods))
print(u"Best fit: " + str(models[bestIndex].params))
|
galileo-project/Galileo-dockyard | server/dockyard/driver/task/_model/__init__.py | Python | mit | 137 | 0 | from dockyard.utils.mongo im | port Mongo
class Task(Mongo):
"""
channel
msg
receivers
| expire
"""
|
seblefevre/testerman | plugins/probes/configurationfile/__init__.py | Python | gpl-2.0 | 30 | 0 | impo | rt ConfigurationFilePro | be
|
stefanschramm/osm_oepnv_validator | rn.py | Python | gpl-3.0 | 4,325 | 0.024509 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# rn.py - load network of routes from OSM
#
# Copyright (C) 2012, Stefan Schramm <mail@stefanschramm.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys
import re
import os
import stat
import datetime
from imposm.parser import OSMParser
from mako.lookup import TemplateLookup
class RouteNetwork(object):
# dummy profile
profile = {
'name': '',
'shortname': '',
'filter_text': '',
'datasource': '',
'stopplan': False,
'maps': {}
}
pbf = ""
mtime = None
relation_filter = lambda r: True
makolookup = TemplateLookup(directories=[os.path.dirname(__file__) + '/templates'])
def __init__(self):
# the interesting objects will be stored in these 3 dicts:
# dict of relations; index: relation id
# each relation consists of (relation_id, tags, members)
# where members consists of (member_id, member_type, role)
self.relations = {}
# dict of ways; index: way id
# each way consists of (way_id, tags, node_ids)
self.ways = {}
# dict of nodes; index: node id
# each node consists of (node_id, tags, coordinates)
self.nodes = {}
# additionally information about parent-relations is collected:
# dict of parent relations; index: id of relation to get parent relations for
self.parents = {}
def load_network(self, pbf, filterfunction=lambda r: True):
# read data of public transport network
# required for validating and displaying
self.relation_filter = filterfunction
# get modification time of data source
# TODO: how to determine time when reading from multiple sources?
self.mtime = datetime.datetime.fromtimestamp(os.stat(pbf)[stat.ST_MTIME])
# first pass:
# collect all interesting relations
print "Collecting relations..."
p = OSMParser(concurrency=4, relations_callback=self.relations_cb)
p.parse(pbf)
# second pass:
# collect ways for these relations
print "Collecting %i ways..." % len(self.ways)
p = OSMParser(concurrency=4, ways_callback=self.ways_cb)
p.parse(pbf)
# collect nodes for collected relations and ways
print "Collecting %i nodes..." % len(self.nodes)
p = OSMParser(concurrency=4, nodes_callback=self.nodes_cb)
p.parse(pbf)
def relations_cb(self, relations | ):
# callback: collect routes to validate
for relation in relations:
rid, tags, members = relation
if self.relation_filter(relation):
self.relations[rid] = relation
for member in members:
mid, typ, role = member
if typ == "node":
self.nodes[mid] = None
if typ == "way":
self.ways[mid] = None
if (typ, mid) not in self.parents:
se | lf.parents[(typ, mid)] = [("relation", rid)]
else:
self.parents[(typ, mid)].append(("relation", rid))
def ways_cb(self, ways):
# callback: collect interesting ways
for way in ways:
wid, tags, nodes = way
            if wid in self.ways and self.ways[wid] is None:
self.ways[wid] = way
for nid in nodes:
self.nodes[nid] = None
def nodes_cb(self, nodes):
# callback: collect interesting nodes
for node in nodes:
nid, tags, coords = node
            if nid in self.nodes and self.nodes[nid] is None:
self.nodes[nid] = node
def get_sortkey(self, relation):
rid, tags, members = relation
key = ""
if "route_master" in tags:
key += tags["route_master"]
elif "route" in tags:
key += tags["route"]
key += "_"
if "ref" in tags:
ref = tags["ref"]
for number in set(re.findall("[0-9]+", ref)):
# append a lot of leading zeroes to each number
ref = ref.replace(number, "%010i" % int(number))
key += ref
key += "_"
if "type" in tags and tags["type"] == "route_master":
# for same refs put route_master at top
key += "0"
else:
key += "1"
return key
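    # For example, tags {"route": "bus", "ref": "12a"} yield the sort key
    # "bus_0000000012a_1", so numeric parts of refs sort naturally.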
|
silent1mezzo/jeeves-framework | jeeves/conf/project_template/bot.py | Python | isc | 246 | 0 | #!/usr/bin/env python
import | os
import sys
if __name__ == "__main__":
os.environ.setdefault("JEEVES_SETTINGS_MODULE", "settings")
| from jeeves.core.management import execute_from_command_line
execute_from_command_line(sys.argv[1:])
|
geometer/book_tools | encrypt.py | Python | mit | 1,584 | 0.005051 | #!/usr/bin/python
import shutil, string, sy | s, tempfile
from argparse import ArgumentParser
f | rom fbreader.format.epub import EPub
def verify_key(key, name):
if len(key) != 32:
raise Exception('Incorrect %s length %d, 32 expected' % (name, len(key)))
for sym in key:
if not sym in string.hexdigits:
raise Exception('Incorrect character %s in %s' % (sym, name))
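# e.g. verify_key('00112233445566778899aabbccddeeff', 'key') passes silently,
# while a key of the wrong length or with non-hex digits raises an Exception.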
def parse_command_line():
parser = ArgumentParser(
description='Marlin ePub encryption tool'
)
parser.add_argument(
'-s',
'--keep-unencrypted',
dest='keep_unencrypted',
metavar='keep_unencrypted',
nargs='*',
help='entry to keep not encrypted (tool encrypts all entries except cover by default)'
)
parser.add_argument(
'-k',
dest='key',
metavar='key',
required=True,
help='encryption key (32-digit hex number)'
)
parser.add_argument(
'-ci',
dest='content_id',
metavar='content_id',
required=True,
help='content id'
)
parser.add_argument(
'epub',
help='name of ePub file to encrypt'
)
params = parser.parse_args(sys.argv[1:])
verify_key(params.key, 'key')
return params
if __name__ == '__main__':
params = parse_command_line()
working_dir = tempfile.mkdtemp(dir='.')
try:
epub = EPub(params.epub, params.epub)
epub.encrypt(params.key, params.content_id, working_dir, files_to_keep=params.keep_unencrypted)
finally:
shutil.rmtree(working_dir)
|
openstack/python-tackerclient | tackerclient/tacker/v1_0/nfvo/vim.py | Python | apache-2.0 | 4,822 | 0 | # Copyright 2016 Brocade Communications Systems Inc
# All Rights Reserved.
#
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import yaml
from oslo_utils import strutils
from tackerclient.common import exceptions
from tackerclient.i18n import _
from tackerclient.tacker import v1_0 as tackerV10
from tackerclient.tacker.v1_0.nfvo import vim_utils
_VIM = "vim"
class ListVIM(tackerV10.ListCommand):
"""List VIMs that belong to a given tenant."""
resource = _VIM
list_columns = ['id', 'tenant_id', 'name', 'type', 'is_default',
'placement_attr', 'status']
class ShowVIM(tackerV10.ShowCommand):
"""Show information of a given VIM."""
resource = _VIM
class CreateVIM(tackerV10.CreateCommand):
"""Create a VIM."""
resource = _VIM
def add_known_arguments(self, parser):
parser.add_argument(
'--config-file',
required=True,
help=_('YAML file with VIM configuration parameters'))
parser.add_argument(
'name', metavar='NAME',
help=_('Set a name for the VIM'))
parser.add_argument(
'--description',
help=_('Set a description for the VIM'))
parser.add_argument(
'--is-default',
action='store_true',
default=False,
help=_('Set as default VIM'))
def args2body(self, parsed_args):
body = {self.resource: {}}
if parsed_args.config_file:
with open(parsed_args.config_file) as f:
vim_config = f.read()
try:
config_param = yaml.load(vim_config,
Loader=yaml.SafeLoader)
except yaml.YAMLError as e:
raise exceptions.InvalidInput(reason=e)
vim_obj = body[self.resource]
try:
auth_url = config_param.pop('auth_url')
except KeyError:
raise exceptions.TackerClientException(message='Auth URL must be '
'specified',
status_code=404)
vim_obj['auth_url'] = vim_utils.validate_auth_url(auth_url).geturl()
vim_utils.args2body_vim(config_param, vim_obj)
tackerV10.update_dict(parsed_args, body[self.resource],
['tenant_id', 'name', 'description',
'is_default'])
return body
class UpdateVIM(tackerV10.UpdateCommand):
"""Update a given VIM."""
resource = _VIM
def add_known_arguments(self, parser):
parser.add_argument(
'--config-file',
required=False,
help=_('YAML file with VIM configuration parameters'))
parser.add_argument(
'--name',
help=_('New name for the VIM'))
parser.add_argument(
'--description',
help=_('New description for the VIM'))
parser.add_ar | gument(
'--is-default',
type=strutils.bool_from_string,
metavar='{True,False}',
help=_('Indicate whether the VIM is used as default'))
def args2body(self, parsed_args):
body = {self.resource: {}}
config_param = None
# config arg passed as d | ata overrides config yaml when both args passed
if parsed_args.config_file:
with open(parsed_args.config_file) as f:
config_yaml = f.read()
try:
config_param = yaml.load(config_yaml,
Loader=yaml.SafeLoader)
except yaml.YAMLError as e:
raise exceptions.InvalidInput(reason=e)
vim_obj = body[self.resource]
if config_param is not None:
vim_utils.args2body_vim(config_param, vim_obj)
tackerV10.update_dict(parsed_args, body[self.resource],
['tenant_id', 'name', 'description',
'is_default'])
# type attribute is read-only, it can't be updated, so remove it
# in update method
body['vim'].pop('type', None)
return body
class DeleteVIM(tackerV10.DeleteCommand):
"""Delete given VIM(s)."""
resource = _VIM
|
gijs/solpy | solpy/summary.py | Python | lgpl-2.1 | 787 | 0.012706 | """summerize pv systems"""
import argparse
import pv
import os
import datetime
parser = argpars | e.ArgumentParser()
parser.add_argument('--verbose', '-v', action='count')
| parser.add_argument('files', nargs='*')
args = parser.parse_args()
total_dc = 0
sdate = datetime.datetime(2014,6,16)
for i in args.files:
try:
plant = pv.load_system(i)
ctime = os.path.getmtime(i)
cdt = datetime.datetime.fromtimestamp(ctime)
if cdt > sdate:
total_dc += plant.p_dc(1000)/1000.
print '%s, %s - %s, %s KW DC, %s KW AC' % (cdt, plant.system_name , \
plant.phase, round(plant.p_dc(1000)/1000,1), \
round(plant.p_ac(1000)/1000,1))
except:
print "error in %s" % i
print 'total: %s' % total_dc
|
DailyActie/Surrogate-Model | surrogate/benchmarks/__init__.py | Python | mit | 1,275 | 0.000784 | # MIT License
#
# Copyright (c) | 2016 Daily Actie
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons | to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Author: Quan Pan <quanpan302@hotmail.com>
# License: MIT License
# Create: 2016-12-02
from .moea import *
__all__ = [
'zdt1', 'zdt2', 'zdt3', 'zdt4', 'zdt6'
]
|
guillaumebel/nibbles-clutter | glchess/src/lib/chess/pgn.py | Python | gpl-2.0 | 18,947 | 0.009659 | # -*- coding: utf-8 -*-
"""
Implement a PGN reader/writer.
See http://www.chessclub.com/help/PGN-spec
"""
__author__ = 'Robert Ancell <bob27@users.sourceforge.net>'
__license__ = 'GNU General Public License Version 2'
__copyright__ = 'Copyright 2005-2006 Robert Ancell'
import re
"""
; Example PGN file
[Event "F/S Return Match"]
[Site "Belgrade, Serbia JUG"]
[Date "1992.11.04"]
[Round "29"]
[White "Fischer, Robert J."]
[Black "Spassky, Boris V."]
[Result "1/2-1/2"]
1. e4 e5 2. Nf3 Nc6 3. Bb5 a6 4. Ba4 Nf6 5. O-O Be7 6. Re1 b5 7. Bb3 d6 8. c3
O-O 9. h3 Nb8 10. d4 Nbd7 11. c4 c6 12. cxb5 axb5 13. Nc3 Bb7 14. Bg5 b4 15.
Nb1 h6 16. Bh4 c5 17. dxe5 Nxe4 18. Bxe7 Qxe7 19. exd6 Qf6 20. Nbd2 Nxd6 21.
Nc4 Nxc4 22. Bxc4 Nb6 23. Ne5 Rae8 24. Bxf7+ Rxf7 25. Nxf7 Rxe1+ 26. Qxe1 Kxf7
27. Qe3 Qg5 28. Qxg5 hxg5 29. b3 Ke6 30. a3 Kd6 31. axb4 cxb4 32. Ra5 Nd5 33.
f3 Bc8 34. Kf2 Bf5 35. Ra7 g6 36. Ra6+ Kc5 37. Ke1 Nf4 38. g3 Nxh3 39. Kd2 Kb5
40. Rd6 Kc5 41. Ra6 Nf2 42. g4 Bd3 43. Re6 1/2-1/2
"""
RESULT_INCOMPLETE = '*'
RESULT_WHITE_WIN = '1-0'
RESULT_BLACK_WIN = '0-1'
RESULT_DRAW = '1/2-1/2'
results = {RESULT_INCOMPLETE: RESULT_INCOMPLETE,
RESULT_WHITE_WIN: RESULT_WHITE_WIN,
RESULT_BLACK_WIN: RESULT_BLACK_WIN,
RESULT_DRAW: RESULT_DRAW}
"""The required tags in a PGN file (the seven tag roster, STR)"""
TAG_EVENT = 'Event'
TAG_SITE = 'Site'
TAG_DATE = 'Date'
TAG_ROUND = 'Round'
TAG_WHITE = 'White'
TAG_BLACK = 'Black'
TAG_RESULT = 'Result'
"""Optional tags"""
TAG_TIME = 'Time'
TAG_FEN = 'FEN'
TAG_WHITE_TYPE = 'WhiteType'
TAG_WHITE_ELO = 'WhiteElo'
TAG_BLACK_TYPE = 'BlackType'
TAG_BLACK_ELO = 'BlackElo'
TAG_TIME_CONTROL = 'TimeControl'
TAG_TERMINATION = 'Termination'
# Values for the WhiteType and BlackType tag
PLAYER_HUMAN = 'human'
PLAYER_AI = 'program'
# Values for the Termination tag
TERMINATE_ABANDONED = 'abandoned'
TERMINATE_ADJUDICATION = 'adjudication'
TERMINATE_DEATH = 'death'
TERMINATE_EMERGENCY = 'emergency'
TERMINATE_NORMAL = 'normal'
TERMINATE_RULES_INFRACTION = 'rules infraction'
TERMINATE_TIME_FORFEIT = 'time forfeit'
TERMINATE_UNTERMINATED = 'unterminated'
# Comments are bounded by ';' to '\n' or '{' to '}'
# Lines starting with '%' are ignored and are used as an extension mechanism
# Strings are bounded by '"' and '"' and quotes inside the strings are escaped with '\"'
# Token types
TOKEN_LINE_COMMENT = 'Line comment'
TOKEN_COMMENT = 'Comment'
TOKEN_ESCAPED = 'Escaped data'
TOKEN_PERIOD = 'Period'
TOKEN_TAG_START = 'Tag start'
TOKEN_TAG_END = 'Tag end'
TOKEN_STRING = 'String'
TOKEN_SYMBOL = 'Symbol'
TOKEN_RAV_START = 'RAV start'
TOKEN_RAV_END = 'RAV end'
TOKEN_XML = 'XML'
TOKEN_NAG = 'NAG'
class Error(Exception):
"""PGN exception class"""
pass
class PGNParser:
"""
"""
STATE_IDLE = 'IDLE'
STATE_TAG_NAME = 'TAG_NAME'
STATE_TAG_VALUE = 'TAG_VALUE'
STATE_TAG_END = 'TAG_END'
STATE_MOVETEXT = 'MOVETEXT'
STATE_RAV = 'RAV'
STATE_XML = 'XML'
def __init__(self, maxGames = -1):
expressions = ['\%.*', # Escaped data
';.*', # Line comment
'\{', # Comment start
'\".*\"', # String
'[a-zA-Z0-9\*\_\+\#\=\:\-\/]+', # Symbol, '/' Not in spec but required from game draw and incomplete
'\[', # Tag start
'\]', # Tag end
'\$[0-9]{1,3}', # NAG
'\(', # RAV start
'\)', # RAV end
'\<.*\>', # XML
'[.]+'] # Period(s)
self.regexp = re.compile('|'.join(expressions))
self.tokens = {';': TOKEN_LINE_COMMENT,
'{': TOKEN_COMMENT,
'[': TOKEN_TAG_START,
']': TOKEN_TAG_END,
'"': TOKEN_STRING,
'.': TOKEN_PERIOD,
'$': TOKEN_NAG,
'(': TOKEN_RAV_START,
')': TOKEN_RAV_END,
'<': TOKEN_XML,
'%': TOKEN_ESCAPED}
for c in '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ*':
self.tokens[c] = TOKEN_SYMBOL
self.games = []
self.maxGames = maxGames
self.comment = None
self.state = self.STATE_IDLE
self.game = PGNGame() # Game being assembled
self.tagName = None # The tag being assembled
self.tagValue = None
self.prevTokenIsMoveNumber = False
self.currentMoveNumber = 0
self.ravDepth = 0 # The Recursive Annotation Variation (RAV) stack
def _parseTokenMovetext(self, tokenType, data):
"""
"""
if tokenType is TOKEN_SYMBOL:
# Ignore tokens inside RAV
if self.ravDepth != 0:
return
# See if this is a game terminate
if results.has_key(data):
self.games.append(self.game)
self.game = PGNGame()
self.prevTokenIsMoveNumber = False
self.currentMoveNumber = 0
self.ravDepth = 0
self.state = self.STATE_IDLE
# Otherwise it is a move number or a move
else:
try:
moveNumber = int(data)
except ValueError:
move = PGNMove()
move.number = self.currentMoveNumber
move.move = data
self.game.addMove(move)
self.currentMoveNumber += 1
else:
self.prevTokenIsMoveNumber = True
expected = (self.currentMoveNumber / 2) + 1
if moveNumber != expected:
raise Error('Expected move number %i, got %i' % (expected, moveNumber))
elif tokenType is TOKEN_NAG:
# Ignore tokens inside RAV
if self.ravDepth != 0:
return
move = self.game.getMove(self.currentMoveNumber)
move.nag = data
elif tokenType is TOKEN_PERIOD:
# Ignore tokens inside RAV
if self.ravDepth != 0:
return
if self.prevTokenIsMoveNumber is False:
raise Error('Unexpected period')
elif tokenType is TOKEN_RAV_START:
self.ravDepth += 1
# FIXME: Check for RAV errors
return
elif tokenType is TOKEN_RAV_END:
self.ravDepth -= 1
# FIXME: Check for RAV errors
return
|
else:
raise Error('Unknown token %s in movetext' % (str(tokenType)))
def parseToken(self, tokenType, data):
"""
"""
| # Ignore all comments at any time
if tokenType is TOKEN_LINE_COMMENT or tokenType is TOKEN_COMMENT:
if self.currentMoveNumber > 0:
move = self.game.getMove(self.currentMoveNumber)
move.comment = data[1:-1]
return
if self.state is self.STATE_MOVETEXT:
self._parseTokenMovetext(tokenType, data)
elif self.state is self.STATE_IDLE:
if tokenType is TOKEN_TAG_START:
self.state = self.STATE_TAG_NAME
return
elif tokenType is TOKEN_SYMBOL:
self.whiteMove = None
self.prevTokenIsMoveNumber = False
self.ravDepth = 0
self.state = self.STATE_MOVETEXT
self._parseTokenMovetext(tokenType, data)
elif tokenType is TOKEN_ESCAPED:
pass
else:
raise Error( |
Xowap/ansible | lib/ansible/constants.py | Python | gpl-3.0 | 16,541 | 0.013482 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division)
__metaclass__ = type
import os
import pwd
import sys
from string import ascii_letters, digits
from six import string_types
from six.moves import configparser
from ansible.parsing.splitter import unquote
from ansible.errors import AnsibleOptionsError
# copied from utils, avoid circular reference fun :)
def mk_boolean(value):
if value is None:
return False
val = str(value)
if val.lower() in [ "true", "t", "y", "1", "yes" ]:
return True
else:
return False
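# e.g. mk_boolean("Yes") -> True, mk_boolean(0) -> False, mk_boolean(None) -> False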
def get_config(p, section, key, env_var, default, boolean=False, integer=False, floating=False, islist=False):
''' return a configuration variable with casting '''
value = _get_config(p, section, key, env_var, default)
if boolean:
value = mk_boolean(value)
if value:
if integer:
value = int(value)
elif floating:
value = float(value)
elif islist:
if isinstance(value, string_types):
value = [x.strip() for x in value.split(',')]
elif isinstance(value, string_types):
value = unquote(value)
return value
def _get_config(p, section, key, env_var, default):
''' helper function for get_config '''
if env_var is not None:
value = os.environ.get(env_var, None)
if value is not None:
return value
if p is not None:
try:
return p.get(section, key, raw=True)
except:
return default
return default
def load_config_file():
    ''' Load the config file, trying locations in order (first found is used): ENV, CWD, HOME, /etc/ansible '''
p = configparser.ConfigParser()
path0 = os.getenv("ANSIBLE_CONFIG", None)
if path0 is not None:
path0 = os.path.expanduser(path0)
if os.path.isdir(path0):
path0 += "/ansible.cfg"
path1 = os.getcwd() + "/ansible.cfg"
path2 = os.path.expanduser("~/.ansible.cfg")
path3 = "/etc/ansible/ansible.cfg"
for path in [path0, path1, path2, path3] | :
if path is not None and os.path.exists(path):
try:
p.read(path)
except configparser.Error as e:
| raise AnsibleOptionsError("Error reading config file: \n{0}".format(e))
return p, path
return None, ''
def shell_expand_path(path):
''' shell_expand_path is needed as os.path.expanduser does not work
when path is None, which is the default for ANSIBLE_PRIVATE_KEY_FILE '''
if path:
path = os.path.expanduser(os.path.expandvars(path))
return path
p, CONFIG_FILE = load_config_file()
active_user = pwd.getpwuid(os.geteuid())[0]
# check all of these extensions when looking for yaml files for things like
# group variables -- really anything we can load
YAML_FILENAME_EXTENSIONS = [ "", ".yml", ".yaml", ".json" ]
# sections in config file
DEFAULTS='defaults'
# generally configurable things
DEFAULT_DEBUG = get_config(p, DEFAULTS, 'debug', 'ANSIBLE_DEBUG', False, boolean=True)
DEFAULT_HOST_LIST = shell_expand_path(get_config(p, DEFAULTS, 'hostfile', 'ANSIBLE_HOSTS', get_config(p, DEFAULTS,'inventory','ANSIBLE_INVENTORY', '/etc/ansible/hosts')))
DEFAULT_MODULE_PATH = get_config(p, DEFAULTS, 'library', 'ANSIBLE_LIBRARY', None)
DEFAULT_ROLES_PATH = shell_expand_path(get_config(p, DEFAULTS, 'roles_path', 'ANSIBLE_ROLES_PATH', '/etc/ansible/roles'))
DEFAULT_REMOTE_TMP = get_config(p, DEFAULTS, 'remote_tmp', 'ANSIBLE_REMOTE_TEMP', '$HOME/.ansible/tmp')
DEFAULT_MODULE_NAME = get_config(p, DEFAULTS, 'module_name', None, 'command')
DEFAULT_PATTERN = get_config(p, DEFAULTS, 'pattern', None, '*')
DEFAULT_FORKS = get_config(p, DEFAULTS, 'forks', 'ANSIBLE_FORKS', 5, integer=True)
DEFAULT_MODULE_ARGS = get_config(p, DEFAULTS, 'module_args', 'ANSIBLE_MODULE_ARGS', '')
DEFAULT_MODULE_LANG = get_config(p, DEFAULTS, 'module_lang', 'ANSIBLE_MODULE_LANG', 'en_US.UTF-8')
DEFAULT_TIMEOUT = get_config(p, DEFAULTS, 'timeout', 'ANSIBLE_TIMEOUT', 10, integer=True)
DEFAULT_POLL_INTERVAL = get_config(p, DEFAULTS, 'poll_interval', 'ANSIBLE_POLL_INTERVAL', 15, integer=True)
DEFAULT_REMOTE_USER = get_config(p, DEFAULTS, 'remote_user', 'ANSIBLE_REMOTE_USER', active_user)
DEFAULT_ASK_PASS = get_config(p, DEFAULTS, 'ask_pass', 'ANSIBLE_ASK_PASS', False, boolean=True)
DEFAULT_PRIVATE_KEY_FILE = shell_expand_path(get_config(p, DEFAULTS, 'private_key_file', 'ANSIBLE_PRIVATE_KEY_FILE', None))
DEFAULT_REMOTE_PORT = get_config(p, DEFAULTS, 'remote_port', 'ANSIBLE_REMOTE_PORT', None, integer=True)
DEFAULT_ASK_VAULT_PASS = get_config(p, DEFAULTS, 'ask_vault_pass', 'ANSIBLE_ASK_VAULT_PASS', False, boolean=True)
DEFAULT_VAULT_PASSWORD_FILE = shell_expand_path(get_config(p, DEFAULTS, 'vault_password_file', 'ANSIBLE_VAULT_PASSWORD_FILE', None))
DEFAULT_TRANSPORT = get_config(p, DEFAULTS, 'transport', 'ANSIBLE_TRANSPORT', 'smart')
DEFAULT_SCP_IF_SSH = get_config(p, 'ssh_connection', 'scp_if_ssh', 'ANSIBLE_SCP_IF_SSH', False, boolean=True)
DEFAULT_SFTP_BATCH_MODE = get_config(p, 'ssh_connection', 'sftp_batch_mode', 'ANSIBLE_SFTP_BATCH_MODE', True, boolean=True)
DEFAULT_MANAGED_STR = get_config(p, DEFAULTS, 'ansible_managed', None, 'Ansible managed: {file} modified on %Y-%m-%d %H:%M:%S by {uid} on {host}')
DEFAULT_SYSLOG_FACILITY = get_config(p, DEFAULTS, 'syslog_facility', 'ANSIBLE_SYSLOG_FACILITY', 'LOG_USER')
DEFAULT_KEEP_REMOTE_FILES = get_config(p, DEFAULTS, 'keep_remote_files', 'ANSIBLE_KEEP_REMOTE_FILES', False, boolean=True)
DEFAULT_HASH_BEHAVIOUR = get_config(p, DEFAULTS, 'hash_behaviour', 'ANSIBLE_HASH_BEHAVIOUR', 'replace')
DEFAULT_PRIVATE_ROLE_VARS = get_config(p, DEFAULTS, 'private_role_vars', 'ANSIBLE_PRIVATE_ROLE_VARS', False, boolean=True)
DEFAULT_JINJA2_EXTENSIONS = get_config(p, DEFAULTS, 'jinja2_extensions', 'ANSIBLE_JINJA2_EXTENSIONS', None)
DEFAULT_EXECUTABLE = get_config(p, DEFAULTS, 'executable', 'ANSIBLE_EXECUTABLE', '/bin/sh')
DEFAULT_GATHERING = get_config(p, DEFAULTS, 'gathering', 'ANSIBLE_GATHERING', 'implicit').lower()
DEFAULT_LOG_PATH = shell_expand_path(get_config(p, DEFAULTS, 'log_path', 'ANSIBLE_LOG_PATH', ''))
DEFAULT_FORCE_HANDLERS = get_config(p, DEFAULTS, 'force_handlers', 'ANSIBLE_FORCE_HANDLERS', False, boolean=True)
DEFAULT_INVENTORY_IGNORE = get_config(p, DEFAULTS, 'inventory_ignore_extensions', 'ANSIBLE_INVENTORY_IGNORE', ["~", ".orig", ".bak", ".ini", ".cfg", ".retry", ".pyc", ".pyo"], islist=True)
# selinux
DEFAULT_SELINUX_SPECIAL_FS = get_config(p, 'selinux', 'special_context_filesystems', None, 'fuse, nfs, vboxsf, ramfs', islist=True)
### PRIVILEGE ESCALATION ###
# Backwards Compat
DEFAULT_SU = get_config(p, DEFAULTS, 'su', 'ANSIBLE_SU', False, boolean=True)
DEFAULT_SU_USER = get_config(p, DEFAULTS, 'su_user', 'ANSIBLE_SU_USER', 'root')
DEFAULT_SU_EXE = get_config(p, DEFAULTS, 'su_exe', 'ANSIBLE_SU_EXE', 'su')
DEFAULT_SU_FLAGS = get_config(p, DEFAULTS, 'su_flags', 'ANSIBLE_SU_FLAGS', '')
DEFAULT_ASK_SU_PA |
christianmemije/kolibri | kolibri/auth/filters.py | Python | mit | 10,993 | 0.005003 | from six import string_types
from django.db import models
from django.db.models.query import F
from .constants import collection_kinds
from .errors import InvalidHierarchyRelationsArgument
class HierarchyRelationsFilter(object):
"""
Helper class for efficiently making queries based on relations between models in the Collection hierarchy via Roles/Memberships.
To use, instantiate an instance of `HierarchyRelationsFilter`, passing in a queryset. Then, to perform hierarchy-based queries
on the queryset, call the `filter_by_hier | archy` method on the `HierarchyRelationsFilter` instance, passing arguments fixing valu | es
for models in the hierarchy structure, or linking them to fields on the base model being filtered (via F expressions).
"""
_role_extra = {
"tables": [
'"{facilityuser_table}" AS "source_user"',
'"{role_table}" AS "role"',
],
"where": [
"role.user_id = source_user.id",
"role.collection_id = ancestor_collection.id",
]
}
_collection_extra = {
"tables": [
'"{collection_table}" AS "ancestor_collection"',
'"{collection_table}" AS "descendant_collection"',
],
"where": [
"descendant_collection.lft BETWEEN ancestor_collection.lft AND ancestor_collection.rght",
"descendant_collection.tree_id = ancestor_collection.tree_id",
]
}
_facilityuser_table = [
'"{facilityuser_table}" AS "target_user"',
]
_membership_table = [
'"{membership_table}" AS "membership"',
]
def __init__(self, queryset):
# convert the provided argument from a Model class into a QuerySet as needed
if isinstance(queryset, type) and issubclass(queryset, models.Model):
queryset = queryset.objects.all()
self.queryset = queryset
self.tables = []
self.left_join_tables = []
self.where = []
# import auth models here to avoid circular imports
from .models import Role, Collection, Membership, FacilityUser
# retrieve the table names that will be used as context for building queries
self._table_names = {
"role_table": Role._meta.db_table,
"collection_table": Collection._meta.db_table,
"membership_table": Membership._meta.db_table,
"facilityuser_table": FacilityUser._meta.db_table,
}
def _add_extras(self, where, tables=None, left_join_tables=None):
self.where += where
if tables:
self.tables += [table.format(**self._table_names) for table in tables]
if left_join_tables:
self.left_join_tables += [table.format(**self._table_names) for table in left_join_tables]
def _resolve_f_expression(self, f_expr):
# try resolving the F expression; if it doesn't refer to a valid field or related field it will throw a FieldError
expression = f_expr.resolve_expression(self.queryset.query)
# extract the components of the F expression and do a sanity check
lookups, parts, _ = self.queryset.query.solve_lookup_type(f_expr.name)
assert len(lookups) == 1 and lookups[0] == "exact" # F expression should not have qualifiers like __gt, __contains, etc
# replace the last part of the reference with the target field name (e.g. this will replace `my_fkname` with `my_fkname_id`)
parts[-1] = expression.target.get_attname()
# join together the table name and field names to get a SQL-style reference to the target field
return ".".join([self.queryset.model._meta.db_table] + parts)
def _as_sql_reference(self, ref):
if hasattr(ref, "id"): # ref is a model instance; return its ID
return "'{}'".format(ref.id)
elif isinstance(ref, string_types) or isinstance(ref, int): # ref is a string or integer; assume it's an ID
return "'{}'".format(ref)
elif isinstance(ref, F): # ref is an F expression; resolve it to a SQL reference
return self._resolve_f_expression(ref)
else:
raise InvalidHierarchyRelationsArgument("Not a valid reference: %r" % ref)
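    # e.g. _as_sql_reference(42) returns "'42'", while _as_sql_reference(F("id"))
    # resolves to a "<table>.id" column reference on the base queryset.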
def _join_with_logical_operator(self, lst, operator):
op = ") {operator} (".format(operator=operator)
return "(({items}))".format(items=op.join(lst))
def _is_non_facility_user(self, user):
from .models import KolibriAbstractBaseUser, FacilityUser
return isinstance(user, KolibriAbstractBaseUser) and not isinstance(user, FacilityUser)
def filter_by_hierarchy(self,
source_user=None,
role_kind=None,
ancestor_collection=None,
descendant_collection=None,
target_user=None):
"""
        Filters a queryset through a multi-table join over the Collection hierarchy and Roles/Memberships.
To anchor the hierarchy model relations back into the main queryset itself, use F expressions. For example, if
you are filtering on a FacilityUser queryset, and want to return all users that have an admin role for
collection `mycoll`, you would use something like:
`FacilityUser.objects.filter_by_hierarchy(source_user=F("id"), role_kind=ADMIN, descendant_collection=mycoll)`
(Here, `source_user=F("id")` means that the id of the source user is the same as the id of the model being filtered,
i.e. we're "filtering over source users" in the hierarchy structure.)
:param source_user: a specific value, or F expression, to constrain the source FacilityUser in the hierarchy structure
:param role_kind: a specific value, or F expression, to constrain the Role kind in the hierarchy structure
:param ancestor_collection: a specific value, or F expression, to constrain the ancestor Collection in the hierarchy structure
:param descendant_collection: a specific value, or F expression, to constrain the descendant Collection in the hierarchy structure
:param target_user: a specific value, or F expression, to constrain the target FacilityUser in the hierarchy structure
:return: a filtered queryset with all the hierarchy structure conditions applied, as well as conditions based on provided arguments
:rtype: QuerySet
"""
# if either the source or target user is not a facility user, return an empty queryset
if self._is_non_facility_user(source_user) or self._is_non_facility_user(target_user):
return self.queryset.none()
################################################################################################################
# 1. Determine which components of the hierarchy tree are relevant to the current query, and add in the
# corresponding tables and base conditions to establish the relationships between them.
################################################################################################################
# 1(a). If needed, add in the SQL to establish the relationships between the target user (member) and the collections.
if target_user: # there are two ways for the target user to be a member of the ancestor collection:
# the first way is via the collection hierarchy; having a Membership for the descendant collection
membership_via_hierarchy_where = self._join_with_logical_operator([
"membership.user_id = target_user.id",
"membership.collection_id = descendant_collection.id",
], "AND")
# the second, if the ancestor collection is the facility, is by virtue of being associated with that facility
member_via_facility_where = self._join_with_logical_operator([
"ancestor_collection.kind = '{facility_kind}'".format(facility_kind=collection_kinds.FACILITY),
"ancestor_collection.dataset_id = target_user.dataset_id",
], "AND")
where_clause = self._join_with_logical_operator([memb |
mark-burnett/filament-dynamics | actin_dynamics/primitives/__init__.py | Python | gpl-3.0 | 776 | 0 | # Copyright (C) 2010 Mark Burnett
#
# This program is free soft | ware: you can redistribute it and/or modify
# it under the term | s of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import analyses
import file_readers
import objectives
import meta_classes  # presumably imported for its side effects only
del meta_classes
|
pgaref/HTTP_Request_Randomizer | http_request_randomizer/requests/parsers/FreeProxyParser.py | Python | mit | 3,551 | 0.003943 | import logging
import requests
from bs4 import BeautifulSoup
from http_request_randomizer.requests.parsers.UrlParser import UrlParser
from http_request_randomizer.requests.proxy.ProxyObject import ProxyObject, AnonymityLevel, Protocol
logger = logging.getLogger(__name__)
__author__ = 'pgaref'
class FreeProxyParser(UrlParser):
def __init__(self, id, web_url, timeout=None):
UrlParser.__init__(self, id=id, web_url=web_url, timeout=timeout)
def parse_proxyList(self):
curr_proxy_list = []
try:
response = requests.get(self.get_url(), timeout=self.timeout)
if not response.ok:
logger.warning("Proxy Provider url faile | d: {}".format(self.get_url()))
return []
content = response.content
soup = BeautifulSoup(content, "html.parser")
table = soup.find("table", attrs={"id": "proxylisttable"})
|
# The first tr contains the field names.
headings = [th.get_text() for th in table.find("tr").find_all("th")]
datasets = []
for row in table.find_all("tr")[1:-1]:
                dataset = list(zip(headings, (td.get_text() for td in row.find_all("td"))))  # materialize so the emptiness check and logging work
if dataset:
datasets.append(dataset)
for dataset in datasets:
proxy_obj = self.create_proxy_object(dataset)
# Make sure it is a Valid Proxy Address
if proxy_obj is not None and UrlParser.valid_ip_port(proxy_obj.get_address()):
curr_proxy_list.append(proxy_obj)
else:
logger.debug("Proxy Invalid: {}".format(dataset))
except AttributeError as e:
logger.error("Provider {0} failed with Attribute error: {1}".format(self.id, e))
except KeyError as e:
logger.error("Provider {0} failed with Key error: {1}".format(self.id, e))
except Exception as e:
logger.error("Provider {0} failed with Unknown error: {1}".format(self.id, e))
finally:
return curr_proxy_list
def create_proxy_object(self, dataset):
# Check Field[0] for tags and field[1] for values!
ip = ""
port = None
anonymity = AnonymityLevel.UNKNOWN
country = None
protocols = []
for field in dataset:
if field[0] == 'IP Address':
# Make sure it is a Valid IP
ip = field[1].strip() # String strip()
# Make sure it is a Valid IP
if not UrlParser.valid_ip(ip):
logger.debug("IP with Invalid format: {}".format(ip))
return None
elif field[0] == 'Port':
port = field[1].strip() # String strip()
elif field[0] == 'Anonymity':
anonymity = AnonymityLevel.get(field[1].strip()) # String strip()
elif field[0] == 'Country':
country = field[1].strip() # String strip()
elif field[0] == 'Https':
if field[1].strip().lower() == 'yes': protocols.extend([Protocol.HTTP, Protocol.HTTPS])
elif field[1].strip().lower() == 'no': protocols.append(Protocol.HTTP)
return ProxyObject(source=self.id, ip=ip, port=port, anonymity_level=anonymity, country=country, protocols=protocols)
def __str__(self):
return "{0} parser of '{1}' with required bandwidth: '{2}' KBs" \
.format(self.id, self.url, self.minimum_bandwidth_in_KBs)
|
NavarraBiomed/clips | studies_app/migrations/0006_auto_20161205_1225.py | Python | gpl-2.0 | 6,384 | 0.002193 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('studies_app', '0005_auto_20161205_1202'),
]
operations = [
migrations.Remove | Field(
model_name='observationalcase',
name='clips_control_group',
),
migrations.Rem | oveField(
model_name='observationalcase',
name='clips_exp_date',
),
migrations.RemoveField(
model_name='observationalcase',
name='clips_n_lote',
),
migrations.RemoveField(
model_name='observationalcase',
name='clips_tratment_group',
),
migrations.RemoveField(
model_name='observationalcase',
name='cs_coagulation_mode',
),
migrations.RemoveField(
model_name='observationalcase',
name='cs_coagulation_watts',
),
migrations.RemoveField(
model_name='observationalcase',
name='cs_cut_mode',
),
migrations.RemoveField(
model_name='observationalcase',
name='cs_cut_watts',
),
migrations.RemoveField(
model_name='observationalcase',
name='histol_simplified',
),
migrations.RemoveField(
model_name='observationalcase',
name='not_tired_closure_by',
),
migrations.RemoveField(
model_name='observationalcase',
name='surgery_by_complication',
),
migrations.RemoveField(
model_name='observationalcase',
name='surgery_from_endoscopy',
),
migrations.AddField(
model_name='observationalcase',
name='clipping',
field=models.IntegerField(blank=True, null=True, verbose_name='Clipping', choices=[(0, 'No'), (1, 'Punctual (VISIBLE\xa0VESSEL)'), (2, 'Partially clipped'), (3, 'Complete clip closure')]),
),
migrations.AddField(
model_name='observationalcase',
name='number_of_sessions',
field=models.IntegerField(blank=True, null=True, verbose_name='Number of endoscopic sessions needed', choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6), (7, 7), (8, 8), (9, 9)]),
),
migrations.AddField(
model_name='observationalcase',
name='surgery',
field=models.IntegerField(blank=True, null=True, verbose_name='Surgery', choices=[(0, 'No needed'), (1, 'Primary for technical reasons'), (2, 'Primary for suspected invasiveness'), (3, 'Due to bleeding'), (4, 'Due to perforation'), (5, 'Histological reasons'), (6, 'Clinical/patient decision')]),
),
migrations.AlterField(
model_name='clipscase',
name='bleeding_treatment',
field=models.IntegerField(blank=True, null=True, verbose_name='Bleeding treatment', choices=[(0, 'No'), (1, 'Injection'), (2, 'Clipping'), (3, 'ArgonPC'), (4, 'Coagulation forceps'), (5, '2 methods'), (6, 'Snare tip coagulation')]),
),
migrations.AlterField(
model_name='clipscase',
name='hospital_stay_by_complication',
field=models.IntegerField(blank=True, null=True, verbose_name='Hospital stay by complication (days)'),
),
migrations.AlterField(
model_name='clipscase',
name='hospital_stay_by_technique',
field=models.IntegerField(blank=True, null=True, verbose_name='Hospital stay by technique (days)'),
),
migrations.AlterField(
model_name='observationalcase',
name='bleeding_treatment',
field=models.IntegerField(blank=True, null=True, verbose_name='Bleeding treatment', choices=[(0, 'No'), (1, 'Injection'), (2, 'Clipping'), (3, 'ArgonPC'), (4, 'Coagulation forceps'), (5, '2 methods'), (6, 'Snare tip coagulation')]),
),
migrations.AlterField(
model_name='observationalcase',
name='budding',
field=models.IntegerField(blank=True, null=True, verbose_name='Budding', choices=[(0, 'No or low grade'), (1, 'High grade')]),
),
migrations.AlterField(
model_name='observationalcase',
name='histology',
field=models.IntegerField(blank=True, null=True, verbose_name='Histology', choices=[(1, 'Adenoma'), (2, 'HGD to intramucosal carcinoma in adenoma'), (3, 'Superficial submucosal carcinoma in adenoma'), (4, 'Deep submucosal carcinoma in adenoma'), (5, 'Hyperplastic'), (6, 'Sesil Serrated polyp'), (7, 'Traditional Serrated Adenoma'), (8, 'Polyp Serrated Mixed or serrated polyp with dysplasia'), (9, 'HGD to intramucosal carcinoma any serrated'), (10, 'Superficial submucosa carc. Any serrated'), (11, 'Deep submucosa carc. Any serrated'), (12, 'Carcinoid')]),
),
migrations.AlterField(
model_name='observationalcase',
name='hospital_stay_by_complication',
field=models.IntegerField(blank=True, null=True, verbose_name='Hospital stay by complication (days)'),
),
migrations.AlterField(
model_name='observationalcase',
name='hospital_stay_by_technique',
field=models.IntegerField(blank=True, null=True, verbose_name='Hospital stay by technique (days)'),
),
migrations.AlterField(
model_name='observationalcase',
name='successful_treatment',
field=models.IntegerField(blank=True, null=True, verbose_name=' Successful treatment', choices=[(0, 'No (surgery)'), (1, 'Yes (endoscopic treatment)')]),
),
migrations.AlterField(
model_name='obsinternationalcase',
name='bleeding_treatment',
field=models.IntegerField(blank=True, null=True, verbose_name='Bleeding treatment', choices=[(0, 'No'), (1, 'Injection'), (2, 'Clipping'), (3, 'ArgonPC'), (4, 'Coagulation forceps'), (5, '2 methods'), (6, 'Snare tip coagulation')]),
),
migrations.AlterField(
model_name='obsinternationalcase',
name='budding',
field=models.IntegerField(blank=True, null=True, verbose_name='Budding', choices=[(0, 'No or low grade'), (1, 'High grade')]),
),
]
|
tudennis/LeetCode---kamyu104-11-24-2015 | Python/flip-game.py | Python | mit | 1,018 | 0.005894 | # Time: O(c * n + n) = O(n * (c+1))
# Space: O(n)
# This solution compares only O(1) times for the two consecutive "+"
class Solution(object):
def generatePossibleNextMoves(self, s):
"""
:type s: str
:rtype: List[str]
| """
res = []
i, n = 0, len(s) - 1
while i < n: # O(n) time
if s[i] = | = '+':
while i < n and s[i+1] == '+': # O(c) time
res.append(s[:i] + '--' + s[i+2:]) # O(n) time and space
i += 1
i += 1
return res
# Time: O(c * m * n + n) = O(c * n + n), where m = 2 in this question
# Space: O(n)
# This solution compares O(m) = O(2) times for two consecutive "+", where m is length of the pattern
class Solution2(object):
def generatePossibleNextMoves(self, s):
"""
:type s: str
:rtype: List[str]
"""
return [s[:i] + "--" + s[i+2:] for i in xrange(len(s) - 1) if s[i:i+2] == "++"]
|
kmike/psd-tools | src/psd_tools/terminology.py | Python | mit | 52,949 | 0 | """
Constants for descriptor.
This file is automaticaly generated by tools/extract_terminology.py
"""
from enum import Enum as _Enum
class Klass(bytes, _Enum):
"""
Klass definitions extracted from PITerminology.h.
See https://www.adobe.com/devnet/photoshop/sdk.html
"""
Action = b'Actn'
ActionSet = b'ASet'
Adjustment = b'Adjs'
AdjustmentLayer = b'AdjL'
AirbrushTool = b'AbTl'
AlphaChannelOptions = b'AChl'
AntiAliasedPICTAcquire = b'AntA'
Application = b'capp'
Arrowhead = b'cArw'
Assert = b'Asrt'
AssumedProfile = b'AssP'
BMPFormat = b'BMPF'
BackgroundLayer = b'BckL'
BevelEmboss = b'ebbl'
BitmapMode = b'BtmM'
BlendRange = b'Blnd'
BlurTool = b'BlTl'
BookColor = b'BkCl'
BrightnessContrast = b'BrgC'
Brush = b'Brsh'
BurnInTool = b'BrTl'
CachePrefs = b'CchP'
CMYKColor = b'CMYC'
CMYKColorMode = b'CMYM'
CMYKSetup = b'CMYS'
Calculation = b'Clcl'
Channel = b'Chnl'
ChannelMatrix = b'ChMx'
ChannelMixer = b'ChnM'
CineonFormat = b'SDPX'
ClippingInfo = b'Clpo'
ClippingPath = b'ClpP'
CloneStampTool = b'ClTl'
Color = b'Clr '
ColorBalance = b'ClrB'
ColorCorrection = b'ClrC'
ColorPickerPrefs = b'Clrk'
ColorSampler = b'ClSm'
ColorStop = b'Clrt'
Command = b'Cmnd'
Curves = b'Crvs'
CurvePoint = b'CrPt'
CustomPalette = b'Cstl'
CurvesAdjustment = b'CrvA'
CustomPhosphors = b'CstP'
CustomWhitePoint = b'CstW'
DicomFormat = b'Dicm'
DisplayPrefs = b'DspP'
Document = b'Dcmn'
DodgeTool = b'DdTl'
DropShadow = b'DrSh'
DuotoneInk = b'DtnI'
DuotoneMode = b'DtnM'
EPSGenericFormat = b'EPSG'
EPSPICTPreview = b'EPSC'
EPSTIFFPreview = b'EPST'
Element = b'Elmn'
Ellipse = b'Elps'
EraserTool = b'ErTl'
Export = b'Expr'
FileInfo = b'FlIn'
FileSavePrefs = b'FlSv'
FlashPixFormat = b'FlsP'
FontDesignAxes = b'FntD'
Format = b'Fmt '
FrameFX = b'FrFX'
Contour = b'FxSc'
GeneralPrefs = b'GnrP'
GIF89aExport = b'GF89'
GIFFormat = b'GFFr'
GlobalAngle = b'gblA'
Gradient = b'Grdn'
GradientFill = b'Grdf'
GradientMap = b'GdMp'
GradientTool = b'GrTl'
GraySetup = b'GrSt'
Grayscale = b'Grsc'
GrayscaleMode = b'Grys'
Guide = b'Gd '
GuidesPrefs = b'GdPr'
HalftoneScreen = b'HlfS'
HalftoneSpec = b'Hlfp'
HSBColor = b'HSBC'
HSBColorMode = b'HSBM'
HistoryBrushTool = b'HBTl'
HistoryPrefs = b'CHsP'
HistoryState = b'HstS'
HueSatAdjustment = b'HStA'
HueSatAdjustmentV2 = b'Hst2'
HueSaturation = b'HStr'
IFFFormat = b'IFFF'
IllustratorPathsExport = b'IlsP'
ImagePoint = b'ImgP'
Import = b'Impr'
IndexedColorMode = b'IndC'
InkTransfer = b'InkT'
InnerGlow = b'IrGl'
InnerShadow = b'IrSh'
InterfaceColor = b'IClr'
Invert = b'Invr'
JPEGFormat = b'JPEG'
LabColor = b'LbCl'
LabColorMode = b'LbCM'
Layer = b'Lyr '
LayerEffects = b'Lefx'
LayerFXVisible = b'lfxv'
Levels = b'Lvls'
LevelsAdjustment = b'LvlA'
LightSource = b'LghS'
Line = b'Ln '
MacPaintFormat = b'McPn'
MagicEraserTool = b'MgEr'
MagicPoint = b'Mgcp'
Mask = b'Msk '
MenuItem = b'Mn '
Mode = b'Md '
MultichannelMode = b'MltC'
ObsoleteTextLayer = b'TxLy'
Null = b'null'
Offset = b'Ofst'
Opacity = b'Opac'
OuterGlow = b'OrGl'
PDFGenericFormat = b'PDFG'
PICTFileFormat = b'PICF'
PICTResourceFormat = b'PICR'
PNGFormat = b'PNGF'
PageSetup = b'PgSt'
PaintbrushTool = b'PbTl'
Path = b'Path'
PathComponent = b'PaCm'
PathPoint = b'Pthp'
Pattern = b'PttR'
PatternStampTool = b'PaTl'
PencilTool = b'PcTl'
Photoshop20Format = b'Pht2'
Photoshop35Format = b'Pht3'
PhotoshopDCS2Format = b'PhD2'
PhotoshopDCSFormat = b'PhD1'
PhotoshopEPSFormat = b'PhtE'
PhotoshopPDFFormat = b'PhtP'
Pixel = b'Pxel'
PixelPaintFormat = b'PxlP'
PluginPrefs = b'PlgP'
Point = b'Pnt '
Point16 = b'Pnt1'
Polygon = b'Plgn'
Posterize = b'Pstr'
Preferences = b'GnrP'
ProfileSetup = b'PrfS'
Property = b'Prpr'
Range = b'Rang'
Rect16 = b'Rct1'
RGBColor = b'RGBC'
RGBColorMode = b'RGBM'
RGBSetup = b'RGBt'
RawFormat = b'Rw '
Rectangle = b'Rctn'
SaturationTool = b'SrTl'
ScitexCTFormat = b'Sctx'
Selection = b'csel'
SelectiveColor = b'SlcC'
ShapingCurve = b'ShpC'
SharpenTool = b'ShTl'
SingleColumn = b'Sngc'
SingleRow = b'Sngr'
BackgroundEraserTool = b'SETl'
SolidFill = b'SoFi'
ArtHistoryBrushTool = b'ABTl'
SmudgeTool = b'SmTl'
Snapshot = b'SnpS'
SpotColorChannel = b'SCch'
Style = b'StyC'
SubPath = b'Sbpl'
TIFFFormat = b'TIFF'
TargaFormat = b'TrgF'
TextLayer = b'TxLr'
TextStyle = b'TxtS'
TextStyleRange = b'Txtt'
Threshold = b'Thrs'
Tool = b'Tool'
TransferSpec = b'Trfp'
TransferPoint = b'DtnP'
TransparencyPrefs = b'TrnP'
TransparencyStop = b'TrnS'
UnitsPrefs = b'UntP'
UnspecifiedColor = b'UnsC'
Version = b'Vrsn'
WebdavPrefs = b'Wdbv'
XYYColor = b'XYYC'
ChromeFX = b'ChFX'
BackLight = b'BakL'
FillFlash = b'FilF'
ColorCast = b'ColC'
# Unknown class
EXRf = b'EXRf'
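# Enum members can be looked up by value as well as by name, e.g.:
#     Klass(b'Lyr ') is Klass.Layer   # True
#     Klass.Layer.value               # b'Lyr '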
class Enum(bytes, _Enum):
"""
Enum definitions extracted from PITerminology.h.
See https://www.adobe.com/devnet/photoshop/sdk.html
"""
Add = b'Add '
AmountHigh = b'amHi'
AmountLow = b'amLo'
AmountMedium = b'amMd'
AntiAliasNone = b'Anno'
AntiAliasLow = b'AnLo'
AntiAliasMedium = b'AnMd'
AntiAliasHigh = b'AnHi'
AntiAliasCrisp = b'AnCr'
AntiAliasStrong = b'AnSt'
AntiAliasSmooth = b'AnSm'
AppleRGB = b'AppR'
ASCII = b'ASCI'
AskWhenOpening = b'AskW'
Bicubic = b'Bcbc'
Binary = b'Bnry'
MonitorSetup = b'MntS'
_16BitsPerPixel = b'16Bt'
_1BitPerPixel = b'OnBt'
_2BitsPerPixel = b'2Bts'
_32BitsPerPixel = b'32Bt'
_4BitsPerPixel = b'4Bts'
_5000 = b'5000'
_5500 = b'5500'
_6500 = b'6500'
_72Color = b'72Cl'
_72Gray = b'72Gr'
_7500 = b'7500'
_8BitsPerPixel = b'EghB'
_9300 = b'9300'
A = b'A '
AbsColorimetric = b'AClr'
ADSBottoms = b'AdBt'
ADSCentersH = b'AdCH'
ADSCentersV = b'AdCV'
ADSHorizontal = b'AdHr'
ADSLefts = b'AdLf'
ADSRights = b'AdRg'
ADSTops = b'AdTp'
ADSVertical = b'AdVr'
AboutApp = b'AbAp'
Absolute = b'Absl'
ActualPixels = b'ActP'
Adaptive = b'Adpt'
AdjustmentOptions = b'AdjO'
AirbrushEraser = b'Arbs'
All = b'Al '
Amiga = b'Amga'
Angle = b'Angl'
Any = b'Any '
ApplyImage = b'AplI'
AroundCenter = b'ArnC'
Arrange = b'Arng'
Ask = b'Ask '
B = b'B '
Back = b'Back'
Background = b'Bckg'
BackgroundColor = b'BckC'
Backward = b'Bckw'
Behind = b'Bhnd'
Best = b'Bst '
Better = b'Dthb'
Bilinear = b'Blnr'
BitDepth1 = b'BD1 '
Bi | tDepth16 = b'BD16'
BitDepth24 = b'BD24'
BitDepth32 = b'BD32'
BitDepth4 = b'BD4 '
BitDepth8 = b'BD8 '
BitDepthA1R5G5B5 = b'1565'
BitDepthR5G6B5 = b'x565'
BitDepthX4R4G4B4 = b'x444'
BitDepthA4R4G4B4 = b'4444'
BitDepthX8R8G8B8 = b'x888'
Bitmap = b'Btmp'
Black = b'Blck'
BlackAndWhite = b'BanW'
BlackBody = b'BlcB'
Blacks = b'Blks'
BlockEraser = b'Blk '
Blast = b'Blst'
| Blocks = b'Blks'
Blue = b'Bl '
Blues = b'Bls '
Bottom = b'Bttm'
BrushDarkRough = b'BrDR'
BrushesAppend = b'BrsA'
BrushesDefine = b'BrsD'
BrushesDelete = b'Brsf'
BrushesLoad = b'Brsd'
BrushesNew = b'BrsN'
BrushesOptions = b'BrsO'
BrushesReset = b'BrsR'
BrushesSave = b'Brsv'
BrushLightRough = b'BrsL'
BrushSimple = b'BrSm'
BrushSize = b'BrsS'
BrushSparkle = b'BrSp'
BrushWideBlurry = b'BrbW'
BrushWideSharp = b'BrsW'
Builtin = b'Bltn'
BurnInH = b'BrnH'
BurnInM = b'BrnM'
BurnInS = b'BrnS'
ButtonMode = b'BtnM'
CIERGB = b'CRGB'
WidePhosphors = b'Wide'
WideGamutRGB = b'WRGB'
CMYK |
chipx86/reviewboard | reviewboard/datagrids/sidebar.py | Python | mit | 13,527 | 0 | """Sidebar item management for datagrids."""
from __future__ import unicode_literals
from django.utils import six
from django.utils.six.moves.urllib.parse import urlencode
from djblets.util.compat.django.template.loader import render_to_string
from reviewboard.site.urlresolvers import local_site_reverse
class BaseSidebarItem(object):
"""Base class for an item on the sidebar of a datagrid.
Items can optionally have labels and counts associated with them.
Depending on the subclass, it may also be able to nest items.
They may also have custom templates, for more advanced rendering.
See :py:class:`SidebarNavItem` and :py:class:`BaseSidebarSection` for the
common types of sidebar items.
Attributes:
datagrid (djblets.datagrid.grids.DataGrid):
The datagrid containing this item.
sidebar (Sidebar):
The sidebar containing this item.
"""
#: The template to use for rendering this item in the sidebar.
template_name = None
#: The displayed label for the item.
label = None
#: The name of the optional CSS icon to use beside the label.
icon_name = None
#: The datagrid "view" to link to when clicking this item.
#:
#: This corresponds to the ``?view=`` parameter passed to the datagrid
#: page.
view_id = None
#: Additional key/values to pass to the URL when clicking this item.
#:
#: If provided, this must be a dictionary of keys and values for the
#: URL. The keys and values will be automatically URL-encoded.
view_args = None
#: Additional CSS classes to include for the item.
css_classes = None
def __init__(self, sidebar, datagrid):
"""Initialize the sidebar item.
Args:
sidebar (Sidebar):
The sidebar containing this item.
datagrid (djblets.datagrid.grids.DataGrid):
The datagrid containing this item.
"""
self.sidebar = sidebar
self.datagrid = datagrid
def get_url(self):
"""Return the URL used when clicking the item.
By default, this builds a URL to the parent datagrid using
the ``view_id`` and ``view_args`` attributes. If they are not
set, then the item won't be clickable.
Returns:
unicode:
The URL to the dashboard view represented by this item.
"""
if not self.view_id and not self.view_args:
return None
if self.view_args:
url_args = self.view_args.copy()
else:
url_args = {}
if self.view_id:
url_args['view'] = self.view_id
return '%s?%s' % (self.datagrid.request.path, urlencode(url_args))
def get_count(self):
"""Return the count shown for this item.
By default, this shows nothing. Subclasses can override to display
a count.
Returns:
int:
The count to display beside the item, or ``None`` if no count
should be displayed.
"""
return None
def is_visible(self):
"""Return whether the item is visible.
By default, an item is visible. Subclasses can override this to
control visibility.
Returns:
bool:
``True`` if the item is visible. ``False`` if it's hidden.
"""
return True
def is_active(self):
"""Return whether the item is currently active.
The item will be active if the current page matches the URL
associated with the item.
Returns:
bool:
``True`` if the item represents the active page. ``False`` if it
does not.
"""
if self.view_id is None:
return False
request = self.datagrid.request
view_id = request.GET.get('view', self.sidebar.default_view_id)
if view_id != self.view_id:
return False
if self.view_args:
for key, value in six.iteritems(self.view_args):
if request.GET.get(key) != value:
return False
return True
def render(self):
"""Render the item.
Returns:
django.utils.safestring.SafeText:
The rendered HTML for the item.
"""
count = self.get_count()
context = {
'datagrid': self.datagrid,
'label': self.label,
'icon_name': self.icon_name or '',
'view_id': self.view_id,
'view_args': self.view_args,
'count': count,
'has_count': count is not None,
'url': self.get_url(),
'active': self.is_active(),
'css_classes': self.css_classes or [],
}
context.update(self.get_extra_context())
return render_to_string(template_name=self.template_name,
context=context,
request=self.datagrid.request)
def get_extra_context(self):
"""Return extra context for the render.
Returns:
dict:
A dictionary of addit | ional template context. By default, this is
empty.
"""
return {}
class BaseSidebarSection(BaseSidebarItem):
"""Base class for a section of items on the sidebar.
Subclasses can override this to define a section and provide items
listed in the section.
Sections can optionally be clickable and dis | play a count.
"""
template_name = 'datagrids/sidebar_section.html'
def __init__(self, *args, **kwargs):
"""Initialize the section.
Args:
*args (tuple):
Positional arguments to pass to the parent class.
**kwargs (dict):
Keyword arguments to pass to the parent class.
"""
super(BaseSidebarSection, self).__init__(*args, **kwargs)
self.items = list(self.get_items())
def get_items(self):
"""Return the items displayed in this section.
Subclasses must override this and return or yield the items
to be displayed.
Returns:
list of BaseSidebarItem:
The list of items to display in the section.
"""
raise NotImplementedError
def is_visible(self):
"""Return whether the section is visible.
By default, a section is visible if it has any item classes
registered. Subclasses can override this to provide more specific
logic.
Returns:
bool:
``True`` if the section is visible. ``False`` if it's hidden.
"""
return len(self.items) > 0
def get_extra_context(self):
"""Return extra context for the section.
Subclasses that override this method must call the parent method.
Returns:
dict:
Additional template context for the rendering of the section.
"""
return {
'items': self.items,
}
class SidebarNavItem(BaseSidebarItem):
"""A typical navigation link item on the sidebar.
This is the standard type of item added to sections on a sidebar. An item
can contain an explicit URL or a resolvable URL name to link to. If not
provided, the current datagrid page's URL will be used along with query
arguments built from :py:attr:`view_id` and :py:attr:`view_args`.
"""
template_name = 'datagrids/sidebar_nav_item.html'
def __init__(self, section, label, icon_name=None, view_id=None,
view_args=None, count=None, url=None, url_name=None,
css_classes=None):
"""Initialize the item.
Args:
section (BaseSidebarSection):
The section that should contain this item.
label (unicode):
The displayed label for this item.
icon_name (unicode, optional):
The name of the optional CSS icon to display beside the label.
view_id (unicode, optional):
The ID of the optional datagrid view to display when clicking
the item |
nsmoooose/csp | csp/base/applog.py | Python | gpl-2.0 | 1,162 | 0.001721 | # Copyright 2004 Mark Rose <mkrose@users.sour | ceforge.net>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Gene | ral Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
Define standard formating for application logs.
"""
import sys
import logging
logging.addLevelName(logging.DEBUG, 'D')
logging.addLevelName(logging.INFO, 'I')
logging.addLevelName(logging.WARNING, 'W')
logging.addLevelName(logging.ERROR, 'E')
logging.addLevelName(logging.CRITICAL, 'C')
formatter = logging.Formatter('%(asctime)s%(levelname)s_%(module)s.%(lineno)d %(message)s', '%m%d%H%M%S')
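# Minimal wiring sketch (illustrative, not part of the original module):
# attach the formatter to a stderr handler so records render as
# "MMDDHHMMSS<level>_<module>.<lineno> <message>".
if __name__ == '__main__':
    handler = logging.StreamHandler(sys.stderr)
    handler.setFormatter(formatter)
    root = logging.getLogger()
    root.addHandler(handler)
    root.setLevel(logging.DEBUG)
    root.info('application log configured')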
|
ruchee/vimrc | vimfiles/bundle/vim-python/submodules/astroid/astroid/brain/brain_scipy_signal.py | Python | mit | 2,437 | 0.00041 | # Copyright (c) 2019 Valentin Valls <valentin.valls@esrf.fr>
# Copyright (c) 2020-2021 hippo91 <guill | aume.peillex@gmail.com>
# Copyright (c) 2020 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licen | ses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/LICENSE
"""Astroid hooks for scipy.signal module."""
import astroid
def scipy_signal():
return astroid.parse(
"""
# different functions defined in scipy.signals
def barthann(M, sym=True):
return numpy.ndarray([0])
def bartlett(M, sym=True):
return numpy.ndarray([0])
def blackman(M, sym=True):
return numpy.ndarray([0])
def blackmanharris(M, sym=True):
return numpy.ndarray([0])
def bohman(M, sym=True):
return numpy.ndarray([0])
def boxcar(M, sym=True):
return numpy.ndarray([0])
def chebwin(M, at, sym=True):
return numpy.ndarray([0])
def cosine(M, sym=True):
return numpy.ndarray([0])
def exponential(M, center=None, tau=1.0, sym=True):
return numpy.ndarray([0])
def flattop(M, sym=True):
return numpy.ndarray([0])
def gaussian(M, std, sym=True):
return numpy.ndarray([0])
def general_gaussian(M, p, sig, sym=True):
return numpy.ndarray([0])
def hamming(M, sym=True):
return numpy.ndarray([0])
def hann(M, sym=True):
return numpy.ndarray([0])
def hanning(M, sym=True):
return numpy.ndarray([0])
def impulse2(system, X0=None, T=None, N=None, **kwargs):
return numpy.ndarray([0]), numpy.ndarray([0])
def kaiser(M, beta, sym=True):
return numpy.ndarray([0])
def nuttall(M, sym=True):
return numpy.ndarray([0])
def parzen(M, sym=True):
return numpy.ndarray([0])
def slepian(M, width, sym=True):
return numpy.ndarray([0])
def step2(system, X0=None, T=None, N=None, **kwargs):
return numpy.ndarray([0]), numpy.ndarray([0])
def triang(M, sym=True):
return numpy.ndarray([0])
def tukey(M, alpha=0.5, sym=True):
return numpy.ndarray([0])
"""
)
astroid.register_module_extender(astroid.MANAGER, "scipy.signal", scipy_signal)
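# Illustrative check (not part of the original module; assumes astroid and
# scipy are importable): with the extender registered, astroid infers the
# call below as the numpy.ndarray stub parsed above.
if __name__ == '__main__':
    node = astroid.extract_node('import scipy.signal\nscipy.signal.hann(8)')
    print(next(node.infer()))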
|
openlabs/trytond | trytond/tests/test_mixins.py | Python | gpl-3.0 | 1,711 | 0.005845 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#This file is part of Tryton. The COPYRIGHT file at the top level of
#this repository contains th | e full copyright | notices and license terms.
import unittest
import urllib
from trytond.tests.test_tryton import (POOL, DB_NAME, USER, CONTEXT,
install_module)
from trytond.transaction import Transaction
from trytond.url import HOSTNAME
class UrlTestCase(unittest.TestCase):
"Test URL generation"
def setUp(self):
install_module('test')
self.urlmodel = POOL.get('test.urlobject')
self.urlwizard = POOL.get('test.test_wizard', type='wizard')
self.hostname = HOSTNAME
def testModelURL(self):
"Test model URLs"
with Transaction().start(DB_NAME, USER, context=CONTEXT):
self.assertEqual(self.urlmodel.__url__,
'tryton://%s/%s/model/test.urlobject' % (self.hostname,
urllib.quote(DB_NAME)))
self.assertEqual(self.urlmodel(1).__url__,
'tryton://%s/%s/model/test.urlobject/1' % (self.hostname,
urllib.quote(DB_NAME)))
def testWizardURL(self):
"Test wizard URLs"
with Transaction().start(DB_NAME, USER, context=CONTEXT):
self.assertEqual(self.urlwizard.__url__,
'tryton://%s/%s/wizard/test.test_wizard' % (self.hostname,
urllib.quote(DB_NAME)))
def suite():
func = unittest.TestLoader().loadTestsFromTestCase
suite = unittest.TestSuite()
for testcase in (UrlTestCase,):
suite.addTests(func(testcase))
return suite
if __name__ == '__main__':
suite = suite()
unittest.TextTestRunner(verbosity=2).run(suite)
|
skoolkid/pyskool | pyskool/skoolsound.py | Python | gpl-3.0 | 18,865 | 0.004665 | # -*- coding: utf-8 -*-
# Copyright 2013, 2014 Richard Dymond (rjdymond@gmail.com)
#
# This file is part of Pyskool.
#
# Pyskool is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# Pyskool is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# Pyskool. If not, see <http://www.gnu.org/licenses/>.
import os
import math
FRAME_T_STATES = 69888
INTERRUPT_DELAY = 942
CONTENTION_FACTOR = 0.34
SKOOL_DAZE = 'skool_daze'
BACK_TO_SKOOL = 'back_to_skool'
SKOOL_DAZE_TAKE_TOO = 'skool_daze_take_too'
EZAD_LOOKS = 'ezad_looks'
BACK_TO_SKOOL_DAZE = 'back_to_skool_daze'
NOTES = {
'F0': -7,
'G0': -6,
'A1': -5,
'B1': -4,
'C1': -3,
'D1': -2,
'E1': -1,
'F1': 0,
'G1': 1,
'A2': 2,
'B2': 3,
'C2': 4,
'D2': 5,
'E2': 6,
'F2': 7
}
# SD 32263, BTS 24560
PITCH_DATA = (
(47,196), # F1
(53,174), # G1
| (60,154), # A2
(63,145), # B2
(71,129), # C2
(80,114), # D2
(86,107), # E2 ((90,101) in the original games, but unused)
(95,96), # F2
(107,86), # G2 (not in the original games)
)
def delays_to_samples(delays, sample_rate, max_amplitude):
sample_delay = 3500000.0 / sample_rate
samples = []
direction = 1
i | = 0
d0 = 0
d1 = delays[i]
t = 0
while 1:
while t >= d1:
i += 1
if i >= len(delays):
break
d0 = d1
d1 += delays[i]
direction *= -1
if i >= len(delays):
break
sample = direction * int(max_amplitude * math.sin(math.pi * (t - d0) / (d1 - d0)))
if sample > 32767:
sample = 32767
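        # -32768 and 32768 coincide in the unsigned 16-bit encoding produced
        # below (two's-complement wrap), so the negative clamp maps to 32768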
elif sample < -32768:
sample = 32768
elif sample < 0:
sample += 65536
samples.append(sample)
t += sample_delay
return samples
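# Worked example (illustrative, not from the original source): 1750 T-states
# is 0.5 ms at the Spectrum's 3.5 MHz clock, so
#     delays_to_samples([1750] * 4, 44100, 10000)
# renders two full waves of a ~1 kHz tone as unsigned 16-bit sample values.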
def add_contention(delays, contention=True, interrupts=False, cycle=0):
c_start = 14334
c_end = 57248
for i, delay in enumerate(delays):
d = 0
while d < delay:
if interrupts and cycle == 0:
cycle = INTERRUPT_DELAY
if i:
delay += INTERRUPT_DELAY
end = min(FRAME_T_STATES, cycle + delay - d)
if contention and c_start <= end and cycle < c_end:
contended_cycles = min(c_end, end) - max(cycle, c_start)
delay += int(contended_cycles * CONTENTION_FACTOR)
d += end - cycle
cycle = end % FRAME_T_STATES
delays[i] = delay
def sd65122(d, e, h):
delays = []
for n in range(d or 256):
delays.append(13 * (e or 256) + 50)
e = (e + h) & 255
return delays
def bts62155(d, e, h):
delays = []
for n in range(d or 256):
delays.append(13 * (e or 256) + 50)
e = (e + h) & 255
if d & 1:
delays.append(13 * (e or 256) + 52)
return delays
def bts29836(b, de):
e, d = de % 256, de // 256
inner_delay = 13 * (b or 256) + 30
delays = [inner_delay] * ((e or 256) - 1)
if d > 1:
outer_delay = inner_delay + 11
inner_delays = [inner_delay] * 255
for n in range(d - 1):
delays.append(outer_delay)
delays.extend(inner_delays)
if de & 1 == 0:
delays.append(inner_delay + 13)
return delays
def jump():
# SD 60139
delays = sd65122(50, 96, 3)
delays.append(3282)
delays += [2532] * 7 # Walking sound (SD 65088)
add_contention(delays, contention=False, interrupts=True)
return delays
def catapult():
# SD 65141, BTS 63861
delays = sd65122(128, 0, 248)
add_contention(delays, contention=False, interrupts=True)
return delays
def shield():
# SD 58604
return sd65122(64, 0, 254) * 16
def hit(cycle):
# SD 60128
delays = [2532] * 15
delays[7] = 2589
add_contention(delays, contention=False, interrupts=True, cycle=cycle)
return delays
def hit0():
return hit(17472)
def hit1():
return hit(17472 * 3)
def bingo():
# BTS 62178#62404
delays = bts62155(255, 255, 255)
delays += ([83] + delays) * 4
add_contention(delays, contention=False, interrupts=True)
return delays
def sherry():
# BTS 23907#23988
delays = bts62155(0, 0, 2)
add_contention(delays, contention=False, interrupts=True)
return delays
def knocked_out():
# SD 65111, BTS 62094#62147
delays = sd65122(0, 0, 1)
add_contention(delays, contention=False, interrupts=True)
return delays
def mouse():
# BTS 28952#28964
squeak = bts29836(26, 1632)
pause_delay = 399464 + squeak[0]
delays = squeak + ([pause_delay] + squeak[1:]) * 2
add_contention(delays, interrupts=True)
return delays
def conker():
# BTS 29896#29978
delays = bts29836(40, 10240)
add_contention(delays, interrupts=True)
return delays
def safe_key():
# BTS 30804#30853
delays = bts29836(1, 256)
for n in range(255, 0, -1):
subdelays = bts29836((n & 63) + 1, 256)
delays.append(119 + subdelays[0])
delays.extend(subdelays[1:])
add_contention(delays, interrupts=True)
return delays
def bts_bell():
# BTS 32433#32492
delays = bts29836(128, 4096)
add_contention(delays, interrupts=True)
return delays
def sd_bell():
# SD 26450
delays = [1718] * 4600
add_contention(delays)
return delays
def sd_lines1():
# SD 30464#30544
delays = []
inner_delays = [296] * 255
for d in range(39):
delays.extend(inner_delays)
delays.append(307)
delays.extend(inner_delays)
add_contention(delays, interrupts=True)
return delays
def sd_lines2():
# SD 30464#30575
delays = []
inner_delays = [686] * 255
for d in range(19):
delays.extend(inner_delays)
delays.append(697)
delays.extend(inner_delays)
add_contention(delays, interrupts=True)
return delays
def bts_lines1():
# BTS 29716#29790
delays = bts29836(20, 10240)
add_contention(delays, interrupts=True)
return delays
def bts_lines2():
# BTS 29716#29818
delays = bts29836(50, 5120)
add_contention(delays, interrupts=True)
return delays
def convert_notes(notes, offset=0, tempo=1):
data = []
for note_spec in notes.split():
elements = note_spec.split('-')
beats = int(elements[1]) / float(tempo)
if elements[0] == 'R':
datum = (beats, None, None)
else:
note = NOTES[elements[0]] + offset
silence = 1 if len(elements) < 3 else 0
datum = (int(beats * 4) - silence, note, silence)
data.append(datum)
return data
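# Worked example (illustrative): with the default offset and tempo,
# convert_notes("C2-2") gives [(7, 4, 1)] -- note index 4 (C2) held for
# 4*2 - 1 quarter-beats plus a trailing silent quarter-beat, whereas
# "C2-2-0" keeps all 8 quarter-beats: [(8, 4, 0)].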
def tune(notes):
# SD 32279
delays = []
for i, (beats, note, silence) in enumerate(notes):
if note is None:
delays.append(int(beats * 60543.5))
else:
duration, pitch = PITCH_DATA[note]
duration *= beats
duration //= 2
if i:
gap = 207 + 13 * prev_pitch + 24 * beats
if silence:
gap += 61617
delays.append(gap)
delays.extend([13 * pitch + 51] * (duration - 1))
prev_pitch = pitch
add_contention(delays)
return delays
def sd_tune():
notes = ' '.join((
'C2-2 A2-1 B2-2 G1-1',
'C2-2 A2-1 F1-2 F1-1',
'G1-2 A2-1 B2-1-0 A2-1 G1-1',
'C2-2 A2-1 F1-3',
'C2-2 A2-1 B2-1 B2-1 G1-1',
'C2-2 A2-1 F1-3',
'G1-1 G1-1 A2-1 B2-1 A2-1 G1-1',
'C2-2 A2-1 F1-3'
))
return tune(convert_notes(notes))
def all_shields():
notes = ' '.join((
'B2-1 B2-1 B2-1 C2-1 D2-2 C2-2',
'B2-1 D2-1 C2-1 C2-1 B2-3 R-4',
'B2-1 B2-1 B2-1 C2-1 D2-2 C2- |
marcostx/BatCNN | mlp/mlp.py | Python | mit | 1,571 | 0.019096 | import sys
import numpy as np
from sklearn.model_selection import StratifiedKFold
from sklearn.neural_network import MLPClassifier
from sklearn.metrics import f1_score,accuracy_score, recall_score, precision_score
import scipy
from random import shuffle
def load_dataset(filename):
    x = []
    y = []
    with open(filename) as f:
        for line in f:
            v = line.rstrip('\n').split(',')
            vf = [float(i) for i in v[:-1]]
            x.append(vf)
            y.append(float(v[-1]))
    return x, y
def inductor(x,y):
clf = MLPClassifier(solver='lbfgs', alpha=1e-5,hidden_layer_sizes=(20, 8), max_iter=1000,random_state=1)
clf.fit(x,y)
return clf
if __name__ == '__main__':
fname = sys.argv[1]
print("loading data ..")
x,y = load_dataset(fname)
x = np.array(x)
y = np.array(y)
n = len(x)
kf = StratifiedKFold(n_splits=3, shuffle=True)
for train_index, test_index in kf.split(x,y):
shuffle(train_index)
shuffle(test_index)
xtrain = x[train_index]
ytrain = y[train_index]
xte | st = x[test_index]
ytest = y[test_index]
print("training ...")
clf = inductor(xtrain,ytrain)
print("predicting ...")
ypred = cl | f.predict(xtest)
print "(accuracy : %4.3f) "%(accuracy_score(ytest,ypred))
print "(f1 : %4.3f) "%(f1_score(ytest,ypred, average='weighted'))
print "(recall : %4.3f) "%(recall_score(ytest,ypred,average='weighted'))
print "(precision : %4.3f) "%(precision_score(ytest,ypred,average='weighted'))
|
wavii/listy-django-cache | listy/deterministic_cached_model.py | Python | mit | 18,356 | 0.004304 | import re
import time
import logging
import traceback
import collections
import cPickle as pickle
from datetime import datetime
from django.db import models
from django.db.models.query import QuerySet
from django.db.models.query_utils import deferred_class_factory
from django.conf import settings
from listy.list_cache import ListCache, NotCachableException
from listy.on_demand_pickle import OnDemandPickle
from listy.utils import dict_merge, filter_into_two, memoized
deferred_class_factory = memoized(deferred_class_factory)
if 'listy.middleware.RequestContextCacheMiddleware' in settings.MIDDLEWARE_CLASSES:
from listy.middleware import object_registry
else:
object_registry = collections.defaultdict(lambda: None)
USE_TUPLES_AS_MINI_OBJECTS = False
log = logging.getLogger(__name__)
class no_cache_flush:
"""
This exists so we can manually update the cache during updates
within the context of the caching mechanism and still cause
auto-flushes to occur when people are changing the cache outside
of the context of the caching mechanism (e.g. the admin panel).
The reason why we would want to update the cache ourselves when
possible is because we can do stuff more performant, such as
prepending to the list rather than just flushing and waiting for
the next access to update the cache.
"""
def __init__(self, backing_store):
# Sometimes we are called with some object that doesn't have a
# cache, that's ok, we can use ourself to set the attribute
# and not cause any harm.
self.cache = getattr(backing_store.manager.model, 'cache', self)
def __enter__(self):
self.cache.no_cache_flush = True
def __exit__(self, type, value, traceback):
self.cache.no_cache_flush = False
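# Hypothetical usage sketch (names below are illustrative, not a documented
# API):
#
#     with no_cache_flush(backing_store):
#         obj.some_field = new_value
#         obj.save()  # persists without triggering the automatic cache flush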
class CachingManager(models.Manager):
"""
This class adds caching to a model. Cached accesses will need to go
through the ListCache which is an attribute hanging off of the
model named 'cache'. ALL updates MUST go through that object. If
you try to update the cache through me you will get an exception.
You are allowed to do read queries through me but I'm going to log
a warning when you do naughty things like that.
The initialization arguments are exactly those of ListCache.
Inlined Foreign Keys
--------------------
If you provide the parameter inlined_foreign_key_fields to the
CachingManager it will cache the configured foreign key with the
data from the configured fields and it will construct deferred
fields for everything else after you get an object from the cache.
This means that if you regularly access only a subset of the
fields from a foreign key you don't need to cache the entire
object.
"""
use_for_related_fields = True
def __init__(self, *args, **kwargs):
super(CachingManager, self).__init__()
self.args = args
self.kwargs = kwargs
def contribute_to_class(self, model, name):
models.Manager.contribute_to_class(self, model, name)
model._rw_objects = self.__class__.__bases__[0]()
model._rw_objects.contribute_to_class | (model, na | me)
backing_store_class = self.kwargs.pop('django_backing_store_class', DjangoBackingStore)
inlined_foreign_key_fields = self.kwargs.pop('inlined_foreign_key_fields', {})
backing_store = backing_store_class(model._rw_objects, inlined_foreign_key_fields=inlined_foreign_key_fields)
model.cache = ListCache(backing_store, *self.args, **self.kwargs)
def shady_save(self, *args, **kwargs):
# This is shady. Very shady. We are swapping out the
# _base_manager of the model class before we save becuase we
# don't want it to use the caching manager which it will use
# by default when going through the save crap. The stuff we're
# trying to get around occurs in django.db.models.base:save_base().
self.__class__._base_manager = self.__class__._rw_objects
try:
r = super(model, self).save(*args, **kwargs)
if not getattr(model.cache, 'no_cache_flush', False):
model.cache.flush(model.cache.backing_store.kwargs_for_object(self, {}))
return r
finally:
self.__class__._base_manager = self.__class__.objects
model.save = shady_save
def get_query_set(self):
return CachingQuerySet(self.model)
class CachingQuerySet(QuerySet):
"""
QuerySet that uses cache only for primary key lookups. Falls back
to normal db lookups for everything else.
TODO: throw an exception if you are trying to update the db
through this.
Used the following blog posts as inspiration:
http://www.eflorenzano.com/blog/post/drop-dead-simple-django-caching/
http://lazypython.blogspot.com/2008_11_01_archive.html
"""
def filter(self, *args, **kwargs):
pk = None
for val in ('pk', 'pk__exact', 'id', 'id__exact'):
if val in kwargs:
pk = kwargs[val]
break
if pk is not None:
cache = getattr(self.model, 'cache', None)
if cache:
try:
result = None
registry = object_registry[self.model.__name__]
if registry is not None:
if pk in registry:
log.debug('(1) Got result from object registry for %s and %s', self.model.__name__, pk)
result = registry[pk]
if not result:
result = cache.get_one(pk=pk)
if result:
if registry:
log.debug('(2) Put result in object registry for %s and %s', self.model.__name__, pk)
registry[pk] = result
# _result_cache is a django internal, we are setting
# it so that later calls to this query set will pull
# from that.
self._result_cache = [result]
return self
except NotCachableException:
log.warn("")
return super(CachingQuerySet, self).filter(*args, **kwargs)
def get(self, *args, **kwargs):
clone = self.filter(*args, **kwargs)
# NOTE: It's possible that what I'm doing is really really
# bad, I just don't know because I don't understand the code
# well enough. In Django 1.1 the get function did what this
# get function does, but in Django 1.2 it added this two extra
# lines of code:
#
# if self.query.can_filter():
# clone = clone.order_by()
#
# Somehow this code seems to cause Django to return everything
# in the table instead of the query that we pulled out of the
# cache. I really have no idea why this would happen. But,
# because this seems like a harmless change I'm doing it.
num = len(clone)
if num == 1:
return clone._result_cache[0]
if not num:
raise self.model.DoesNotExist("%s matching query does not exist."
% self.model._meta.object_name)
raise self.model.MultipleObjectsReturned("get() returned more than one %s -- it returned %s! Lookup parameters were %s"
% (self.model._meta.object_name, num, kwargs))
class DjangoBackingStore(object):
"""
A ListCache backing store abstraction for Django models.
This class add deterministic caching to django models (i.e.,
caching that keeps the caches as in-sync with the database as
humanly possible). Instead of just supporting primary key lookups,
it also supports lists of objects. I've tried to optimize for
correctness.
"""
def __init__(self, manager, inlined_foreign_key_fields=None):
self.manager = manager
self.inlined_foreign_key_fields = inlined_foreign_key_fields or {}
def name(self):
" |
douglasbagnall/nze-vox | voxutils/paths.py | Python | mit | 671 | 0 | from os.path import dirname, join, abspath
ROOT = dirname(dirname(abspath(__file__)))
DICT = join(ROOT, 'dict')
DICT_NONFREE = join(ROOT, 'dict', 'non-free')
CORPORA_DIR = join(ROOT, 'corpora')
RESAMPLED_16K_DIR = join(CORPORA_DIR, '16k')
SUBCORPUS_DIR = join(CORPORA_DIR, 'resampled')
IGNORED_CORPORA = ['resampled', '16k']
BASE_CORPORA = [' | voxforge', 'wellington', 'hansard']
CORPORA = {x: join(CORPORA_DIR, x) for x in BASE_CORPORA}
CMUDICT = join(DICT, 'cmudict.0.7a')
UNISYN_DICT = join(DICT_NONFREE, 'unisyn-nz.txt')
VOXFORGE_DICT = join(DICT, 'VoxForgeDict')
BEEP_DICT = join(DICT_NONFREE, 'beep | ', 'beep-1.0')
ESPEAK_DICT = join(DICT, 'espeak-corpus+50k.txt')
|
montyly/manticore | tests/ethereum/EVM/test_EVMGAS.py | Python | apache-2.0 | 1,877 | 0.001066 | import struct
import unittest
import json
from manticore.platforms import evm
from manticore.core import state
from manticore.core.smtlib import Operators, ConstraintSet
import os
class EVMTest_GAS(unittest.TestCase):
_multiprocess_can_split_ = True
maxDiff = None
def _execute(self, new_vm):
last_returned = None
last_exception = None
try:
| new_vm.execute()
except evm.Stop as e:
last_exception = "STOP"
except evm.NotEnoughGas:
last_exception = "OOG"
except evm.StackUnderflow:
last_exception = "INSUFFICIENT STACK"
except evm.InvalidOpcode:
last_exceptio | n = "INVALID"
except evm.SelfDestruct:
last_exception = "SUICIDED"
except evm.Return as e:
last_exception = "RETURN"
last_returned = e.data
except evm.Revert:
last_exception = "REVERT"
return last_exception, last_returned
def test_GAS_1(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"Z"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [999998])
if __name__ == "__main__":
unittest.main()
|
ghackebeil/PyORAM | examples/encrypted_storage_sftp.py | Python | mit | 3,559 | 0.001405 | #
# This example measures the performance of encrypted storage
# access through an SSH client using the Secure File
# Transfer Protocol (SFTP).
#
# In order to run this example, you must provide a host
# (server) address along with valid login credentials
#
import os
import random
import time
import pyoram
from pyoram.util.misc import MemorySize
from pyoram.encrypted_storage.encrypted_block_storage import \
EncryptedBlockStorage
import paramiko
import tqdm
pyoram.config.SHOW_PROGRESS_BAR = True
# Set SSH login credentials here
# (by default, we pull these from the environment
# for testing purposes)
ssh_host = os.environ.get('PYORAM_SSH_TEST_HOST')
ssh_username = os.environ.get('PYORAM_SSH_TEST_USERNAME')
ssh_password = os.environ.get('PYORAM_SSH_TEST_PASSWORD')
# Set the storage location and size
storage_name = "heap.bin"
# 4KB block size
block_size = 4000
# one block per bucket in the
# storage heap of height 8
block_count = 2**(8+1)-1
def main():
print("Storage Name: %s" % (storage_name))
print("Block Count: %s" % (block_count))
print("Block Size: %s" % (MemorySize(block_size)))
print("Total Memory: %s"
% (MemorySize(block_size*block_count)))
print("Actual Storage Required: %s"
% (MemorySize(
EncryptedBlockStorage.compute_storage_size(
block_size,
block_count,
storage_type='sftp'))))
print("")
# Start an SSH client using paramiko
print("Starting SSH Client")
with paramiko.SSHClient() as ssh:
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.load_system_host_keys()
ssh.connect(ssh_host,
username=ssh_username,
password=ssh_password)
print("Setting Up Encrypted Block Storage")
setup_start = time.time()
with EncryptedBlockStorage.setup(storage | _name,
block_size,
block_count,
storage_type='sftp',
sshclient=ssh,
ignore_existing=True) as f:
print("Total Setup | Time: %2.f s"
% (time.time()-setup_start))
print("Total Data Transmission: %s"
% (MemorySize(f.bytes_sent + f.bytes_received)))
print("")
# We close the device and reopen it after
# setup to reset the bytes sent and bytes
# received stats.
with EncryptedBlockStorage(storage_name,
key=f.key,
storage_type='sftp',
sshclient=ssh) as f:
test_count = 1000
start_time = time.time()
for t in tqdm.tqdm(list(range(test_count)),
desc="Running I/O Performance Test"):
f.read_block(random.randint(0,f.block_count-1))
stop_time = time.time()
print("Access Block Avg. Data Transmitted: %s (%.3fx)"
% (MemorySize((f.bytes_sent + f.bytes_received)/float(test_count)),
(f.bytes_sent + f.bytes_received)/float(test_count)/float(block_size)))
print("Access Block Avg. Latency: %.2f ms"
% ((stop_time-start_time)/float(test_count)*1000))
print("")
if __name__ == "__main__":
main() # pragma: no cover
|
sridevikoushik31/openstack | nova/tests/test_metadata.py | Python | apache-2.0 | 22,819 | 0.000614 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for metadata service."""
import base64
import copy
import hashlib
import hmac
import json
import re
from oslo.config import cfg
import webob
from nova.api.metadata import base
from nova.api.metadata import handler
from nova.api.metadata import password
from nova import block_device
from nova.conductor import api as conductor_api
from nova import db
from nova.db.sqlalchemy import api
from nova import exception
from nova.network import api as network_api
from nova import test
from nova.tests import fake_network
CONF = cfg.CONF
USER_DATA_STRING = ("This is an encoded string")
ENCODE_USER_DATA_STRING = base64.b64encode(USER_DATA_STRING)
INSTANCES = (
{'id': 1,
'uuid': 'b65cee2f-8c69-4aeb-be2f-f79742548fc2',
'name': 'fake',
'project_id': 'test',
'key_name': "mykey",
'key_data': "ssh-rsa AAAAB3Nzai....N3NtHw== someuser@somehost",
'host': 'test',
'launch_index': 1,
'instance_type': {'name': 'm1.tiny'},
'reservation_id': 'r-xxxxxxxx',
'user_data': ENCODE_USER_DATA_STRING,
'image_ref': 7,
'vcpus': 1,
'fixed_ips': [],
'root_device_name': '/dev/sda1',
'info_cache': {'network_info': []},
'hostname': 'test.novadomain',
'display_name': 'my_displayname',
},
)
def return_non_existing_address(*args, **kwarg):
raise exception.NotFound()
def fake_InstanceMetadata(stubs, inst_data, address=None,
                          sgroups=None, content=None, extra_md=None):
    # use None defaults to avoid shared mutable default arguments
    if sgroups is None:
        sgroups = [{'name': 'default'}]
    content = content if content is not None else []
    extra_md = extra_md if extra_md is not None else {}
def sg_get(*args, **kwargs):
return sgroups
stubs.Set(api, 'security_group_get_by_instance', sg_get)
return base.InstanceMetadata(inst_data, address=address,
content=content, extra_md=extra_md)
def fake_request(stubs, mdinst, relpath, address="127.0.0.1",
fake_get_metadata=None, headers=None,
fake_get_metadata_by_instance_id=None):
def get_metadata_by_remote_address(address):
return mdinst
app = handler.MetadataRequestHandler()
if fake_get_metadata is None:
fake_get_metadata = get_metadata_by_remote_address
if stubs:
stubs.Set(app, 'get_metadata_by_remote_address', fake_get_metadata)
if fake_get_metadata_by_instance_id:
stubs.Set(app, 'get_metadata_by_instance_id',
fake_get_metadata_by_instance_id)
request = webob.Request.blank(relpath)
request.remote_addr = address
if headers is not None:
request.headers.update(headers)
response = request.get_response(app)
return response
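# Typical call shape (illustrative): fake_request(self.stubs, mdinst,
# "/2009-04-04/meta-data/local-hostname") drives the metadata handler with a
# stubbed lookup and returns the resulting webob response.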
class MetadataTestCase(test.TestCase):
def setUp(self):
super(MetadataTestCase, self).setUp()
self.instance = INSTANCES[0]
self.flags(use_local=True, group='conductor')
fake_network.stub_out_nw_api_get_instance_nw_info(self.stubs,
spectacular=True)
def test_user_data(self):
inst = copy.copy(self.instance)
inst['user_data'] = base64.b64encode("happy")
md = fake_InstanceMetadata(self.stubs, inst)
self.assertEqual(
md.get_ec2_metadata(version='2009-04-04')['user-data'], "happy")
def test_no_user_data(self):
inst = copy.copy(self.instance)
del inst['user_data']
md = fake_InstanceMetadata(self.stubs, inst)
obj = object()
self.assertEqual(
md.get_ec2_metadata(version='2009-04-04').get('user-data', obj),
obj)
def test_security_groups(self):
inst = copy.copy(self.instance)
sgroups = [{'name': 'default'}, {'name': 'other'}]
expected = ['default', 'other']
md = fake_InstanceMetadata(self.stubs, inst, sgroups=sgroups)
data = md.get_ec2_metadata(version='2009-04-04')
self.assertEqual(data['meta-data']['security-groups'], expected)
def test_local_hostname_fqdn(self):
md = fake_InstanceMetadata(self.stubs, copy.copy(self.instance))
data = md.get_ec2_metadata(version='2009-04-04')
self.assertEqual(data['meta-data']['local-hostname'],
"%s.%s" % (self.instance['hostname'], CONF.dhcp_domain))
def test_format_instance_mapping(self):
# Make sure that _format_instance_mappings works.
ctxt = None
instance_ref0 = {'id': 0,
'uuid': 'e5fe5518-0288-4fa3-b0c4-c79764101b85',
'root_device_name': None}
instance_ref1 = {'id': 0,
'uuid': 'b65cee2f-8c69-4aeb-be2f-f79742548fc2',
'root_device_name': '/dev/sda1'}
def fake_bdm_get(ctxt, uuid):
return [{'volume_id': 87654321,
'snapshot_id': None,
'no_device': None,
'virtual_name': None,
'delete_on_termination': True,
'device_name': '/dev/sdh'},
{'volume_id': None,
'snapshot_id': None,
'no_device': None,
'virtual_name': 'swap',
'delete_on_termination': None,
'device_name': '/dev/sdc'},
{'volume_id': None,
'snapshot_id': None,
'no_device': None,
| 'virtual_name': 'ephemeral0',
'delete_on_termination': None,
'device_name': '/dev/sdb'}]
self.stubs.Set(db, 'block_device_mapping_get_all_by_instance',
fake_bdm_get)
expected = {'ami': 'sda1',
'root': '/dev/sda1',
'eph | emeral0': '/dev/sdb',
'swap': '/dev/sdc',
'ebs0': '/dev/sdh'}
capi = conductor_api.LocalAPI()
self.assertEqual(base._format_instance_mapping(capi, ctxt,
instance_ref0), block_device._DEFAULT_MAPPINGS)
self.assertEqual(base._format_instance_mapping(capi, ctxt,
instance_ref1), expected)
def test_pubkey(self):
md = fake_InstanceMetadata(self.stubs, copy.copy(self.instance))
pubkey_ent = md.lookup("/2009-04-04/meta-data/public-keys")
self.assertEqual(base.ec2_md_print(pubkey_ent),
"0=%s" % self.instance['key_name'])
self.assertEqual(base.ec2_md_print(pubkey_ent['0']['openssh-key']),
self.instance['key_data'])
def test_image_type_ramdisk(self):
inst = copy.copy(self.instance)
inst['ramdisk_id'] = 'ari-853667c0'
md = fake_InstanceMetadata(self.stubs, inst)
data = md.lookup("/latest/meta-data/ramdisk-id")
self.assertTrue(data is not None)
self.assertTrue(re.match('ari-[0-9a-f]{8}', data))
def test_image_type_kernel(self):
inst = copy.copy(self.instance)
inst['kernel_id'] = 'aki-c2e26ff2'
md = fake_InstanceMetadata(self.stubs, inst)
data = md.lookup("/2009-04-04/meta-data/kernel-id")
self.assertTrue(re.match('aki-[0-9a-f]{8}', data))
self.assertEqual(
md.lookup("/ec2/2009-04-04/meta-data/kernel-id"), data)
del inst['kernel_id']
md = fake_InstanceMetadata(self.stubs, inst)
self.assertRaises(base.InvalidMetadataPath,
md.lookup, "/2009-04-04/meta-data/k |
karellodewijk/wottactics | extra/download_hots_map/dragon-shire/download_hots_map.py | Python | mit | 1,370 | 0.026277 | import os;
link = "http://media.blizzard.com/heroes/images/battlegrounds/maps/dragon-shire/main/6/"
column = 0;
rc_column = 0;
while (rc_column == 0):
row = 0;
rc_column = os.system('wget ' + link + str(column) + '/' + str(row) + '.jpg -O ' + str(1000 + column) + '-' + str(1000 + row) + '.jpg')
rc_row = rc_column
while (rc_row == 0):
row += 1
rc_row = os.system('wget ' + link + str(column) + '/' + str(row) + '.jpg -O ' + str(1000 + column) + '-' + str(1000 + row) + '.jpg')
column += 1
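# The probe loops rely on wget's exit status: os.system() returns non-zero
# once a tile URL 404s, which marks the edge of the tile grid. The 1000
# offset in the file names keeps plain `ls` sorting in numeric order.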
p = os.popen('ls -1 *.jpg | tail -n2');
second_last_file = p.readline | ();
last_file = p.readline();
column_end = last_file[0:4]
row_end = second_last_file[5:9]
print column_end
print row_end
os.system('rm ' + column_end + '*');
os.system('rm *-' + row_end + '.jpg');
column_end = int(column_end) - 1000;
row_end = int(row_end) - 1000;
os.system('mkdir temp')
i = 0;
for r in range(0, r | ow_end):
for c in range(0, column_end):
file_to_move = str(1000 + c) + '-' + str(1000 + row_end - r - 1) + '.jpg'
os.system('cp ' + file_to_move + ' ./temp/' + str(100000 + i) + '.jpg');
i += 1
os.system('montage ./temp/*.jpg -tile ' + str(column_end) + 'x' + str(row_end) + ' -geometry +0+0 result.png');
os.system('montage ./temp/*.jpg -tile ' + str(column_end) + 'x' + str(row_end) + ' -geometry +0+0 result.jpg');
os.system('rm temp -r');
os.system('rm 1*.jpg');
|
paplorinc/intellij-community | python/testData/paramInfo/TypingCallableWithKnownParameters.py | Python | apache-2.0 | 104 | 0.019231 | from t | yping import Callable
def f() -> Callable[[int, str], int]:
pass
|
c = f()
print(c(<arg1>))
|
gregrperkins/closure-library | closure/bin/build/jscompiler_test.py | Python | apache-2.0 | 2,516 | 0.002782 | #!/usr/bin/env python
#
# Copyright 2013 | The Closure Library Authors. All Rights Reserved.
#
# Licensed u | nder the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit test for depstree."""
__author__ = 'nnaze@google.com (Nathan Naze)'
import unittest
import jscompiler
class JsCompilerTestCase(unittest.TestCase):
"""Unit tests for jscompiler module."""
def testGetJsCompilerArgs(self):
args = jscompiler._GetJsCompilerArgs(
'path/to/jscompiler.jar',
1.6,
['path/to/src1.js', 'path/to/src2.js'],
['--test_jvm_flag'],
['--test_compiler_flag']
)
self.assertEqual(
['java', '-client', '--test_jvm_flag',
'-jar', 'path/to/jscompiler.jar',
'--js', 'path/to/src1.js',
'--js', 'path/to/src2.js', '--test_compiler_flag'],
args)
args = jscompiler._GetJsCompilerArgs(
'path/to/jscompiler.jar',
1.7,
['path/to/src1.js', 'path/to/src2.js'],
['--test_jvm_flag'],
['--test_compiler_flag'])
self.assertEqual(
['java', '-d32', '-client', '--test_jvm_flag',
'-jar', 'path/to/jscompiler.jar',
'--js', 'path/to/src1.js',
'--js', 'path/to/src2.js',
'--test_compiler_flag'],
args)
self.assertRaises(
jscompiler.JsCompilerError,
lambda: jscompiler._GetJsCompilerArgs(
'path/to/jscompiler.jar',
1.5,
['path/to/src1.js', 'path/to/src2.js'],
['--test_jvm_flag'],
['--test_compiler_flag']))
def testGetJavaVersion(self):
def assertVersion(expected, version_string):
self.assertEquals(expected, version_string)
assertVersion(1.7, _TEST_JAVA_VERSION_STRING)
assertVersion(1.4, 'java version "1.4.0_03-ea"')
_TEST_JAVA_VERSION_STRING = """\
openjdk version "1.7.0-google-v5"
OpenJDK Runtime Environment (build 1.7.0-google-v5-64327-39803485)
OpenJDK Server VM (build 22.0-b10, mixed mode)
"""
if __name__ == '__main__':
unittest.main()
|
winstonschroeder77/fitch- | fitch_app/webui/fitch/fitch/wsgi.py | Python | gpl-3.0 | 385 | 0.002597 | """
WSGI config for fitch project.
It exposes the WSGI callable as a module-level variable na | med ``application``.
For mor | e information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "fitch.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
tanonev/codewebs | src/analogyFinder/src/daemons/example/exampleclient.py | Python | mit | 1,439 | 0.013899 | #! /usr/bin/env python
import json
import sys
sys.path.append('..')
from CodewebsIndexClient import CodewebsIndexClient
def loadTextFile(fname):
with open(fname) as fid:
return fid.read()
def wrap(ast,code,map,codeblockid):
astjson = json.loads(ast)
wrappedJSON = {'ast': astjson,
'code': code,
'map': map,
'codeblockid': codeblockid,
'startline': 4,
'startlineindex': 20,
'endline': 4,
'endlinei | ndex': 28,
'querytype': 3}
| #return json.dumps(wrappedJSON,sort_keys = True,indent=4,separators=(',',': '))
return json.dumps(wrappedJSON)
def run():
codeblockid = 30
asttext = loadTextFile('ast.json')
codetext = loadTextFile('code')
maptext = loadTextFile('map')
inputJSON = wrap(asttext,codetext,maptext,codeblockid)
asttext_bad = loadTextFile('ast_pinv.json')
codetext_bad = loadTextFile('code_pinv')
maptext_bad = loadTextFile('map_pinv')
inputJSON_bad = wrap(asttext_bad,codetext_bad,maptext_bad,codeblockid)
cwindex = CodewebsIndexClient()
print " [x] Requesting!"
response = cwindex.call(inputJSON)
print " [.] Got %r" % (response,)
print " [x] Requesting!"
response = cwindex.call(inputJSON_bad)
print " [.] Got %r" % (response,)
if __name__ == '__main__':
run()
|
cybercomgroup/Big_Data | Cloudera/Code/Titanic_Dataset/title_surv.py | Python | gpl-3.0 | 1,344 | 0.040923 | import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
# Seperate titles, French -> english, others to rare.
def engTitle(title):
if title in ["Miss", "Mrs", "Mr", "Dr", "Master"]:
return title
elif title in ["Mme", "Ms"]:
return "Mrs"
elif title == "Mlle":
return "Miss"
else:
return "Rare" #Include Major, Sir, the Countess, Lady, Jonkheer, Rev etc
def getTitleFromName(name):
name = name.split(",")
name = name[1 | ].split(".")
return en | gTitle( name[0].strip() )
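# e.g. (illustrative names) getTitleFromName("Doe, Mr. John") -> "Mr", and
# getTitleFromName("Doe, Mlle. Jane") -> "Miss" via the engTitle mapping.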
df = pd.read_csv("train.csv")
df["Title"] = df.apply(lambda row: getTitleFromName(row["Name"]), axis = 1)
titles = df["Title"].unique()
index = np.arange( len(titles) )
opacity = 0.5
bar_width = 0.3
total = []
survived = []
for title in titles:
t_all = df[ df["Title"] == title ]
total.append( len(t_all) )
survived.append( len( t_all[ t_all["Survived"] == 1] ) )
for i in range(0, len(titles)):
    # Use true division so the ratio is correct under Python 2 as well.
    s = titles[i] + "\t-\t tot: " + str(total[i]) + ", surv: " + str(survived[i]) + ", ratio: " + str(float(survived[i]) / total[i])
print(s)
plt.bar(index, tuple(total), bar_width, alpha = opacity, color = 'b', label = "Total" )
plt.bar(index + bar_width, tuple(survived), bar_width, alpha = opacity, color = 'r', label = "Survived" )
plt.xlabel("Title")
plt.ylabel("Count")
plt.legend()
plt.xticks(index + bar_width / 2, tuple(titles) )
plt.show()
|
googleapis/python-contact-center-insights | setup.py | Python | apache-2.0 | 2,576 | 0.000388 | # -*- coding: utf-8 -*-
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or | agreed to in writing, software
# distributed under the License | is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import io
import os
import setuptools
name = "google-cloud-contact-center-insights"
description = "Contact Center AI Insights API"
version = "1.3.1"
release_status = "Development Status :: 5 - Production/Stable"
url = "https://github.com/googleapis/python-contact-center-insights"
dependencies = [
# NOTE: Maintainers, please do not require google-api-core>=2.x.x
# Until this issue is closed
# https://github.com/googleapis/google-cloud-python/issues/10566
"google-api-core[grpc] >= 1.31.5, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0",
"proto-plus >= 1.15.0",
]
package_root = os.path.abspath(os.path.dirname(__file__))
readme_filename = os.path.join(package_root, "README.rst")
with io.open(readme_filename, encoding="utf-8") as readme_file:
readme = readme_file.read()
packages = [
package
for package in setuptools.PEP420PackageFinder.find()
if package.startswith("google")
]
namespaces = ["google"]
if "google.cloud" in packages:
namespaces.append("google.cloud")
setuptools.setup(
name=name,
version=version,
description=description,
long_description=readme,
author="Google LLC",
author_email="googleapis-packages@google.com",
license="Apache 2.0",
url=url,
classifiers=[
release_status,
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Operating System :: OS Independent",
"Topic :: Internet",
],
platforms="Posix; MacOS X; Windows",
packages=packages,
python_requires=">=3.6",
namespace_packages=namespaces,
install_requires=dependencies,
include_package_data=True,
zip_safe=False,
)
|
baverman/snaked | snaked/signals/util.py | Python | mit | 375 | 0.008 | def app | end_attr(obj, attr, value):
"""
Appends value to object attribute
Attribute may be undefined
For example:
append_attr(obj, 'test', 1)
append_attr(obj, 'test', 2)
assert obj.test == [1, 2]
"""
try:
getattr(obj, attr).append(value)
except AttributeError:
setattr(obj, attr, [val | ue])
|
hainm/open-forcefield-group | ideas/bayesian-gbsa-parameterization/evaluate-gbsa.py | Python | gpl-2.0 | 24,285 | 0.010089 | #!/usr/bin/env python
#=============================================================================================
# MODULE DOCSTRING
#=============================================================================================
"""
evaluate-gbsa.py
Evaluate the GBSA model on hydration free energies of small molecules for multiple iterations of the Markov chain.
"""
#=============================================================================================
# GLOBAL IMPORTS
#=============================================================================================
import sys,string
from openeye.oechem import *
from optparse import OptionParser # For parsing of command line arguments
import os
import math
import numpy
import simtk.openmm as openmm
import simtk.unit as units
import openeye.oechem
import openeye.oequacpac
import openeye.oeiupac
from openeye.oechem import *
from openeye.oequacpac import *
from openeye.oeszybki import *
from openeye.oeiupac import *
import time
import pymc
#=============================================================================================
# Load OpenMM plugins.
#=============================================================================================
print "Loading OpenMM plugins..."
openmm.Platform.loadPluginsFromDirectory(os.path.join(os.environ['OPENMM_INSTALL_DIR'], 'lib'))
openmm.Platform.loadPluginsFromDirectory(os.path.join(os.environ['OPENMM_INSTALL_DIR'], 'lib', 'plugins'))
#=============================================================================================
# Atom Typer
#=============================================================================================
class AtomTyper(object):
"""
Atom typer
Based on 'Patty', by Pat Walters.
"""
class TypingException(Exception):
"""
Atom typing exception.
"""
def __init__(self, molecule, atom):
self.molecule = molecule
self.atom = atom
def __str__(self):
return "Atom not assigned: %6d %8s" % (self.atom.GetIdx(), OEGetAtomicSymbol(self.atom.GetAtomicNum()))
def __init__(self, infileName, tagname):
self.pattyTag = OEGetTag(tagname)
self.smartsList = []
ifs = open(infileName)
lines = ifs.readlines()
for line in lines:
# Strip trailing comments
index = line.find('%')
if index != -1:
line = line[0:index]
# Split into tokens.
toks = string.split(line)
if len(toks) == 2:
smarts,type = toks
pat = OESubSearch()
pat.Init(smarts)
pat.SetMaxMatches(0)
self.smartsList.append([pat,type,smarts])
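    # Illustrative sketch of the input this parser accepts (assumed format,
    # not a file shipped with the project): each non-comment line holds two
    # whitespace-separated tokens, "<SMARTS> <type>", and '%' starts a
    # trailing comment, e.g.
    #
    #   [#6X4]  CT  % tetrahedral carbon
    #   [#1]    H   % hydrogen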
def dump(self):
for pat,type,smarts in self.smartsList:
print pat,type,smarts
def assignTypes(self,mol):
# Assign null types.
for atom in mol.GetAtoms():
atom.SetStringData(self.pattyT | ag, "")
# Assign atom types using rules.
OEAssignAromaticFlags(mol)
for pat,type,smarts in self.smartsList:
for matchbase in pat.Match(mol):
for matchpair in matchbase.GetAtoms():
matchpair.target.SetStringData(self.pattyTag,type)
# Check if any atoms remain unassigned.
for atom in mol.GetAtoms():
| if atom.GetStringData(self.pattyTag)=="":
raise AtomTyper.TypingException(mol, atom)
def debugTypes(self,mol):
for atom in mol.GetAtoms():
print "%6d %8s %8s" % (atom.GetIdx(),OEGetAtomicSymbol(atom.GetAtomicNum()),atom.GetStringData(self.pattyTag))
def getTypeList(self,mol):
typeList = []
for atom in mol.GetAtoms():
typeList.append(atom.GetStringData(self.pattyTag))
return typeList
#=============================================================================================
# Utility routines
#=============================================================================================
def read_gbsa_parameters(filename):
"""
Read a GBSA parameter set from a file.
ARGUMENTS
filename (string) - the filename to read parameters from
RETURNS
parameters (dict) - parameters[(atomtype,parameter_name)] contains the dimensionless parameter
"""
parameters = dict()
infile = open(filename, 'r')
for line in infile:
# Strip trailing comments
index = line.find('%')
if index != -1:
line = line[0:index]
# Parse parameters
elements = line.split()
if len(elements) == 3:
[atomtype, radius, scalingFactor] = elements
parameters['%s_%s' % (atomtype,'radius')] = float(radius)
parameters['%s_%s' % (atomtype,'scalingFactor')] = float(scalingFactor)
return parameters
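# Illustrative parameter-file layout accepted by the reader above (assumed
# example values, not taken from a real parameter set): three whitespace-
# separated columns per line -- atomtype, radius, scalingFactor -- with '%'
# starting a comment:
#
#   C  1.70  0.72  % generic carbon
#   H  1.20  0.85  % generic hydrogen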
#=============================================================================================
# Computation of hydration free energies
#=============================================================================================
def function(x):
(molecule, parameters) = x
return compute_hydration_energy(molecule, parameters)
def compute_hydration_energies_parallel(molecules, parameters):
import multiprocessing
# Create processor pool.
nprocs = 8
pool = multiprocessing.Pool(processes=nprocs)
x = list()
for molecule in molecules:
x.append( (molecule, parameters) )
# Distribute calculation.
results = pool.map(function, x)
return results
def compute_hydration_energies(molecules, parameters):
"""
Compute solvation energies of all specified molecules using given parameter set.
ARGUMENTS
molecules (list of OEMol) - molecules with atom types
parameters (dict) - parameters for atom types
RETURNS
energies (dict) - energies[molecule] is the computed solvation energy of given molecule
"""
energies = dict() # energies[index] is the computed solvation energy of molecules[index]
platform = openmm.Platform.getPlatformByName("Reference")
for molecule in molecules:
# Create OpenMM System.
system = openmm.System()
for atom in molecule.GetAtoms():
mass = OEGetDefaultMass(atom.GetAtomicNum())
system.addParticle(mass * units.amu)
# Add nonbonded term.
# nonbonded_force = openmm.NonbondedSoftcoreForce()
# nonbonded_force.setNonbondedMethod(openmm.NonbondedForce.NoCutoff)
# for atom in molecule.GetAtoms():
# charge = 0.0 * units.elementary_charge
# sigma = 1.0 * units.angstrom
# epsilon = 0.0 * units.kilocalories_per_mole
# nonbonded_force.addParticle(charge, sigma, epsilon)
# system.addForce(nonbonded_force)
# Add GBSA term
gbsa_force = openmm.GBSAOBCForce()
gbsa_force.setNonbondedMethod(openmm.GBSAOBCForce.NoCutoff) # set no cutoff
gbsa_force.setSoluteDielectric(1)
gbsa_force.setSolventDielectric(78)
# Build indexable list of atoms.
atoms = [atom for atom in molecule.GetAtoms()]
# Assign GBSA parameters.
for atom in molecule.GetAtoms():
atomtype = atom.GetStringData("gbsa_type") # GB atomtype
charge = atom.GetPartialCharge() * units.elementary_charge
radius = parameters['%s_%s' % (atomtype, 'radius')] * units.angstroms
scalingFactor = parameters['%s_%s' % (atomtype, 'scalingFactor')] * units.kilocalories_per_mole
lambda_ = 1.0 # fully interacting
gbsa_force.addParticle(charge, radius, scalingFactor)
# Add the force to the system.
system.addForce(gbsa_force)
# Build coordinate array.
natoms = len(atoms)
coordinates = units.Quantity(numpy.zeros([natoms, 3]), units.angstroms)
for (index,atom) in enumerate(a |
RevansChen/online-judge | Codewars/7kyu/shortest-word/Python/solution1.py | Python | mit | 69 | 0.014493 | # Python - 3.6.0
find_short = lambda | s: min(map(len, s.split | (' ')))
|
kernevil/samba | source4/torture/drs/python/getnc_exop.py | Python | gpl-3.0 | 50,933 | 0.001492 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Tests various schema replication scenarios
#
# Copyright (C) Kamen Mazdrashki <kamenim@samba.org> 2011
# Copyright (C) Andrew Bartlett <abartlet@samba.org> 2016
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be u | seful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
# Usage:
# export DC1=dc1_dns_name
# export DC2=dc2_dns_name
# export SUBUNITRUN=$samba4srcdir/scripting/bin/subunitrun
# PYTH | ONPATH="$PYTHONPATH:$samba4srcdir/torture/drs/python" $SUBUNITRUN getnc_exop -U"$DOMAIN/$DC_USERNAME"%"$DC_PASSWORD"
#
import random
import drs_base
from drs_base import AbstractLink
import samba.tests
from samba import werror, WERRORError
import ldb
from ldb import SCOPE_BASE
from samba.dcerpc import drsuapi, misc, drsblobs
from samba.drs_utils import drs_DsBind
from samba.ndr import ndr_unpack, ndr_pack
from functools import cmp_to_key
from samba.common import cmp
def _linked_attribute_compare(la1, la2):
"""See CompareLinks() in MS-DRSR section 4.1.10.5.17"""
la1, la1_target = la1
la2, la2_target = la2
# Ascending host object GUID
c = cmp(ndr_pack(la1.identifier.guid), ndr_pack(la2.identifier.guid))
if c != 0:
return c
# Ascending attribute ID
if la1.attid != la2.attid:
return -1 if la1.attid < la2.attid else 1
la1_active = la1.flags & drsuapi.DRSUAPI_DS_LINKED_ATTRIBUTE_FLAG_ACTIVE
la2_active = la2.flags & drsuapi.DRSUAPI_DS_LINKED_ATTRIBUTE_FLAG_ACTIVE
# Ascending 'is present'
if la1_active != la2_active:
return 1 if la1_active else -1
# Ascending target object GUID
return cmp(ndr_pack(la1_target), ndr_pack(la2_target))
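# Usage sketch (illustrative; 'links' stands for a hypothetical list of
# (linked_attribute, target_guid) pairs shaped like the arguments above):
#
#   sorted_links = sorted(links, key=cmp_to_key(_linked_attribute_compare))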
class DrsReplicaSyncTestCase(drs_base.DrsBaseTestCase):
"""Intended as a semi-black box test case for DsGetNCChanges
implementation for extended operations. It should be testing
how DsGetNCChanges handles different input params (mostly invalid).
    The final goal is to make DsGetNCChanges as binary-compatible with the
    Windows implementation as possible."""
def setUp(self):
super(DrsReplicaSyncTestCase, self).setUp()
self.base_dn = self.ldb_dc1.get_default_basedn()
self.ou = "OU=test_getncchanges,%s" % self.base_dn
self.ldb_dc1.add({
"dn": self.ou,
"objectclass": "organizationalUnit"})
(self.drs, self.drs_handle) = self._ds_bind(self.dnsname_dc1)
(self.default_hwm, self.default_utdv) = self._get_highest_hwm_utdv(self.ldb_dc1)
def tearDown(self):
try:
self.ldb_dc1.delete(self.ou, ["tree_delete:1"])
except ldb.LdbError as e:
(enum, string) = e.args
if enum == ldb.ERR_NO_SUCH_OBJECT:
pass
super(DrsReplicaSyncTestCase, self).tearDown()
def _determine_fSMORoleOwner(self, fsmo_obj_dn):
"""Returns (owner, not_owner) pair where:
owner: dns name for FSMO owner
not_owner: dns name for DC not owning the FSMO"""
# collect info to return later
fsmo_info_1 = {"dns_name": self.dnsname_dc1,
"invocation_id": self.ldb_dc1.get_invocation_id(),
"ntds_guid": self.ldb_dc1.get_ntds_GUID(),
"server_dn": self.ldb_dc1.get_serverName()}
fsmo_info_2 = {"dns_name": self.dnsname_dc2,
"invocation_id": self.ldb_dc2.get_invocation_id(),
"ntds_guid": self.ldb_dc2.get_ntds_GUID(),
"server_dn": self.ldb_dc2.get_serverName()}
msgs = self.ldb_dc1.search(scope=ldb.SCOPE_BASE, base=fsmo_info_1["server_dn"], attrs=["serverReference"])
fsmo_info_1["server_acct_dn"] = ldb.Dn(self.ldb_dc1, msgs[0]["serverReference"][0].decode('utf8'))
fsmo_info_1["rid_set_dn"] = ldb.Dn(self.ldb_dc1, "CN=RID Set") + fsmo_info_1["server_acct_dn"]
msgs = self.ldb_dc2.search(scope=ldb.SCOPE_BASE, base=fsmo_info_2["server_dn"], attrs=["serverReference"])
fsmo_info_2["server_acct_dn"] = ldb.Dn(self.ldb_dc2, msgs[0]["serverReference"][0].decode('utf8'))
fsmo_info_2["rid_set_dn"] = ldb.Dn(self.ldb_dc2, "CN=RID Set") + fsmo_info_2["server_acct_dn"]
# determine the owner dc
res = self.ldb_dc1.search(fsmo_obj_dn,
scope=SCOPE_BASE, attrs=["fSMORoleOwner"])
assert len(res) == 1, "Only one fSMORoleOwner value expected for %s!" % fsmo_obj_dn
fsmo_owner = res[0]["fSMORoleOwner"][0]
if fsmo_owner == self.info_dc1["dsServiceName"][0]:
return (fsmo_info_1, fsmo_info_2)
return (fsmo_info_2, fsmo_info_1)
def _check_exop_failed(self, ctr6, expected_failure):
self.assertEqual(ctr6.extended_ret, expected_failure)
#self.assertEqual(ctr6.object_count, 0)
#self.assertEqual(ctr6.first_object, None)
self.assertEqual(ctr6.more_data, False)
self.assertEqual(ctr6.nc_object_count, 0)
self.assertEqual(ctr6.nc_linked_attributes_count, 0)
self.assertEqual(ctr6.linked_attributes_count, 0)
self.assertEqual(ctr6.linked_attributes, [])
self.assertEqual(ctr6.drs_error[0], 0)
def test_do_single_repl(self):
"""
Make sure that DRSUAPI_EXOP_REPL_OBJ never replicates more than
one object, even when we use DRS_GET_ANC/GET_TGT.
"""
ou1 = "OU=get_anc1,%s" % self.ou
self.ldb_dc1.add({
"dn": ou1,
"objectclass": "organizationalUnit"
})
ou1_id = self._get_identifier(self.ldb_dc1, ou1)
ou2 = "OU=get_anc2,%s" % ou1
self.ldb_dc1.add({
"dn": ou2,
"objectclass": "organizationalUnit"
})
ou2_id = self._get_identifier(self.ldb_dc1, ou2)
dc3 = "CN=test_anc_dc_%u,%s" % (random.randint(0, 4294967295), ou2)
self.ldb_dc1.add({
"dn": dc3,
"objectclass": "computer",
"userAccountControl": "%d" % (samba.dsdb.UF_ACCOUNTDISABLE | samba.dsdb.UF_SERVER_TRUST_ACCOUNT)
})
dc3_id = self._get_identifier(self.ldb_dc1, dc3)
# Add some linked attributes (for checking GET_TGT behaviour)
m = ldb.Message()
m.dn = ldb.Dn(self.ldb_dc2, ou1)
m["managedBy"] = ldb.MessageElement(ou2, ldb.FLAG_MOD_ADD, "managedBy")
self.ldb_dc1.modify(m)
ou1_link = AbstractLink(drsuapi.DRSUAPI_ATTID_managedBy,
drsuapi.DRSUAPI_DS_LINKED_ATTRIBUTE_FLAG_ACTIVE,
ou1_id.guid, ou2_id.guid)
m.dn = ldb.Dn(self.ldb_dc2, dc3)
m["managedBy"] = ldb.MessageElement(ou2, ldb.FLAG_MOD_ADD, "managedBy")
self.ldb_dc1.modify(m)
dc3_link = AbstractLink(drsuapi.DRSUAPI_ATTID_managedBy,
drsuapi.DRSUAPI_DS_LINKED_ATTRIBUTE_FLAG_ACTIVE,
dc3_id.guid, ou2_id.guid)
req = self._getnc_req10(dest_dsa=None,
invocation_id=self.ldb_dc1.get_invocation_id(),
nc_dn_str=ou1,
exop=drsuapi.DRSUAPI_EXOP_REPL_OBJ,
replica_flags=drsuapi.DRSUAPI_DRS_WRIT_REP,
more_flags=drsuapi.DRSUAPI_DRS_GET_TGT)
(level, ctr) = self.drs.DsGetNCChanges(self.drs_handle, 10, req)
self._check_ctr6(ctr, [ou1], expected_links=[ou1_link])
# DRSUAPI_DRS_WRIT_REP means that we should only replicate the dn we give (dc3).
# DRSUAPI_DRS_GET_ANC means that we should also replicat |
google-research/tf-slim | tf_slim/training/__init__.py | Python | apache-2.0 | 701 | 0 | # coding=utf-8
# Copyright 2016 The TF-Slim Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, | software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either expre | ss or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
|
alfss/django-sockjs-server | django_sockjs_server/templatetags/sockjs_server_tags.py | Python | bsd-3-clause | 580 | 0.001724 | from random import choice
from dja | ngo import template
from django_sockjs_server.lib.config import SockJSServerSettings
from django_sockjs_server.lib.token import Token
register = template.Library()
@register.simple_tag(name='sockjs_auth_token')
def sockjs_auth_token(room_ | name, unq_id=None):
token = Token()
if unq_id:
return token.get_secret_data(room_name+str(unq_id))
return token.get_secret_data(room_name)
@register.simple_tag(name='sockjs_server_url')
def sockjs_server_url():
config = SockJSServerSettings()
return choice(config.sockjs_url)
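# Illustrative template usage (assumed markup; the load name follows the
# templatetags module name):
#
#   {% load sockjs_server_tags %}
#   <script>var sockjsUrl = "{% sockjs_server_url %}";</script>
#   {% sockjs_auth_token "room-1" %}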
|
caelan/stripstream | stripstream/fts/clause.py | Python | mit | 7,480 | 0.001337 | from collections import defaultdict
from stripstream.pddl.logic.connectives import Not
from stripstream.pddl.operators import STRIPSAction
from stripstream.fts.derived import get_derived
from stripstream.fts.variable import FreeParameter, Par, X, nX, VarMember, is_parameter, is_constant, var_args, var_name, make_var_constants
from stripstream.fts.constraint import Eq, make_con_constants, DUMMY_CONSTRAINTS, Constraint
class Clause(object):
num = 0
prefix = 'Clause%s'
def __init__(self, constraints, name=None):
for con in constraints:
if not isinstance(con, Constraint):
raise ValueError('%s must be a %s' %
(con, Constraint.__name__))
self.constraints = tuple(constraints)
self.n = Clause.num
Clause.num += 1
self.name = name if name is not None else self.prefix % self.n
def __repr__(self):
return str(self.name)
def get_components(constraints):
edges = defaultdict(set)
for con in constraints:
if con.constraint is Eq:
a, b = con.values
if a != b:
edges[a].add(b)
edges[b].add(a)
components = []
reached = set()
def dfs(a):
reached.add(a)
components[-1].append(a)
for b in edges[a]:
if b not in reached:
dfs(b)
for a in list(edges):
if a not in reached:
components.append([])
dfs(a)
return components
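# Worked example (illustrative): given constraints Eq(a, b), Eq(b, c) and
# Eq(d, e), the Eq edges form two connected components, so the function
# returns [[a, b, c], [d, e]] (member order follows the DFS visit order).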
def get_equality_map(constraints, var_map):
equality_map = {}
for component in get_components(constraints):
constants = filter(is_constant, component)
parameters = filter(is_parameter, component)
if constants:
assignment = constants[0]
if any(val != assignment for val in constants):
print 'Warning! Infeasible transition'
return None
else:
assignment = component[0]
dtypes = []
for param in parameters:
if isinstance(param, FreeParameter):
dtypes.append(param.type)
if isinstance(param, VarMember):
dtypes.append(var_map[var_name(param.var)].dtype)
if dtypes:
dtype = dtypes[0]
assert all(t == dtype for t in dtypes)
for param in parameters:
equality_map[param] = assignment
return equality_map
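# Illustrative behaviour: if one component contains parameters p, q and the
# constant 3 (e.g. via Eq(p, q) and Eq(q, 3)), both p and q map to 3; two
# different constants in the same component make the transition infeasible
# and the function returns None.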
def get_constraint_parameters(constraints, use_axioms):
internal_params = set()
for con in constraints:
for item in con.values:
if isinstance(item, VarMember):
for arg in var_args(item.var):
if isinstance(arg, FreeParameter):
internal_params.add(arg)
if item.temp != X or not use_axioms:
internal_params.add(item)
elif isinstance(item, FreeParameter):
internal_params.add(item)
return internal_params
def get_assignments(internal_params, var_map, eq_map):
assign_map = {}
for param in internal_params:
if isinstance(param, FreeParameter):
dtype = param.type
elif isinstance(param, VarMember):
dtype = var_map[var_name(param.var)].dtype
else:
raise ValueError(param)
eq_param = eq_map.get(param, param)
if not is_constant(eq_param):
if eq_param not in assign_map:
assign_map[eq_param] = Par('%s' % len(assign_map), dtype)
assign_map[param] = assign_map[eq_param]
else:
assign_map[param] = dtype(eq_param)
return assign_map
def get_effects(var_map, effect_vars, assign_map):
effects = []
for var in effect_vars:
name, args = var_name(var), make_var_constants(var, var_map)
pre_args = [assign_map.get(p, p) for p in args + [X[var]]]
eff_args = [assign_map.get(p, p) for p in args + [nX[var]]]
predicate = var_map[name].predicate
effects += [predicate(*eff_args), Not(predicate(*pre_args))]
return effects
def get_fluent_preconditions(var_map, effect_vars, eq_map, assign_map):
fluent_preconditions = []
for var in effect_vars:
name, args = var_name(var), make_var_constants(var, var_map)
pre_args = [assign_map.get(p, p) for p in args + [X[var]]]
predicate = var_map[name].predicate
fluent_preconditions.append(predicate(*pre_args))
for item in eq_map:
if isinstance(item, VarMember) and item.temp == X and item.var not in effect_vars:
name, args = var_name(
item.var), make_var_constants(item.var, var_map)
pre_args = [assign_map.get(p, p) for p in args + [X[item.var]]]
fluent_preconditions.append(var_map[name].predicate(*pre_args))
return fluent_preconditions
def get_static_preconditions(constraints, var_map, internal_params, assign_map, axiom_map):
static_preconditions = []
for con in constraints:
if con.constraint != Eq and con.constraint not in DUMMY_CONSTRAINTS:
if all(not isinstance(item, VarMember) or item in internal_params for item in con.values):
values = make_con_constants(con)
static_preconditions.append(con.constraint.predicate(
*[assign_map.get(item, item) for item in values]))
else:
constants = set()
new_values = []
for item in con.values:
if not isinstance(item, VarMember) or item in internal_params:
new_values.append(assign_map.get(item, item))
constants.add(assign_map.get(item, item))
else:
constants.update(assign_map.get(arg, arg)
for arg in var_args(item.var))
new_values.append(
X(*[assign_map.get(arg, arg) for arg in item.var]))
| derived = get_derived(con.constraint(
*new_values), var_map, axiom_map, constants)
if derived not in static_preconditions:
static_preconditions.append(derived)
return static_preconditions
def convert_clause(clause, var_m | ap, axiom_map):
effect_vars = {item.var for con in clause.constraints for item in con.values
if isinstance(item, VarMember) and item.temp == nX}
eq_map = get_equality_map(clause.constraints, var_map)
if eq_map is None:
return None
internal_params = set(eq_map) | {X[var] for var in effect_vars} | {
nX[var] for var in effect_vars} | get_constraint_parameters(clause.constraints, axiom_map is not None)
assign_map = get_assignments(internal_params, var_map, eq_map)
clause.parameter_map = {
v: p for v, p in assign_map.iteritems() if isinstance(p, FreeParameter)}
parameters = filter(lambda p: isinstance(
p, FreeParameter), assign_map.values())
preconditions = get_fluent_preconditions(var_map, effect_vars, eq_map, assign_map) + get_static_preconditions(
clause.constraints, var_map, internal_params, assign_map, axiom_map)
effects = get_effects(var_map, effect_vars, assign_map)
action = STRIPSAction(clause.name, parameters, preconditions, effects)
action.clause = clause
return action
|
GenericStudent/home-assistant | tests/helpers/test_update_coordinator.py | Python | apache-2.0 | 7,348 | 0 | """Tests for the update coordinator."""
import asyncio
from datetime import timedelta
import logging
import urllib.error
import aiohttp
import pytest
import requests
from homeassistant.helpers import update_coordinator
from homeassistant.util.dt import utcnow
from tests.async_mock import AsyncMock, Mock, patch
from tests.common import async_fire_time_changed
_LOGGER = logging.getLogger(__name__)
def get_crd(hass, update_interval):
"""Make coordinator mocks."""
calls = 0
async def refresh() -> int:
nonlocal calls
calls += 1
return calls
crd = update_coordinator.DataUpdateCoordinator[int](
hass,
_LOGGER,
name="test",
update_method=refresh,
update_interval=update_interval,
)
return crd
DEFAULT_UPDATE_INTERVAL = timedelta(seconds=10)
@pytest.fixture
def crd(hass):
"""Coordinator mock with default update interval."""
return get_crd(hass, DEFAULT_UPDATE_INTERVAL)
@pytest.fixture
def crd_without_update_interval(hass):
"""Coordinator mock that never automatically updates."""
return get_crd(hass, None)
async def test_async_refresh(crd):
"""Test async_refresh for update coordinator."""
assert crd.data is None
await crd.async_refresh()
assert crd.data == 1
assert crd.last_update_success is True
# Make sure we didn't schedule a refresh because we have 0 listeners
assert crd._unsub_refresh is None
updates = []
def update_callback():
updates.append(crd.data)
unsub = crd.async_add_listener(update_callback)
await crd.async_refresh()
assert updates == [2]
assert crd._unsub_refresh is not None
# Test unsubscribing through function
unsub()
await crd.async_refresh()
assert updates == [2]
# Test unsubscribing through method
crd.async_add_listener(update_callback)
crd.async_remove_listener(update_callback)
await crd.async_refresh()
assert updates == [2]
async def test_request_refresh(crd):
"""Test request refresh for update coordinator."""
assert crd.data is None
await crd.async_request_refresh()
assert crd.data == 1
assert crd.last_update_success is True
    # Second time we hit the debounce
await crd.async_request_refresh()
assert crd.data == 1
assert crd.last_update_success is True
async def test_request_refresh_no_auto_update(crd_without_update_interval):
"""Test request refresh for update coordinator without automatic update."""
crd = crd_without_update_interval
assert crd.data is None
await crd.async_request_refresh()
assert crd.data == 1
assert crd.last_update_success is True
    # Second time we hit the debounce
await crd.async_request_refresh()
assert crd.data == 1
assert crd.last_update_success is True
@pytest.mark.parametrize(
"err_msg",
[
(asyncio.TimeoutError, "Timeout fetching test data"),
(requests.exceptions.Timeout, "Timeout fetching test data"),
(urllib.error.URLError("timed out"), "Timeout fetching test data"),
(aiohttp.ClientError, "Error requesting test data"),
(requests.exceptions.RequestException, "Error requesting test data"),
(urllib.error.URLError("something"), "Error requesting test data"),
(update_coordinator.UpdateFailed, "Error fetching test data"),
],
)
async def test_refresh_known_errors(err_msg, crd, caplog):
"""Test raising known errors."""
crd.update_method = AsyncMock(side_effect=err_msg[0])
await crd.async_refresh()
assert crd.data is None
assert crd.last_update_success is False
assert err_msg[1] in caplog.text
async def test_refresh_fail_unknown(crd, caplog):
"""Test raising unknown error."""
await crd.async_refresh()
crd.update_method = AsyncMock(side_effect=ValueError)
await crd.async_refresh()
assert crd.data == 1 # value from previous fetch
assert crd.last_update_success is False
assert "Unexpected error fetching test data" in caplog.text
async def test_refresh_no_update_method(crd):
"""Test raising error is no update method is provided."""
await crd.async_refresh()
crd.update_method = None
with pytest.raises(NotImplementedError):
await crd.async_refresh()
async def test_update_interval(hass, crd):
"""Test update interval works."""
# Test we don't update without subscriber
async_fire_time_changed(hass, utcnow() + crd.update_interval)
await hass.async_block_till_done()
assert crd.data is None
# Add subscriber
update_callback = Mock()
crd.async_add_listener(update_callback)
# Test twice we update with subscriber
async_fire_time_changed(hass, utcnow() + crd.update_interval)
await hass.async_block_till_done()
assert crd.data == 1
async_fire_time_changed(hass, utcnow() + crd.update_interval)
await hass.async_block_till_done()
assert crd.data == 2
# Test removing listener
crd.async_remove_listener(update_callback)
async_fire_time_changed(hass, utcnow() + crd.update_interval)
await hass.async_block_till_done()
    # Test we stop updating after we lose the last subscriber
assert crd.data == 2
async def test_update_interval_not | _present(hass, crd_without_update_interval):
"""Test update never happens with no update interval."""
crd = crd_without_update_interval
# Test we don' | t update without subscriber with no update interval
async_fire_time_changed(hass, utcnow() + DEFAULT_UPDATE_INTERVAL)
await hass.async_block_till_done()
assert crd.data is None
# Add subscriber
update_callback = Mock()
crd.async_add_listener(update_callback)
# Test twice we don't update with subscriber with no update interval
async_fire_time_changed(hass, utcnow() + DEFAULT_UPDATE_INTERVAL)
await hass.async_block_till_done()
assert crd.data is None
async_fire_time_changed(hass, utcnow() + DEFAULT_UPDATE_INTERVAL)
await hass.async_block_till_done()
assert crd.data is None
# Test removing listener
crd.async_remove_listener(update_callback)
async_fire_time_changed(hass, utcnow() + DEFAULT_UPDATE_INTERVAL)
await hass.async_block_till_done()
# Test we stop don't update after we lose last subscriber
assert crd.data is None
async def test_refresh_recover(crd, caplog):
"""Test recovery of freshing data."""
crd.last_update_success = False
await crd.async_refresh()
assert crd.last_update_success is True
assert "Fetching test data recovered" in caplog.text
async def test_coordinator_entity(crd):
"""Test the CoordinatorEntity class."""
entity = update_coordinator.CoordinatorEntity(crd)
assert entity.should_poll is False
crd.last_update_success = False
assert entity.available is False
await entity.async_update()
assert entity.available is True
with patch(
"homeassistant.helpers.entity.Entity.async_on_remove"
) as mock_async_on_remove:
await entity.async_added_to_hass()
assert mock_async_on_remove.called
# Verify we do not update if the entity is disabled
crd.last_update_success = False
with patch("homeassistant.helpers.entity.Entity.enabled", False):
await entity.async_update()
assert entity.available is False
|
Weasyl/weasyl | gunicorn.conf.py | Python | apache-2.0 | 173 | 0 | wsgi_app = "weasyl.wsgi:make_wsgi_app()"
proc_name = "weasyl | "
preload_app = False
secure_scheme_hea | ders = {
'X-FORWARDED-PROTO': 'https',
}
forwarded_allow_ips = '*'
|
chrisnorman7/game | commands/options.py | Python | mpl-2.0 | 2,845 | 0 | """Provides the options command."""
from functools import partial
from gsb.intercept import Menu
from forms import set_value
from parsers import parser
from options import options
from util import done
def show_section(section, caller):
"""Show the player an instance of OptionsMenu."""
caller.connection.notify(OptionsMenu, section)
def show_option(option, caller):
"""Show and edit an option."""
player = caller.connection.pla | yer
def invalid_input(caller):
"""Show invalid input warning."""
player.notify('Invalid input: %r.', caller.text)
show_section(option.section, caller)
def after(caller):
"""Set the value."""
done(player)
show_section(option.section, caller)
value = getattr(player.options, option.name)
if value is True:
| value = 'Enabled'
elif value is False:
value = 'Disabled'
elif value is None:
value = 'Clear'
else:
value = repr(value)
player.notify(
'%s\n%s\nCurrent value: %s\n',
option.friendly_name,
option.description,
value
)
set_value(
player.options,
option.name,
caller,
after=after,
invalid_input=invalid_input
)
class OptionsMenu(Menu):
"""Show all the sub-sections and the options of a section."""
def __init__(self, section):
self.section = section
super(OptionsMenu, self).__init__(
title='%s\n%s' % (
section.name,
section.description
),
restore_parser=parser
)
def explain(self, connection):
"""Build the menu."""
player = connection.player
self.items.clear()
self.labels.clear()
if self.section.sections or self.section.parent is not None:
self.add_label('Sections', None)
for subsection in self.section.sections:
if subsection.allowed is None or subsection.allowed(player):
self.item(subsection.name)(partial(show_section, subsection))
if self.section.parent is not None:
self.item('Back to %s' % self.section.parent.name)(
partial(show_section, self.section.parent)
)
if self.items:
after = self.items[-1]
else:
after = None
if self.section.options:
self.add_label('Options', after)
for option in self.section.options:
self.item(option.friendly_name)(partial(show_option, option))
return super(OptionsMenu, self).explain(connection)
@parser.command
def do_options(caller):
"""options
Set options for your player."""
show_section(options, caller)
|
rh-s/heat | contrib/rackspace/rackspace/resources/cloudnetworks.py | Python | apache-2.0 | 5,069 | 0 | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from heat.common.i18n import _
from heat.common.i18n import _LW
from heat.engine import attributes
from heat.engine import constraints
from heat.engine import properties
from heat.engine import resource
from heat.engine import support
import six
try:
from pyrax.exceptions import NetworkInUse # noqa
from pyrax.exceptions import NotFound # noqa
PYRAX_INSTALLED = True
except ImportError:
PYRAX_INSTALLED = False
class NotFound(Exception):
"""Dummy pyrax exception - only used for testing."""
class NetworkInUse(Exception):
"""Dummy pyrax exception - only used for testing."""
LOG = logging.getLogger(__name__)
class CloudNetwork(resource.Resource):
"""A resource for creating Rackspace Cloud Networks.
See http://www.rackspace.com/cloud/networks/ for service
documentation.
"""
support_status = support.SupportStatus(
status=support.DEPRECATED,
message=_('Use OS::Neutron::Net instead.'),
version='2015.1',
previous_status=support.SupportStatus(version='2014.1')
)
PROPERTIES = (
LABEL, CIDR
) = (
"label", "cidr"
)
ATTRIBUTES = (
CIDR_ATTR, LABEL_ATTR,
) = (
'cidr', 'label',
)
properties_schema = {
LABEL: properties.Schema(
properties.Schema.STRING,
_("The name of the network."),
required=True,
constraints=[
constraints.Length(min=3, max=64)
]
),
CIDR: properties.Schema(
properties.Schema.STRING,
_("The IP block from which to allocate | the network. For example, "
"172.16.0.0/24 or 2001:DB8::/64."),
required=True,
constraints=[
constraints.CustomConstraint('net_cidr')
]
)
}
attributes_schema = {
CIDR_ATTR: attributes.Schema(
_("The CIDR for an isolated private network.")
),
LABEL_ATTR: attributes.Schema(
_("The n | ame of the network.")
),
}
def __init__(self, name, json_snippet, stack):
resource.Resource.__init__(self, name, json_snippet, stack)
self._network = None
def network(self):
if self.resource_id and not self._network:
try:
self._network = self.cloud_networks().get(self.resource_id)
except NotFound:
LOG.warn(_LW("Could not find network %s but resource id is"
" set."), self.resource_id)
return self._network
def cloud_networks(self):
return self.client('cloud_networks')
def handle_create(self):
cnw = self.cloud_networks().create(label=self.properties[self.LABEL],
cidr=self.properties[self.CIDR])
self.resource_id_set(cnw.id)
def handle_check(self):
self.cloud_networks().get(self.resource_id)
def handle_delete(self):
'''Delete cloud network.
Cloud Network doesn't have a status attribute, and there is a non-zero
window between the deletion of a server and the acknowledgement from
the cloud network that it's no longer in use, so it needs some way to
keep track of when the delete call was successfully issued.
'''
network_info = {
'delete_issued': False,
'network': self.network(),
}
return network_info
def check_delete_complete(self, network_info):
network = network_info['network']
if not network:
return True
if not network_info['delete_issued']:
try:
network.delete()
except NetworkInUse:
LOG.warn(_LW("Network '%s' still in use."), network.id)
else:
network_info['delete_issued'] = True
return False
try:
network.get()
except NotFound:
return True
return False
def validate(self):
super(CloudNetwork, self).validate()
def _resolve_attribute(self, name):
net = self.network()
if net:
return six.text_type(getattr(net, name))
return ""
def resource_mapping():
return {'Rackspace::Cloud::Network': CloudNetwork}
def available_resource_mapping():
if PYRAX_INSTALLED:
return resource_mapping()
return {}
|
transplantation-immunology/EMBL-HLA-Submission | saddlebags/AlleleSubmission.py | Python | lgpl-3.0 | 3,176 | 0.003463 | # This file is part of saddle-bags.
#
# saddle-bags is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# saddle-bags is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with saddle-bags. If not, see <http://www.gnu.org/licenses/>.
from saddlebags.HlaSequence import HlaSequence
import logging
class SubmissionBatch():
def __init__(self, includeInitialSubmission):
if(includeInitialSubmission):
# Starting with a single empty submission in the batch.
self.submissionBatch = [AlleleSubmission()]
else:
# Starting with an empty batch
self.submissionBatch = []
self.enaUserName = None
self.enaPassword = None
self.ipdSubmitterId = None
self.ipdSubmitterName = None
self.ipdAltContact = None
self.ipdSubmitterEmail = None
self.labOfOrigin = None
self.labContact = None
self.studyAccession = None
self.chooseStudy = "2" # 2 = new study. 1 = existing study, use the studyaccession number. Study=Project
self.studyId = None
self.studyShortTitle = None
self.studyAbstract = None
class AlleleSubmission():
def __init__(self):
self.submittedAllele=HlaSequence()
self.localAlleleName = None
self.closestAlleleWrittenDescription = None
self.ipdSubmissionIdentifier = None
self.ipdSubmissionVersion = None
self.enaAccessionIdentifier = None
        # TODO: I think this column is intended for identifying cell | line names, if we are submitting the HLA types of cell lines. I'm just using it as a sample ID or cell number. Let's see where that breaks.
self.cellId = None
self.ethnicOrigin = None
self.sex = None
self.consanguineous = None
self.homozygous = None
# Necessary = A,B, DRB1. The rest are extra, and they help James trust the submitted sequenc | e.
# I store the typed alleles as a dictionary. Key is the Locus (HLA-A) and the value is a String with the alleles, separated by a comma (02:01,03:01:14)
self.typedAlleles = {}
self.materialAvailability = None
self.cellBank = None
self.primarySequencingMethodology = None
self.secondarySequencingMethodology = None
self.primerType = None
self.primers = None
self.sequencedInIsolation = None
self.sequencingDirection = None
self.numOfReactions = None
self.methodComments = None
self.citations = None
self.enaSubmissionText = None
self.ipdSubmissionText = None
self.isPseudoGene = False # A null allele uses pseudogene if length of the coding sequence is not a multiple of 3.
|
gmt/portage | pym/portage/sync/modules/git/git.py | Python | gpl-2.0 | 3,109 | 0.024124 | # Copyright 2005-2015 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
import logging
import subprocess
import portage
from portage import os
from portage.util import writemsg_level
from portage.output import create_color_func
good = create_color_func("GOOD")
bad = create_color_func("BAD")
warn = create_color_func("WARN")
from portage.sync.syncbase import NewBase
class GitSync(NewBase):
'''Git sync class'''
short_desc = "Perform sync operations on git based repositories"
@staticmethod
def name():
return "GitSync"
def __init__(self):
NewBase.__init__(self, "git", portage.const.GIT_PACKAGE_ATOM)
def exists(self, **kwargs):
'''Tests whether the repo actually exists'''
return os.path.exists(os.path.join(self.repo.location, '.git'))
def new(self, **kwargs):
'''Do the initial clone of the repository'''
if kwargs:
self._kwargs(kwargs)
try:
if not os.path.exists(self.repo.location):
os.makedirs(self.repo.location)
self.logger(self.xterm_titles,
'Created new directory %s' % self.repo.location)
except IOError:
return (1, False)
sync_uri = self.repo.sync_uri
if sync_uri.startswith("file://"):
sync_uri = sync_uri[6:]
git_cmd_opts = ""
if self.settings.get("PORTAGE_QUIET") == "1":
git_cmd_opts += " --quiet"
if self.repo.sync_depth is not None:
git_cmd_opts += " --depth %d" % self.repo.sync_depth
git_cmd = "%s clone%s %s ." % (self.bin_command, git_cmd_opts,
portage._shell_quote(sync_uri))
writemsg_level(git_cmd + "\n")
exitcode = portage.process.spawn_bash("cd %s ; exec %s" % (
portage._shell_quote(self.repo.location), git_cmd),
**portage._native_kwargs(self.spawn_kwargs))
if exitcode != os.EX_OK:
msg = "!!! git clone error in %s" % self.repo.location
self.logger(self.xterm_titles, msg)
writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
return (exitcode, False)
return (os.EX_OK, True)
def update(self):
''' Update existing git repository, and ignore the syncuri. We are
going to trust the user and assume that the user is in the branch
that he/she wants u | pdated. We'll let the user manage branches with
git directly.
'''
git_cmd_opts = ""
if self.settings.get("PORTAGE_QUIET") == "1":
git_cmd_opts += " --quiet"
git_cmd = "%s pull%s" % (self.bin_command, git_c | md_opts)
writemsg_level(git_cmd + "\n")
rev_cmd = [self.bin_command, "rev-list", "--max-count=1", "HEAD"]
previous_rev = subprocess.check_output(rev_cmd,
cwd=portage._unicode_encode(self.repo.location))
exitcode = portage.process.spawn_bash("cd %s ; exec %s" % (
portage._shell_quote(self.repo.location), git_cmd),
**portage._native_kwargs(self.spawn_kwargs))
if exitcode != os.EX_OK:
msg = "!!! git pull error in %s" % self.repo.location
self.logger(self.xterm_titles, msg)
writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
return (exitcode, False)
current_rev = subprocess.check_output(rev_cmd,
cwd=portage._unicode_encode(self.repo.location))
return (os.EX_OK, current_rev != previous_rev)
|
davidbgk/udata | udata/core/post/models.py | Python | agpl-3.0 | 1,465 | 0 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from flask import url_for
from udata.core.storages import images, default_image_basename
from udata.i18n import lazy_gettext as _
from udata.models import db
__all__ = ('Post', )
IMAGE_SIZES = [400, 100, 50]
class Post(db.Datetimed, db.Document):
name = db.StringField(max_length=255, required=True)
slug = db.SlugField(
max_length=255, required=True, populate_from='name', update=True)
headline = db.StringField()
content = db.StringField(r | equired=True)
image_url = db.StringField()
image = db.ImageField(
fs=images, basename=default_image_basename, thumbnails=IMAGE_SIZES)
credit_to = db.StringField()
credit_url = db.URLFie | ld()
tags = db.ListField(db.StringField())
datasets = db.ListField(
db.ReferenceField('Dataset', reverse_delete_rule=db.PULL))
reuses = db.ListField(
db.ReferenceField('Reuse', reverse_delete_rule=db.PULL))
owner = db.ReferenceField('User')
private = db.BooleanField()
meta = {
'ordering': ['-created_at'],
}
verbose_name = _('post')
def __unicode__(self):
return self.name or ''
def url_for(self, *args, **kwargs):
return url_for('posts.show', post=self, *args, **kwargs)
@property
def display_url(self):
return self.url_for()
@property
def external_url(self):
return self.url_for(_external=True)
|
cydenix/OpenGLCffi | OpenGLCffi/GL/EXT/ARB/vertex_type_2_10_10_10_rev.py | Python | mit | 3,794 | 0.010543 | from OpenGLCffi.GL import params
@params(api='gl', prms=['index', 'type', 'normalized', 'value'])
def glVertexAttribP1ui(index, type, normalized, value):
pass
@params(api='gl', prms=['index', 'type', 'normalized', 'value'])
def glVertexAttribP1uiv(index, type, normalized, value):
pass
@params(api='gl', prms=['index', 'type', 'normalized', 'value'])
def glVertexAttribP2ui(index, type, normalized, value):
pass
@params(api='gl', prms=['index', 'type', 'normalized', 'value'])
def glVertexAttribP2uiv(index, type, normalized, value):
pass
@params(api='gl', prms=['index', 'type', 'normalized', 'value'])
def glVertexAttribP3ui(index, type, normalized, value):
pass
@params(api='gl', prms=['index', 'type', 'normalized', 'value'])
def glVertexAttribP3uiv(index, type, normalized, value):
pass
@params(api='gl', prms=['index', 'type', 'normalized', 'value'])
def glVertexAttribP4ui(index, type, normalized, value):
pass
@params(api='gl', prms=['index', 'type', 'normalized', 'value'])
def glVertexAttribP4uiv(index, type, normalized, value):
pass
@params(api='gl', prms=['type', 'value'])
def glVertexP2ui(type, value):
pass
@params(api='gl', prms=['type', 'value'])
def glVertexP2uiv(type, value):
pass
@params(api='gl', prms=['type', 'value'])
def glVertexP3ui(type, value):
pass
@params(api='gl', prms=['type', 'value'])
def glVertexP3uiv(type, value):
pass
@params(api='gl', prms=['type', 'value'])
def glVertexP4ui(type, value):
pass
@params(api='gl', prms=['type', 'value'])
def glVertexP4uiv(type, value):
pass
@params(api='gl', prms=['type', 'coords'])
def glTexCoordP1ui(type, coords):
pass
@params(api='gl', prms=['type', 'coords'])
def glTexCoordP1uiv(type, coords):
pass
@params(api='gl', prms=['type', 'coords'])
def glTexCoordP2ui(type, coords):
pass
@params(api='gl', prms=['type', 'coords'])
def glTexCoordP2uiv(type, coords):
pass
@params(api='gl', prms=['type', 'coords'])
def glTexCoordP3ui(type, coords):
pass
@params(api='gl', prms=['type', 'coords'])
def glTexCoordP3uiv(type, coords):
pass
@params(api='gl', prms=['type', 'coords'])
def glTexCoordP4ui(type, coords):
pass
@params(api='gl', prms=['type', 'coords'])
def glTexCoordP4uiv(type, coords):
pass
@params(api='gl', prms=['texture', 'type', 'coords'])
def glMultiTexCoordP1ui(texture, type, coords):
pass
@params(api='gl', prms=['texture', 'type', 'coords'])
def glMultiTexCoordP1uiv(texture, type, coords):
pass
@params(api='gl', prms=['texture', 'type', 'coords'])
def glMultiTexCoordP2ui(texture, type, coords):
pass
@params(api='gl', prms=['texture', 'type', 'coords'])
def glMultiTexCoordP2uiv(texture, type, coords):
pass
@params(api='gl', prms=['texture', 'type', 'coords'])
def glMultiTexCoordP3ui(texture, type, coords):
pass
@params(api='gl', prms=['texture', 'type', 'coords'])
def glMultiTexCoordP3uiv(texture, type, coords):
pass
@params(api='gl', prms=['texture', 'type', 'coords'])
def glMultiTexCoordP4ui(texture, type, coords):
pass
@params(api='gl', prms=['texture', 'type', 'coords'])
def glMultiTexCoordP4uiv(texture, type, coords):
pass
@params(api='gl', prms=['type', 'coords'])
def glNormalP3ui(type, coords):
pass
@params(api='gl', prms=['type', 'coords'])
def glNormalP3uiv(type, coords):
pass
@params(api='gl', prms=['t | ype', 'color'])
def glColorP3ui(type, color):
pass
@params(api='gl', prms=['type', 'color'])
def glColorP3uiv(type, color):
pass
@params(api='gl', prms=['type', 'color'])
def glColorP4ui(type, color):
pass
@para | ms(api='gl', prms=['type', 'color'])
def glColorP4uiv(type, color):
pass
@params(api='gl', prms=['type', 'color'])
def glSecondaryColorP3ui(type, color):
pass
@params(api='gl', prms=['type', 'color'])
def glSecondaryColorP3uiv(type, color):
pass
|
amarandon/pinax | pinax/projects/code_project/urls.py | Python | mit | 2,413 | 0.008703 | from django.conf import settings
from django.conf.urls.defaults import *
from django.views.generic.simple import direct_to_template
from django.contrib import admin
admin.autodiscover()
from tagging.models import TaggedItem
from wakawaka.models import WikiPage
from pinax.apps.account.openid_consumer import PinaxConsumer
from pinax.apps.projects.models import Project
from pinax.apps.tasks.models import Task
from pinax.apps.topics.models import Topic as ProjectTopic
handler500 = "pinax.views.server_error"
urlpatterns = patterns("",
url(r"^$", direct_to_template, {
"template": "homepage.html",
}, name="home"),
url(r"^admin/invite_user/$", "pinax.apps.signup_codes.views.admin_invite_user", name="admin_invite_user"),
url(r"^admin/", include(admin.site.urls)),
url(r"^about/", include("about.urls")),
url(r"^account/", include("pinax.apps.account.urls")),
url(r"^openid/", include(PinaxConsumer().urls)),
url(r"^profiles/", include("idios. | urls")),
url( | r"^notices/", include("notification.urls")),
url(r"^avatar/", include("avatar.urls")),
url(r"^comments/", include("threadedcomments.urls")),
url(r"^announcements/", include("announcements.urls")),
url(r"^tagging_utils/", include("pinax.apps.tagging_utils.urls")),
url(r"^attachments/", include("attachments.urls")),
url(r"^projects/", include("pinax.apps.projects.urls")),
)
tagged_models = (
dict(title="Projects",
query=lambda tag: TaggedItem.objects.get_by_model(Project, tag),
),
dict(title="Project Topics",
query=lambda tag: TaggedItem.objects.get_by_model(ProjectTopic, tag),
),
dict(title="Project Tasks",
query=lambda tag: TaggedItem.objects.get_by_model(Task, tag),
),
dict(title="Wiki Articles",
query=lambda tag: TaggedItem.objects.get_by_model(WikiPage, tag),
),
)
tagging_ext_kwargs = {
"tagged_models": tagged_models,
}
urlpatterns += patterns("",
url(r"^tags/(?P<tag>.+)/(?P<model>.+)$", "tagging_ext.views.tag_by_model",
kwargs=tagging_ext_kwargs, name="tagging_ext_tag_by_model"),
url(r"^tags/(?P<tag>.+)/$", "tagging_ext.views.tag",
kwargs=tagging_ext_kwargs, name="tagging_ext_tag"),
url(r"^tags/$", "tagging_ext.views.index", name="tagging_ext_index"),
)
if settings.SERVE_MEDIA:
urlpatterns += patterns("",
url(r"", include("staticfiles.urls")),
)
|
SeMorgana/ctf | defcon2014/3dttt.py | Python | gpl-3.0 | 5,141 | 0.024703 | #5/17/2014
import telnetlib
import random
from utility import *
tn = telnetlib.Telnet("3dttt_87277cd86e7cc53d2671888c417f62aa.2014.shallweplayaga.me",1234)
X = 'X'
O = 'O'
def get_sym(coor): #sym => symmetric
if coor == 0:
return 2
if coor == 1:
return 1
if coor == 2:
return 0
def get_move(new_O_pos):
x,y,z = new_O_pos #x,y are in wrong order
return (get_sym(x),get_sym(y),get_sym(z))
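# Illustrative: the reply mirrors the opponent's new move through the
# centre cell (1,1,1), e.g.
#   get_move((0,1,2)) -> (2,1,0)
#   get_move((2,2,0)) -> (0,0,2)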
def get_new_pos(pre,cur):
for i in cur:
if not (i in pre):
return i
def is_all_empty(open_all):
ret = True
for i in range(9):
ret = ret and (len(open_all[i]) == 0)
return ret
def get_next_open(open_all): #open_all: tuple of list of tuples
valid = []
for i in range(9):
if len(open_all[i])>0:
if i in [0,1,2]:
z = 0
elif i in [3,4,5]:
z = 1
elif i in [6,7,8]:
z = 2
for j in open_all[i]:
valid.append((j[0],j[1],z))
index = random.randint(0,len(valid)-1)
return valid[index]
#return (open_all[i][0][0],open_all[i][0][1],z)
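# Note (illustrative): entries of open_all hold (row, col) pairs from
# get_empty(); the flat index i selects the board layer (0-2 -> z=0,
# 3-5 -> z=1, 6-8 -> z=2), and one open cell is returned at random
# as a (row, col, z) tuple.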
def get_empty(row1,row_num):
open_list =[] #list of tuples
lis = row1.split()
if len(lis) == 2:
open_list.append((row_num,0));
open_list.append((row_num,1));
open_list.append((row_num,2));
elif len(lis) == 3:
if X in lis:
index = lis.index(X)
if index == 0:
open_list.append((row_num,1))
open_list.append((row_num,2))
elif index == 1:
open_list.append((row_num,0))
open_list.append((row_num,2))
elif index == 2:
open_list.append((row_num,0))
open_list.append((row_num,1))
elif O in lis:
index = lis.index(O)
if index == 0:
open_list.append((row_num,1))
open_list.append((row_num,2))
elif index == 1:
open_list.append((row_num,0))
open_list.append((row_num,2))
elif index == 2:
open_list.append((row_num,0))
open_list.append((row_num,1))
elif len(lis) == 4:
if lis[0] == '|':
open_list.append((row_num,0))
elif lis[3] == '|':
open_list.append((row_num,2))
else:
open_list.append((row_num,1))
return open_list
def main():
score_list = get_score_list()
turns = 0
pre_Olist = [] #list of tuples
cur_Olist = [] #same above
while True:
ret = tn.read_until("y\n")
print ret
tn.read_until("0")
row00 = tn.read_until("\n").strip()
tn.read_until("1") #skip
row01 = tn.read_until("\n").strip()
tn.read_until("2") #skip
row02 = tn.read_until("\n").strip()
ret = tn.read_until("y\n")
tn.read_until("0")
row10 = tn.read_until("\n").strip()
tn.read_until("1") #skip
row11 = tn.read_until("\n").strip()
tn.read_until("2") #skip
row12 = tn.read_until("\n").strip()
ret = tn.read_until("y\n")
tn.read_until("0")
row20 = tn.read_until("\n").strip()
tn.read_until("1") #skip
row21 = tn.read_until("\n").strip()
tn.read_until("2") #skip
row22 = tn.read_until("\n").strip()
#print row00
#print row01
#print row02
#print ""
open0 = (get_empty(row00,0), get_empty(row01,1), get_empty(row02,2))
#print row10
#print row11
#print row12
#print ""
open1 = (get_empty(row10,0), get_empty(row11,1), get_empty(row12,2))
#print row20
#print row21
#print row22
open2 = (get_empty(row20,0), get_empty(row21,1), get_empty(row22,2))
rows = (row00,row01,row02,row10,row11,row12,row20,row21,row22)
ret = tn.read_some()
print ret
open_all = (open0[0],open0[1],open0[2],open1[0],open1[1],open1[2],open2[0],open2[1],open2[2])
open_list = convert_open_list(open_all)
if is_all_empty(open_all):
ret = tn.read_some()
print ret
pre_Olist = []
cur_Olist = []
turns = 0
#return
continue
y,x,z = get_next_open(open_all)
Xlist = get_pos_list(rows,'X')
Olist = get_pos_list(rows,'O')
next_move = minimax(Xlist,Olist,open_list)
print "next move", next_move
#get_score(score_list,Xlist,Olist)
if turns==0:
send = "1,1,1"
cur_Olist = get_pos_list(rows,'O')
turns += 1
else:
            pre_Olist = cur_Olist
cur_Olist = get_pos_list(rows,'O')
new_pos = get_new_pos(pre_Olist,cur_Olist)
#y,x,z = get_move(new_pos)
y,x,z = next_ | move
send = str(x)+","+str(y)+","+str(z)
print "sending ",send
tn.write(send+"\n")
|
if __name__=="__main__":
main()
|
BoolLi/LeapMotionDesignChallenge | Plotting.py | Python | mit | 602 | 0.004983 | # Name: Seline, Li, Taylor, Son
# Leap Motion project
import matplotlib as mpl
from mpl_toolkits.mplot3d import Axes3D
import numpy as np
import matplotlib.pyplot as plt
import time
mpl.rcParams['legend.fontsize'] = 10
fig = plt.figure()
ax = Axes3D(fig)
theta = np.linspace(-4 * np.pi, 4 * np.pi, 100)
z = np.linspace(-2, 2, 100)
r = z**2 + 1
x = r * np.sin(theta)
y = r * np.cos(theta)
ax.plot(x, y, z, label='parametric curve')
ax.le | gend()
plt.ion()
plt.show()
for ii in xrange(0,360,1):
ax.view_init(elev=10, azim=ii)
| plt.draw()
print "drawn? " + str(ii)
time.sleep(0.01)
|
tino/python-telegram-bot | telegram/contact.py | Python | gpl-3.0 | 1,745 | 0 | #!/usr/bin/env python
#
# A library that provides a Python interface to the Telegram Bot API
# Copyright (C) 2015 Leandro Toledo de Souza <leandrotoeldodesouza@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser Public License for more details.
#
# You should have received a copy of the GNU Lesser Public License
# along with this program. If not, see [http://www.gnu.org/licenses/].
from telegram import TelegramObject
class Contact(TelegramObject):
def __init__(self,
phone_number,
first_name,
last_name=None,
user_id=None):
self.phone_number = phone_number
self.first_name = first_name
self.last_name = last_name
self.user_id = user_id
@staticmethod
    def de_json(data):
return Contact(phone_number=data.get('phone_number', None),
first_name=data.get('first_name', None),
last_name=data.get('last_name', None),
user_id=data.get('user_id', None))
def to_dict(self):
        data = {'phone_number': self.phone_number,
'first_name': self.first_name}
if self.last_name:
data['last_name'] = self.last_name
if self.user_id:
data['user_id'] = self.user_id
return data
|
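A quick round-trip sketch for the class above, with illustrative values only: de_json builds a Contact from a decoded API payload, and to_dict omits the optional fields that were never set.

payload = {'phone_number': '+15550100', 'first_name': 'Ada'}
contact = Contact.de_json(payload)
print contact.to_dict()  # {'phone_number': '+15550100', 'first_name': 'Ada'}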
dhinakg/BitSTAR | api/database/DAL/__init__.py | Python | apache-2.0 | 1,348 | 0.006677 | # Copyright 2017 Starbot Discord Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from api.database.DAL import SQLite
from api.database.db import DB
def db_open(db_in):
if DB.type == "SQLite":
SQLite.db_open(db_in)
def db_close(db_in):
if DB.type == "SQLite":
        SQLite.db_close(db_in)
def db_create_table(db_in, tablename):
if DB.type == "SQLite":
SQLite.db_create_table(db_in, tablename)
def db_insert(db_in, table, dict_in):
if DB.type == "SQLite":
return SQLite.db_insert(db_in, table, dict_in)
def db_get_contents_of_table(db_in, table, rows):
    if DB.type == "SQLite":
return SQLite.db_get_contents_of_table(db_in, table, rows)
def db_get_latest_id(db_in, table):
if DB.type == "SQLite":
return SQLite.db_get_latest_id(db_in, table) |
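Intended use of the dispatch layer above, as a sketch. It assumes DB.type was set to "SQLite" during configuration; the table name and row values are illustrative, and db_in stands for whatever handle the SQLite backend expects.

db_in = 'bot.db'  # placeholder -- the real handle type depends on the backend
db_open(db_in)
db_create_table(db_in, 'messages')
db_insert(db_in, 'messages', {'author': 'alice', 'text': 'hi'})
print db_get_latest_id(db_in, 'messages')
db_close(db_in)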
mrc75/django-service-status | tests/settings.py | Python | mit | 1,279 | 0 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
import django
DEBUG = True
USE_TZ = True
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
},
'interface': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
ROOT_URLCONF = 'tests.urls'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'service_status',
]
SITE_ID = 1
if django.VERSION >= (1, 10):
MIDDLEWARE = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware'
)
else:
MIDDLEWARE_CLASSES = (
        'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware'
)
STATIC_URL = '/static/'
TEMPLATES = [
{
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
# 'OPTIONS': {},
},
]
TEST_RUNNER = 'tests.runner.PytestTestRunner'
|
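TEST_RUNNER above points at tests/runner.py, which is outside this excerpt. Such a runner is conventionally a thin adapter that forwards Django's test labels to pytest; a minimal sketch of what it likely contains:

import pytest

class PytestTestRunner(object):
    """Run the suite with pytest instead of Django's default runner."""

    def __init__(self, verbosity=1, failfast=False, **kwargs):
        self.verbosity = verbosity
        self.failfast = failfast

    def run_tests(self, test_labels):
        argv = list(test_labels)
        if self.verbosity > 1:
            argv.append('-v')
        if self.failfast:
            argv.append('--exitfirst')
        return pytest.main(argv)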
Nablaquabla/sns-analysis | _getAcceptanceVsTime.py | Python | gpl-3.0 | 6,613 | 0.013912 | #!/home/bjs66/anaconda2/bin/python2.7
"""
Created on Mon Feb 01 15:03:56 2016
@author: Nablaquabla
"""
import h5py
import numpy as np
import easyfit as ef
import datetime
import pytz
import os
# Prepare timezones and the beginning of the epoch for later use
utc = pytz.utc
eastern = pytz.timezone('US/Eastern')
epochBeginning = utc.localize(datetime.datetime(1970,1,1))
# ============================================================================
# Run program
# ============================================================================
if __name__ == '__main__':
runDirs = ['Run-15-06-25-12-53-44','Run-15-06-26-11-23-13','Run-15-07-31-18-30-14',
'Run-15-08-18-14-51-18','Run-15-08-31-00-23-36','Run-15-09-21-20-58-01',
'Run-15-09-23-21-16-00','Run-15-10-03-09-26-22','Run-15-10-13-13-27-09',
'Run-15-10-21-13-12-27','Run-15-10-29-15-56-36','Run-15-11-09-11-30-13',
'Run-15-11-20-11-34-48','Run-15-11-24-15-35-32','Run-15-12-14-11-21-45',
'Run-15-12-26-08-30-40','Run-16-01-07-12-16-36','Run-16-02-02-16-26-26',
'Run-16-02-15-13-46-34','Run-16-02-29-11-54-20','Run-16-03-09-13-00-14',
'Run-16-03-22-18-09-33','Run-16-03-30-12-44-57','Run-16-04-12-11-54-27',
'Run-16-04-20-11-22-48','Run-16-05-05-14-08-52','Run-16-05-17-14-40-34',
'Run-16-06-02-12-35-56','Run-16-06-17-12-09-12','Run-16-06-27-17-50-08',
'Run-16-07-06-18-25-19','Run-16-07-12-11-44-55','Run-16-07-18-11-50-24',
'Run-16-07-21-11-59-39','Run-16-07-28-12-49-17']
# runDirs = ['Run-15-06-25-12-53-44']
dataDir = '/home/bjs66/csi/bjs-analysis/Processed/'
powerDir = '/home/bjs66/csi/bjs-analysis/BeamPowerHistory/'
# Read stop times of runs:
data = np.loadtxt('/home/bjs66/GitHub/sns-analysis/start-stop-times-of-runs.txt',dtype=str)
# Prepare output file
h5Out = h5py.File(dataDir + 'Stability/Acceptances.h5','w')
# Prepare beam power file
h5Power= h5py.File(powerDir + 'BeamPowerHistory.h5','r')
# For each run to be analyzed
for run in runDirs:
print run
# Find each day in the run
dayNames = np.sort(os.listdir(dataDir + run))
# Get the correct stopping time for the run
for d in data:
if run in d:
easternEndTS = eastern.localize(datetime.datetime.strptime(d[1],'%y-%m-%d-%H-%M-%S'))
utcEndTS = (easternEndTS.astimezone(utc) - epochBeginning).total_seconds()
# Prepare output arrays:
dOut = {'time': [], 'sPT': [], 'bPT': [], 'muon': [], 'linGate': [], 'overflow': [], 'power': [], 'duration': [], 'beamOnDuration': [], 'beamOffDuration': []}
# For each day in the run
for dN in dayNames:
# Open the day as input file from which all the data will be derived
h5In = h5py.File(dataDir + run + '/' + dN)
        # Extract the date from the filename
day = '20' + dN.split('.')[0]
        # Check whether this is the last day in the run; if so, set the last timestamp for this day to the run's end,
        # otherwise set the last timestamp for this day to the second before midnight
easternDayTS = eastern.localize(datetime.datetime.strptime(day,'%Y%m%d'))
        if easternDayTS.date() == easternEndTS.date():
lastTSForThisDay = utcEndTS
        else:
lastTSForThisDay = ((easternDayTS + datetime.timedelta(days=1)).astimezone(utc) - epochBeginning).total_seconds()
# Read power data for current day
timeData = h5Power['/%s/time'%day][...]
powerData = h5Power['/%s/power'%day][...]
# Get all time data in the info file, convert it to seconds since epoch
# and add the proper last timestamp of the day as determined above
timeArray = np.sort(h5In['/I'].keys())
utcTS = []
for time in timeArray:
dayTS = datetime.datetime.strptime('%s%s'%(day,time),'%Y%m%d%H%M%S')
easternTS = eastern.localize(dayTS)
utcTS.append((easternTS.astimezone(utc) - epochBeginning).total_seconds())
utcTS.append(lastTSForThisDay)
        # For each time in the info file, get the data needed to calculate the acceptances for that
        # period and integrate the beam power over the same time frame
for i in range(len(timeArray)):
# Get the time as eastern time string
time = timeArray[i]
# Get the proper start and stop timestamps in utc seconds since epoch
tsStart = utcTS[i]
tsStop = utcTS[i+1] - 1
duration = tsStop - tsStart
# Read data
nWaveforms = np.sum(h5In['/I/%s/waveformsProcessed'%time][...])
sPTAccept = np.sum(h5In['/I/%s/sPeaksPTDist'%time][...][:3])
bPTAccept = np.sum(h5In['/I/%s/bPeaksPTDist'%time][...][:3])
muonHits = len(h5In['/I/%s/muonHits'%time][...])
linGates = np.sum(h5In['/I/%s/linearGates'%time][...])
overflows = np.sum(h5In['/I/%s/overflows'%time][...])
# Integrate the total power on target between the start and stop time
cutPowerTimes = (timeData>=tsStart) * (timeData<=tsStop)
beamOnSeconds = np.sum(powerData[cutPowerTimes] >= 5e-5)
beamOffSeconds = (tsStop - tsStart + 1) - beamOnSeconds
beamPower = np.sum(powerData[cutPowerTimes])/3600.0 # In MWhr
dOut['time'].append(tsStart)
dOut['sPT'].append(1.0*sPTAccept/nWaveforms)
dOut['bPT'].append(1.0*bPTAccept/nWaveforms)
dOut['muon'].append(1.0*muonHits/nWaveforms)
dOut['linGate'].append(1.0*linGates/nWaveforms)
dOut['overflow'].append(1.0*overflows/nWaveforms)
dOut['power'].append(beamPower)
dOut['duration'].append(duration)
dOut['beamOnDuration'].append(beamOnSeconds)
dOut['beamOffDuration'].append(beamOffSeconds)
h5In.close()
for key in ['time','sPT','bPT','muon','linGate','overflow','power','duration','beamOnDuration','beamOffDuration']:
if '/%s/%s'%(run,key) in h5Out:
del h5Out['/%s/%s'%(run,key)]
h5Out.create_dataset('/%s/%s'%(run,key),data=dOut[key])
h5Out.close()
h5Power.close()
|
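Reading the output written above back out, as a sketch: each run group holds parallel arrays under the keys collected in dOut, so per-run quantities line up index by index.

import h5py
import numpy as np

h5 = h5py.File('/home/bjs66/csi/bjs-analysis/Processed/Stability/Acceptances.h5', 'r')
run = 'Run-15-06-25-12-53-44'
t = h5['/%s/time' % run][...]
sPT = h5['/%s/sPT' % run][...]
print 'mean sPT acceptance:', np.mean(sPT)
h5.close()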
gandrewstone/yadog | PyHtmlGen/menu.py | Python | gpl-3.0 | 8,114 | 0.055829 | from gen import *
from chunk import *
from document import *
from attribute import *
from template import *
import pdb
import copy
from types import *
#class MenuItem:
# def __init__(self, text, whenclicked):
# Send a list of items, this wraps it in code that will change the foreground color when the mouse goees over it.
# the list of items is either just the item or (item,onclickaction)
def activeHighlightItems(items,onCol,offCol):
result = []
if type(items[0]) == type(()):
#print "PAIR", items[0][1]
result = [ active(Span(x[0]),setAttr("style","color: %s" % onCol),setAttr("style","color: %s" % offCol), [x[1]] ) for x in items]
else:
result = [ active(Span(x),setAttr("style","color: %s" % onCol),setAttr("style","color: %s" % offCol) ) for x in items]
print result
return result
def activeStyleItemsSpan(items,onStyle,offStyle,func=setAttr):
result = []
if type(items[0]) == type(()):
#print "PAIR", items[0][1]
result = [ active(Span(x[0]),func("style",onStyle),func("style",offStyle), [x[1]] ) for x in items]
else:
result = [ active(Span(x),func("style",onStyle),func("style",offStyle) ) for x in items]
print result
return result
def activeStyleItems(items,onStyle,offStyle,func=setAttr):
result = []
for i in items:
if type(i) == type(()):
shown = i[0]
lnk = [i[1]]
else:
shown = i
lnk = None
        if isInstanceOf(shown,ChunkBase): wrapper = shown
else: wrapper = Span(shown)
result.append(active(wrapper,func("style",onStyle),func("style",offStyle), lnk ))
#print result
return result
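# Illustrative call (not from the original source): plain strings and
# (text, onclick) pairs both work, and each item swaps its inline style
# on hover -- the same pattern Test() below exercises:
#   items = activeStyleItems(["Home", ("Art", jsLink("art.html"))],
#                            "color:rgb(0,255,0)", "color:rgb(0,0,0)")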
class VerticalList(Block):
def __init__(self, items,selAttr=None,myId="VerticalList"):
Block.__init__(self,myId,None,None,"relative")
        self.suppressBody = True # Turn off the Block's automatic div creation because we have another block
self.sel = selAttr
self.lst = chunkBuffer()
self.menu = chunkTag(["div","ul"],self.lst,('id',self.id))
for i in items: #(text, item) in items:
if type(i) is TupleType: # Allow either (text,item) or just text
text = i[0]
item = i[1]
else:
text = i
item = None
tg = chunkTag("li",text)
if isInstanceOf(text,Template):
text.apply(tg)
self.lst.append(tg)
if type(item) == type([]):
self.lst.append(VerticalList(item,copy.deepcopy(self.sel)))
if self.sel:
print "selAttr ", self.sel
tmp = self.lst[0]
print self.lst[0]
self.lst[0] = copy.deepcopy(self.sel)
self.lst[0].setrec(tmp) # For now, put the selection on the first item
def gen(self,doc):
Block.gen(self,doc)
genDoc(doc, self.menu)
class VerticalMenu(Block):
def __init__(self, items,selAttr=None,submenufn=None,itemfn=None,myId="VerticalMenu",depth=0 ):
Block.__init__(self,myId,None,None,"relative")
        self.submenufn = submenufn if submenufn else lambda x,y: y
self.itemfn = itemfn if itemfn else lambda x,y: y
self.suppressBody = True # Turn off the Block's automatic div creation because we have another block
self.sel = selAttr
self.lst = chunkBuffer()
self.menu = chunkTag(["div"],self.lst)
self.menu.id = self.id
for i in items: #(text, item) in items:
if type(i) is TupleType: # Allow either (text,item) or just text
text = i[0]
item = i[1]
else:
text = i
item = None
tg = self.itemfn(depth,text) #chunkTag("center",text)
if isInstanceOf(text,Template):
text.apply(tg)
self.lst.append(tg)
if type(item) == type([]):
v = VerticalMenu(item,copy.deepcopy(self.sel),self.submenufn,self.itemfn,myId+text,depth+1)
self.lst.append(self.submenufn(depth, v))
if self.sel:
print "selAttr ", self.sel
tmp = self.lst[0]
print self.lst[0]
self.lst[0] = copy.deepcopy(self.sel)
self.lst[0].setrec(tmp) # For now, put the selection on the first item
def item(self,idx):
"Return the nth menu item"
return self.lst[idx]
def gen(self,doc):
# Push any attributes and styles set on me to the first generated block, because suppressBody is true
self.menu.attrs.update(self.attrs)
self.menu.styles.update(self.styles)
Block.gen(self,doc)
genDoc(doc, self.menu)
class HorizList(chunkBuffer):
def __init__(self, items,selAttr=None, separator = None,myId=None):
chunkBuffer.__init__(self)
self.id = myId
self.sel = selAttr
count = 0
for i in items: #(text, item) in items:
if type(i) is TupleType: # Allow either (text,item) or just text
text = i[0]
item = i[1]
else:
text = i
item = None
if separator is not None and count>0: self.append(separator)
self.append(text)
if type(item) == type([]): #TODO, add a popup drop down menu
pass
count += 1
# class HorizTable:
def Test():
vdef = VerticalList([('Home',None),('Art',None),('Code',[('C/C++',None),('Python',None)]),('Woodworking',None) ],color(Color(255,0,100),bold(None)), )
vdef1 = VerticalList([(x,None) for x in activeHighlightItems(['Home','Art','Code','Woodworking' ], Color(255,0,200), Color(50,50,50) )] )
v3 = VerticalMenu([('Home',None),('Art',None),('Code',[('C/C++',None),('Python',None)]),('Woodworking',None) ],None,lambda depth,x: resize(-1,x) )
v4 = VerticalMenu([('Home',None),('Art',None),('Code',[('C/C++',None),('Python',None)]),('Woodworking',None) ],None,lambda depth,x: resize(-1,italic(x)) )
ah = lambda x: activeHighlightItems(x,Color(255,0,200), Color(50,50,50))
i = ah(['Home','Art','Code','C/C++','Python','PyHtmlGen','Woodworking'])
v5 = VerticalMenu([(i[0],None),(i[1],None),(i[2],[(i[3],None),(i[4],[(i[5],None)])]),(i[6],None) ],None,lambda depth,x: resize(-1,italic(x)) )
ah = lambda x: activeStyleItems(x,"color:rgb(0,255,0); font-weight: bold; text-transform: uppercase;", "color:rgb(0,0,0)")
i = ah(['Home','Art','Code','C/C++','Python','PyHtmlGen','Woodworking'])
v6 = VerticalMenu([(i[0],None),(i[1],None),(i[2],[(i[3],None),(i[4],[(i[5],None)])]),(i[6],None) ],None,lambda depth,x: resize(-1,italic(x)) )
ah = lambda x: activeStyleItems(x,"color:rgb(0,255,0); font-size: 150%; ", "")
i = ah(['Home','Art','Code','C/C++','Python','PyHtmlGen','Woodworking'])
v7 = VerticalMenu([(i[0],None),(i[1],None),(i[2],[(i[3],None),(i[4],[(i[5],None)])]),(i[6],None) ],None,lambda depth,x: resize(-1,italic(x)) )
# I can add a link by specifying an "onclick" action for each item
ah = lambda x: activeStyleItems([(y,jsLink("testmenuregress.html")) for y in x],"color:rgb(0,0,255); padding: 5px; border: solid; border-width: thin; width: 95%", "")
i = ah(['Home','Art','Code','C/C++','Python','PyHtmlGen','Woodworking'])
v8 = VerticalMenu([(i[0],None),(i[1],None),(i[2],[(i[3],None),(i[4],[(i[5],None)])]),(i[6],None) ],None,lambda depth,x: resize(-1,italic(x)) )
# I can add a link using an anchor
ah = lambda x: activeStyleItems([anchor("testmenuregress.html",y) for y in x] ,"background:rgb(50,50,50); color:white;", "")
i = ah(['Home','Art','Code','C/C++','Python','PyHtmlGen','Woodworking'])
v9 = VerticalMenu([(i[0],"{url home}"),(i[1],None),(i[2],[(i[3],None),(i[4],[(i[5],None)])]),(i[6],None) ],None,lambda depth,x: resize(-1,italic(x)) )
hdef2 = HorizList(
[(x,None) for x in activeHighlightItems(['Home', 'Art', 'Code', 'Woodworking' ],
Color(255,0,200),
Color(50,50,50) )],color(Color(255,0,100),bold(None))
)
# hdef2 = HorizList(color(Color(255,0,100),bold(None)),[(x,None) for x in ['Home','Art','Code','Woodworking' ]] )
mall = ChunkBuffer([vdef,vdef1,v3,hdef2,v4,v5,v6,v7,v8,v9])
doc = HtmlSkel()
mall.gen(doc)
doc.Insert([chunkStr(BR+BR+"\n\n\n")],doc.body,Before)
print str(doc)
file = open('testmenuregress.html','w')
file.write(str(doc))
    file.close()
|