repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
shannpersand/cooper-type | workshops/Python Workshop/Just/2016-06-21 Cooper workshop day 2/14 circle function.py | Python | cc0-1.0 | 416 | 0.004808 | def circle(cx, cy, diameter):
radius = diameter / 2
oval(cx - radius, | cy - radius, diameter, diameter)
# diameter = 254
# radius = diameter / 2
# cx, cy = (420, 532)
# oval(cx - radius, cy - radius, diameter, diameter)
circle(420, 532, 254)
# diameter = 154
# rad | ius = diameter / 2
# cx, cy = (728, 414)
# oval(cx - radius, cy - radius, diameter, diameter)
circle(728, 414, 154)
circle(510, 258, 306)
|
levlaz/braindump | app/model/base.py | Python | mit | 304 | 0 | from datetime import datetime
from app import db
class Base(db.Model):
__abstract | __ = True
id = db.Column(db.Integer, primary_key=True)
created_date = db.Column(db.DateTime, index=True, default=datetime.utcnow)
updated_date = db.Column(db.DateTime | , index=True, default=datetime.utcnow)
|
certsocietegenerale/FIR | fir_artifacts_enrichment/tasks.py | Python | gpl-3.0 | 832 | 0.001202 | from abuse_finder import domain_abuse, ip_abuse, email_abuse, url_abuse
from fir_artifacts.models import Artifact
from fir_celery.celeryconf import celery_app
ENRICHMENT_FUNCTIONS = {
'hostname': domain_abuse,
'ip': ip_abuse,
'email': email_abuse,
'url': url_abuse
}
@celery_app.task
def enrich_artifact(artifact_id):
artifact = Artifact.objects.get(pk=artifact | _id)
print("Enrichment for {}".format(artifact.value))
if artifact.type in ENRICHMENT_FUNCTIONS:
results = ENRICHMENT_FUNCTIONS[artifact.type](artifact.value)
enrichment = ArtifactEnrichment(
artifact=artifact,
name='\n'.join(results['names']),
email='; '.join(results['abuse']),
raw=resul | ts['raw']
)
enrichment.save()
from fir_artifacts_enrichment.models import ArtifactEnrichment
|
ktsstudio/tornkts | tornkts/mixins/validate_mixin.py | Python | mit | 578 | 0.00173 | from mongoengine import ValidationError
from tornkts.base.server_response import ServerError
class ValidateMixin(object):
def validate_model(self, prefix=None):
if prefix is not None:
p | refix += '.'
else:
prefix = ''
try:
self.valida | te()
except ValidationError as e:
field = e.errors.keys().pop()
raise ServerError(ServerError.INVALID_PARAMETER,
field=prefix + str(field),
field_problem=ServerError.FIELD_INVALID_FORMAT) |
theopencorps/theopencorps | theopencorps/endpoints/github.py | Python | agpl-3.0 | 8,932 | 0.003583 | """
Lightweight wrapper around the GitHub API for use in GAE
"""
__copyright__ = """
Copyright (C) 2016 Potential Ventures Ltd
This file is part of theopencorps
<https://github.com/theopencorps/theopencorps/>
"""
__license__ = """
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import base64
import json
from theopencorps.endpoints import APIEndpointBase, HTTPException, cache
class GithubEndpoint(APIEndpointBase):
_endpoint = "https://api.github.com"
_accept = "application/vnd.github.v3+json"
def __init__(self, token=None):
APIEndpointBase.__init__(self)
self.token = token
self.log.info("Created endpoint with token %s", repr(token))
@property
@cache
def user(self):
"""
Get the currently logged in user
"""
return json.loads(self.request("/user").content)
def get_repos(self):
username = self.user["login"]
self.log.info("Fetching repos for %s", username)
return json.loads(self.request("/users/%s/repos" % username).content)
def get_repo_async(self, user, repo):
return self.request_async("/repos/%s/%s" % (user, repo))
def get_repo(self, user, repo):
response = self.request("/repos/%s/%s" % (user, repo))
if response.status_code != 200:
raise HTTPException("Attempt to retrieverepo info %s/%s returned %d (%s)",
user, repo,
response.status_code, response.content)
return json.loads(response.content)
def get_file(self, user, repo, path):
response = self.request("/repos/%s/%s/contents/%s" % (user, repo, path))
if response.status_code != 200:
raise HTTPException("Attempt to retrieve %s/%s/%s returned %d (%s)",
user, repo, path,
response.status_code, response.content)
response = json.loads(response.content)
assert response['encoding'] == "base64"
return base64.b64decode(response['content'])
def fork(self, user, repo, organisation="", block=True):
"""
Fork a repo
If block, wait until the new repository is available and
return the new repository information
"""
if not block:
raise NotImplementedError("Haven't implemented non-blocking fork yet")
if organisation:
payload = json.dumps({"organization": organisation})
fullname = "%s/%s" % (organisation, repo)
else:
payload = None
fullname = "%s/%s" % (self.user["login"], repo)
result = self.request("/repos/%s/%s/forks" % (user, repo),
method="POST",
payload=payload)
if result.status_code != 202:
raise HTTPException("Attempt to create fork of %s/%s returned %d (%s)",
user, repo,
result.status_code, result.content)
self.log.info("Forking %s/%s to %s returned %d",
user, repo, fullname, result.status_code)
return json.loads(result.content)
# pylint: disable=too-many-arguments
def create_webhook(self, user, repo, url,
events=("push",), secret="bingo", insecure=True):
"""
Create a webhook on a given repository
"""
payload = {
"name" : "web",
"active": True,
"events": events,
"config": {
"url": url,
"content_type": "json",
"secret": secret,
}
}
# Currenyly github doesn't seem to like let's encrypt
if insecure:
payload["config"]["insecure_ssl"] = "1"
response = self.request("/repos/%s/%s/hooks" % (user, repo),
method="POST",
payload=json.dumps(payload))
if response.status_code != 201:
raise HTTPException("Attempt to create webhook on %s/%s returned %d (%s)",
user, repo,
response.status_code, response.content)
return True
def get_head(self, user, repo, branch='master'):
"""
Find the SHA1 of the tip of selected branch
"""
response = self.request("/repos/%s/%s/git/refs/heads/%s" % (user, repo, branch))
if response.status_code != 200:
return None
current = json.loads(response.content)
return current["object"]["sha"]
# pylint: disable=too-many-arguments
def commit_file(self, user, repo, path, content, message,
branch='master'):
"""
Commit a file
path | (str) path to file
content (str) file contents
message (str) commit message
"""
# Find the SHA1 of the existing f | ile, if it exists
response = self.request("/repos/%s/%s/contents/%s" % (user, repo, path),
payload=json.dumps({
"path" : path,
"ref" : branch}))
if response.status_code == 404:
sha1 = None
else:
current = json.loads(response.content)
sha1 = current['sha']
parameters = {
"path" : path,
"message" : message,
"branch" : branch,
"content" : base64.b64encode(content),
"committer": {
"name" : self.user['name'],
"email" : self.user['email'],
},
}
if sha1 is not None:
parameters['sha'] = sha1
response = self.request("/repos/%s/%s/contents/%s" % (user, repo, path),
payload=json.dumps(parameters),
method="PUT")
if sha1 is None:
return response.status_code == 201
return response.status_code == 200
# pylint: disable=too-many-arguments
def cherry_pick(self, user, repo, sha1, branch="master", force=False):
"""
Cherry pick an sha1 onto user/repo
Returns the sha if something was merged
or False if not
"""
result = self.request("/repos/%s/%s/git/refs/heads/%s" % (user, repo, branch),
method="PATCH",
payload=json.dumps({"sha": sha1, "force": force}))
msg = "%s/%s <- %s" % (user, repo, sha1)
if result.status_code == 200:
self.log.info("Cherry-picked %s", msg)
return sha1
raise HTTPException("Cherry-pick failed: %s (%d)", msg, result.status_code)
def merge(self, user, repo, sha1, base="master"):
"""
Merge an sha1 into user/repo
Returns the sha if something was merged
or False if not
"""
result = self.request("/repos/%s/%s/merges" % (user, repo),
method="POST",
payload=json.dumps({
"base" : base,
"head" : sha1}))
msg = "%s/%s <- %s" % (user, repo, sha1)
mapping = {201: "successful", 202: "accepted", 204: "no-op"}
if result.status_code in mapping:
self.log.info("Merge %s (%s)", mapping[result.status_code], msg)
sha = ""
|
reinout/django | tests/migrations/test_optimizer.py | Python | bsd-3-clause | 24,806 | 0.002177 | from django.db import migrations, models
from django.db.migrations import operations
from django.db.migrations.optimizer import MigrationOptimizer
from django.test import SimpleTestCase
from .models import EmptyManager, UnicodeModel
class OptimizerTests(SimpleTestCase):
"""
Tests the migration autodetector.
"""
def optimize(self, operations, app_label):
"""
Handy shortcut for getting results + number of loops
"""
optimizer = MigrationOptimizer()
return o | ptimizer.optimize | (operations, app_label), optimizer._iterations
def assertOptimizesTo(self, operations, expected, exact=None, less_than=None, app_label=None):
result, iterations = self.optimize(operations, app_label)
result = [repr(f.deconstruct()) for f in result]
expected = [repr(f.deconstruct()) for f in expected]
self.assertEqual(expected, result)
if exact is not None and iterations != exact:
raise self.failureException(
"Optimization did not take exactly %s iterations (it took %s)" % (exact, iterations)
)
if less_than is not None and iterations >= less_than:
raise self.failureException(
"Optimization did not take less than %s iterations (it took %s)" % (less_than, iterations)
)
def assertDoesNotOptimize(self, operations, **kwargs):
self.assertOptimizesTo(operations, operations, **kwargs)
def test_single(self):
"""
The optimizer does nothing on a single operation,
and that it does it in just one pass.
"""
self.assertOptimizesTo(
[migrations.DeleteModel("Foo")],
[migrations.DeleteModel("Foo")],
exact=1,
)
def test_create_delete_model(self):
"""
CreateModel and DeleteModel should collapse into nothing.
"""
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.DeleteModel("Foo"),
],
[],
)
def test_create_rename_model(self):
"""
CreateModel should absorb RenameModels.
"""
managers = [('objects', EmptyManager())]
self.assertOptimizesTo(
[
migrations.CreateModel(
name="Foo",
fields=[("name", models.CharField(max_length=255))],
options={'verbose_name': 'Foo'},
bases=(UnicodeModel,),
managers=managers,
),
migrations.RenameModel("Foo", "Bar"),
],
[
migrations.CreateModel(
"Bar",
[("name", models.CharField(max_length=255))],
options={'verbose_name': 'Foo'},
bases=(UnicodeModel,),
managers=managers,
)
],
)
def test_rename_model_self(self):
"""
RenameModels should absorb themselves.
"""
self.assertOptimizesTo(
[
migrations.RenameModel("Foo", "Baa"),
migrations.RenameModel("Baa", "Bar"),
],
[
migrations.RenameModel("Foo", "Bar"),
],
)
def test_create_alter_model_options(self):
self.assertOptimizesTo(
[
migrations.CreateModel('Foo', fields=[]),
migrations.AlterModelOptions(name='Foo', options={'verbose_name_plural': 'Foozes'}),
],
[
migrations.CreateModel('Foo', fields=[], options={'verbose_name_plural': 'Foozes'}),
]
)
def _test_create_alter_foo_delete_model(self, alter_foo):
"""
CreateModel, AlterModelTable, AlterUniqueTogether/AlterIndexTogether/
AlterOrderWithRespectTo, and DeleteModel should collapse into nothing.
"""
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.AlterModelTable("Foo", "woohoo"),
alter_foo,
migrations.DeleteModel("Foo"),
],
[],
)
def test_create_alter_unique_delete_model(self):
self._test_create_alter_foo_delete_model(migrations.AlterUniqueTogether("Foo", [["a", "b"]]))
def test_create_alter_index_delete_model(self):
self._test_create_alter_foo_delete_model(migrations.AlterIndexTogether("Foo", [["a", "b"]]))
def test_create_alter_owrt_delete_model(self):
self._test_create_alter_foo_delete_model(migrations.AlterOrderWithRespectTo("Foo", "a"))
def _test_alter_alter_model(self, alter_foo, alter_bar):
"""
Two AlterUniqueTogether/AlterIndexTogether/AlterOrderWithRespectTo
should collapse into the second.
"""
self.assertOptimizesTo(
[
alter_foo,
alter_bar,
],
[
alter_bar,
],
)
def test_alter_alter_table_model(self):
self._test_alter_alter_model(
migrations.AlterModelTable("Foo", "a"),
migrations.AlterModelTable("Foo", "b"),
)
def test_alter_alter_unique_model(self):
self._test_alter_alter_model(
migrations.AlterUniqueTogether("Foo", [["a", "b"]]),
migrations.AlterUniqueTogether("Foo", [["a", "c"]]),
)
def test_alter_alter_index_model(self):
self._test_alter_alter_model(
migrations.AlterIndexTogether("Foo", [["a", "b"]]),
migrations.AlterIndexTogether("Foo", [["a", "c"]]),
)
def test_alter_alter_owrt_model(self):
self._test_alter_alter_model(
migrations.AlterOrderWithRespectTo("Foo", "a"),
migrations.AlterOrderWithRespectTo("Foo", "b"),
)
def test_optimize_through_create(self):
"""
We should be able to optimize away create/delete through a create or delete
of a different model, but only if the create operation does not mention the model
at all.
"""
# These should work
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.CreateModel("Bar", [("size", models.IntegerField())]),
migrations.DeleteModel("Foo"),
],
[
migrations.CreateModel("Bar", [("size", models.IntegerField())]),
],
)
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.CreateModel("Bar", [("size", models.IntegerField())]),
migrations.DeleteModel("Bar"),
migrations.DeleteModel("Foo"),
],
[],
)
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.CreateModel("Bar", [("size", models.IntegerField())]),
migrations.DeleteModel("Foo"),
migrations.DeleteModel("Bar"),
],
[],
)
# This should not work - FK should block it
self.assertDoesNotOptimize(
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.CreateModel("Bar", [("other", models.ForeignKey("testapp.Foo", models.CASCADE))]),
migrations.DeleteModel("Foo"),
],
)
# The same operations should be optimized if app_label is specified and
# a FK references a model from the other app.
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.CreateModel("Bar", [("other", |
ken0-1n/mutation_util | tests/test_mutation_filter.py | Python | gpl-3.0 | 9,110 | 0.014709 | #! /usr/bin/env python
import sys
import unittest
import os, tempfile, shutil, filecmp
import subprocess
from mutation_util import mutation_filter as mf
from mutation_util import genomon_header_info
class TestMutationFilter(unittest.TestCase):
######################################
# Tumor/Normal Pair, Annoformat
######################################
def test1_1(self):
cur_dir = os.path.dirname(os.path.abspath(__file__))
"--fish_pval 1.0 --realign_pval 1.0 --eb_pval 4.0 --tcount 4 --ncount 2"
ghi = genomon_header_info.Genomon_header_info()
input = cur_dir + "/../data/5929_small_realignment_result_test1.txt"
output = cur_dir + "/../data/5929_small_result_test1_1.txt"
ebpval = 4.0
fishpval = 1.0
realignpval = 1.0
tcount = 4
ncount = 2
post10q = 100
r_post10q = 100
v_count = 100
hotspot_database = ""
mf.filter_mutation_list( \
input, \
output, \
ebpval, \
fishpval, \
realignpval, \
tcount, \
ncount, \
post10q, \
r_post10q, \
v_count, \
hotspot_database, \
ghi)
answer_file = cur_dir + "/../data/5929_small_result_answer_test1_1.txt"
self.assertTrue(filecmp.cmp(output, answer_file, shallow=False))
def test1_2(self):
cur_dir = os.path.dirname(os.path.abspath(__file__))
"--fish_pval 1.0 --realign_pval 1.0 --eb_pval 4.0 --tcount 4 --ncount 2"
ghi = genomon_header_info.Genomon_header_info()
input = cur_dir + "/../data/5929_small_realignment_result_test1.2.txt"
output = cur_dir + "/../data/5929_small_result_test1_2.txt"
ebpval = 4.0
fishpval = 1.0
realignpval = 1.0
tcount = 4
ncount = 2
post10q = 100
r_post10q = 100
v_count = 100
hotspot_database = ""
mf.filter_mutation_list( \
input, \
output, \
ebpval, \
fishpval, \
realignpval, \
tcount, \
ncount, \
post10q, \
r_post10q, \
v_count, \
hotspot_database, \
ghi)
answer_file = cur_dir + "/../data/5929_small_result_answer_test1_2.txt"
self.assertTrue(filecmp.cmp(output, answer_file, shallow=False))
def test2_1(self):
cur_dir = os.path.dirname(os.path.abspath(__file__))
"--post10q 0.1 --r_post10q 0.1 --count 4"
ghi = genomon_header_info.Genomon_header_info()
input = cur_dir + "/../data/5929_small_realignment_result_test2.txt"
output = cur_dir + "/../data/5929_small_result_test2_1.txt"
ebpval = 100
fishpval = 100
realignpval = 100
tcount = 100
ncount = 100
post10q = 0.1
r_post10q = 0.1
v_count = 4
hotspot_database = ""
mf.filter_mutation_list( \
input, \
output, \
ebpval, \
fishpval, \
realignpval, \
tcount, \
ncount, \
post10q, \
r_post10q, \
v_count, \
hotspot_database, \
ghi)
answer_file = cur_dir + "/../data/5929_small_result_answer_test2_1.txt"
self.assertTrue(filecmp.cmp(output, answer_file, shallow=False))
def test2_2(self):
cur_dir = os.path.dirname(os.path.abspath(__file__))
"--post10q 0.1 --r_post10q 0.1 --count 4"
ghi = genomon_header_info.Genomon_header_info()
input = cur_dir + "/../data/5929_small_realignment_result_test2.2.txt"
output = cur_dir + "/../data/5929_small_result_answer_test2_2.txt"
ebpval = 100
fishpval = 100
realignpval = 100
tcount = 100
ncount = 100
post10q = 0.1
r_post10q = 0.1
v_count = 4
hotspot_database = ""
mf.filter_mutation_list( \
input, \
output, \
ebpval, \
fishpval, \
realignpval, \
tcount, \
ncount, \
post10q, \
r_post10q, \
v_count, \
hotspot_database, \
ghi)
answer_file = cur_dir + "/../data/5929_small_result_answer_test2_2.txt"
self.assertTrue(filecmp.cmp(output, answer_file, shallow=False))
######################################
# Tumor/Normal Pair, VCF format
######################################
def test3_1(self):
cur_dir = os.path.dirname(os.path.abspath(__file__))
"--fish_pval 1.0 --realign_pval 1.0 --eb_pval 4.0 --tcount 4 --ncount 2"
ghi = genomon_header_info.Genomon_header_info()
input = cur_dir + "/../data/5929_small_realignment_result_test3.txt"
output = cur_dir + "/../data/5929_small_result_test3_1.txt"
ebpval = 4.0
fishpval = 1.0
realignpval = 1.0
tcount = 4
ncount = 2
post10q = 100
r_post10q = 100
sample1 = "5929_tumor"
sample2 = "5929_control"
mf.filter_mutation_vcf( \
input, \
output, \
ebpval, \
fishpval, \
realignpval, \
tcount, \
ncount, \
post10q, \
r_post10q, \
sample1, \
sample2, \
ghi)
answer_file = cur_dir + "/../data/5929_small_resul | t_answer_test3_1.txt"
self.assertTrue(filecmp.cmp(output, answer_file, shallow=False))
def test3_2(self):
cur_dir = os.path.dirname(os.path.abspath(__file__))
| "--fish_pval 1.0 --realign_pval 1.0 --eb_pval 4.0 --tcount 4 --ncount 2"
ghi = genomon_header_info.Genomon_header_info()
input = cur_dir + "/../data/5929_small_realignment_result_test3.2.txt"
output = cur_dir + "/../data/5929_small_result_test3_2.txt"
ebpval = 4.0
fishpval = 1.0
realignpval = 1.0
tcount = 4
ncount = 2
post10q = 100
r_post10q = 100
sample1 = "5929_tumor"
sample2 = "5929_control"
mf.filter_mutation_vcf( \
input, \
output, \
ebpval, \
fishpval, \
realignpval, \
tcount, \
ncount, \
post10q, \
r_post10q, \
sample1, \
sample2, \
ghi)
answer_file = cur_dir + "/../data/5929_small_result_answer_test3_2.txt"
self.assertTrue(filecmp.cmp(output, answer_file, shallow=False))
def test4_1(self):
cur_dir = os.path.dirname(os.path.abspath(__file__))
"--post10q 0.1 --r_post10q 0.1 --count 4"
ghi = genomon_header_info.Genomon_header_info()
input = cur_dir + "/../data/5929_small_realignment_result_test4.txt"
output = cur_dir + "/../data/5929_small_result_test4_1.txt"
ebpval = 100
fishpval = 100
realignpval = 100
tcount = 4
ncount = 100
post10q = 0.1
r_post10q = 0.1
sample1 = "5929_tumor"
sample2 = None
mf.filter_mutation_vcf( \
input, \
output, \
ebpval, \
fishpval, \
realignpval, \
tcount, \
ncount, \
post10q, \
r_post10q, \
sample1, \
sample2, \
ghi)
answer_file = cur_dir + "/../data/5929_small_result_test4_1.txt"
self.assertTrue(filecmp.cmp(output, answer_file, shallow=False))
def test4_2(self):
cur_dir = os.path.dirname(os.path.abspath(__file__))
"--post10q 0.1 --r_post10q 0.1 --count 4"
ghi = genomon_header_info.Genomon_header_info()
input = cur_dir + "/../data/5929_small_realignment_result_test4.2.txt"
output = cur_dir + "/../data/5929_small_result_test4_2.txt"
ebpval = 100
fishpval = 100 |
melkamar/gitbot | tests/test_fetch_issues.py | Python | mit | 569 | 0 | from gitbot import github_issues_bot
fetch_issues_ur | l = 'https://api.github.com/repos/{}/issues?sta | te={}'
repository = "melkamar/mi-pyt-test-issues"
def test_fetch_issues(auth_session):
"""
Test if number of issues obtained directly from GitHub API (as JSON)
is equal to the number of parsed Issue objects.
:param auth_session:
:return:
"""
response = auth_session.get(fetch_issues_url.format(repository, 'all'))
issues = github_issues_bot.fetch_issues(repository, 'all', auth_session)
assert len(issues) == len(response.json())
|
meirwah/st2contrib | packs/softlayer/actions/create_instance.py | Python | apache-2.0 | 1,208 | 0.000828 | from lib.softlayer import SoftlayerBaseAction
class SoftlayerCreateInstance(SoftlayerBaseAction):
def run(self, | name, datacenter, os="DEBIAN_LATEST", domain="example.com", cpus=1, ram=2048,
disk=100, bandwidth=10, local_disk=True, keyname=None):
driver = self._get_driver()
# build the params list to pass to create_node with the proper kwargs
create_params = {"name": name, "ex_datacenter": datacenter,
self.st2_to_libcloud['os']: os,
| self.st2_to_libcloud['domain']: domain,
self.st2_to_libcloud['cpus']: cpus,
self.st2_to_libcloud['disk']: disk,
self.st2_to_libcloud['ram']: ram,
self.st2_to_libcloud['bandwidth']: bandwidth,
self.st2_to_libcloud['local_disk']: local_disk}
if keyname is not None:
create_params[self.st2_to_libcloud['keyname']] = keyname
# create the node
self.logger.info('Creating node...')
node = driver.create_node(**create_params)
self.logger.info('Node successfully created: {}'.format(node))
return node
|
facebook/fbthrift | thrift/lib/py/protocol/exceptions.py | Python | apache-2.0 | 1,433 | 0.000698 | # Copyright (c) Meta Platforms, Inc. and affiliates.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def create_ThriftUnicodeDecodeError_from_UnicodeDecodeError(error, field_name):
if isinstance(error, ThriftUnicodeDecodeError):
error.field_names.append(field_name)
return error
return ThriftUnicodeDecodeError(
error.encoding, error.object, error.start, error.end, error.reason, field_name
)
class ThriftUnicodeDecodeError(UnicodeDecodeError):
| def __init__(self, encoding, object, start, end, reason, field_name):
super(ThriftUnicodeDec | odeError, self).__init__(
encoding, object, start, end, reason
)
self.field_names = [field_name]
def __str__(self):
return "{error} when decoding field '{field}'".format(
error=super(ThriftUnicodeDecodeError, self).__str__(),
field="->".join(reversed(self.field_names)),
)
|
ybenitezf/nstock | controllers/org.py | Python | mit | 4,822 | 0 | # -*- coding: utf-8 -*-
if False:
from gluon import current, URL, SQLFORM, redirect
from gluon import IS_NOT_EMPTY, Field, IS_EMAIL
from gluon import IS_NOT_IN_DB
request = current.request
response = current.response
session = current.session
cache = current.cache
T = current.T
from db import db, auth
@auth.requires_login()
def index():
"""
Show the user the organizations he/she can access
"""
query = (db.organization.id > 0)
query &= (
auth.accessible_query('read', db.organization) |
auth.accessible_query('update', db.organization))
orgs = db(query).select(db.organization.ALL)
return locals()
@auth.requires(
auth.has_permission('read', db.organization, request.args(0)) or
auth.has_permission('update', db.organization, request.args(0))
)
def view():
"""
Show th | e list of desks in this org
"""
org = db.organization(request.args(0))
session.org_id = org.id
return locals()
@auth.requires(auth.has_permission('update', db.organization, request.args(0)))
def edit():
org = db.organization(request.args(0))
tbl = db.organization
tbl.users.readable = False
tbl.users.writable = False
tbl.desks.readable = False
tbl.desks.writable = False
tbl.name.requ | ires = [IS_NOT_EMPTY()]
# edit form
form = SQLFORM(db.organization, record=org, showid=False)
if form.process().accepted:
redirect(URL('view', args=[org.id]))
return locals()
@auth.requires(auth.has_permission('update', db.organization, request.args(0)))
def members():
org = db.organization(request.args(0))
if not request.args(1):
fld_email = Field('email', 'string', label=T("Email"))
fld_email.requires = IS_EMAIL()
form = SQLFORM.factory(
fld_email,
formstyle='bootstrap3_inline',
submit_button=T("Add user"),
table_name='members')
if form.process().accepted:
u = db.auth_user(email=form.vars.email)
if u is not None:
# create new share
if u.id in org.users:
form.errors.email = T(
"The user is already in the organization")
else:
user_list = org.users
user_list.insert(0, u.id)
org.update_record(users=user_list)
g_id = auth.user_group(u.id)
auth.add_permission(g_id, 'read', db.organization, org.id)
else:
# no user with that email
response.flash = ""
form.errors.email = T("The user don't exists on this system")
elif request.args(1) == 'delete':
# remove the user on args(2) from the org members list
# TODO: remove else any perms on the org desks
user_to_remove = db.auth_user(request.args(2))
if user_to_remove is not None:
user_list = org.users
user_list.remove(user_to_remove.id)
org.update_record(users=user_list)
# remove perms over the org
auth.del_permission(
auth.user_group(user_to_remove.id),
'read',
db.organization,
org.id)
# remove, also, all rights over the desks in the org.
desk_perms = [
'read_desk', 'update_items', 'push_items', 'update_desk']
for desk_id in org.desks:
for perm in desk_perms:
auth.del_permission(
auth.user_group(user_to_remove.id),
perm,
db.desk,
desk_id
)
redirect(URL('org', 'members', args=[org.id]))
return locals()
@auth.requires_login()
def create():
"""Create a new organization"""
tbl = db.organization
tbl.users.readable = False
tbl.users.writable = False
tbl.desks.readable = False
tbl.desks.writable = False
tbl.name.requires = [
IS_NOT_EMPTY(
error_message=T("Cannot be empty")
),
IS_NOT_IN_DB(
db,
'organization.name',
error_message=T(
"An Organization witch that name is allready in nStock"))]
form = SQLFORM(tbl)
form.add_button(T('Cancel'), URL('index'))
if form.process().accepted:
# add the new organization
g_id = auth.user_group(auth.user.id)
# give the user all perms over this org
auth.add_permission(g_id, 'update', tbl, form.vars.id)
auth.add_permission(g_id, 'read', tbl, form.vars.id)
auth.add_permission(g_id, 'delete', tbl, form.vars.id)
redirect(URL('index'))
return locals()
|
marcellarius/webargscontrib.utils | webargscontrib/utils/validate.py | Python | mit | 2,307 | 0.0013 | __author__ = 'sam'
import webargs
from .string import lowercase, strip
def not_null(value):
"""A validation function that checks that a value isn't None."""
return True if value is not None else False
def not_empty(value):
"""
Check if a value is not empty.
This is a simple check that blocks null or empty strings. It will also
attempt to strip a string value to exclude strings containing only
whitespace.
:param value: A string value to check.
:return: True if the string is not empty or only contains whitespace.
"""
# We want to reject empty strings, or strings just containing whitespace. To do
# this we check the result of the strip() method, if the value has one.
if not value or not strip(value):
return False
else:
return True
def choices(valid_choices, case_sensitive=False):
"""
Create a validation function that will ensure a value is in a list of choices.
A usage example:
``Arg(validate=choices(["foo", "bar"]))
:param valid_choices: A list of valid values for this parameter
:param case_sensitive: If false, any string-like values will be lowercased
before comparison. Default is `False`
:return: A validation function
"""
if not case_sensitive:
valid_choices = [lowercase(c) for c in valid_choices]
def validate_choices(value):
if not case_sensitive:
value = lowercase(value)
| if value in valid_choices:
return True
else:
raise webargs.ValidationError(
"Invalid value %s. | Valid choices are %s" %
(repr(value), repr(valid_choices)))
return validate_choices
def within(min=None, max=None):
"""
Create a validation function to check whether an argument value is within
a specified range (inclusive).
`min` and `max` cannot both be None.
:param min: A lower bound for the value. Optional.
:param max: An upper bound for the value. Optional.
:return: A validation function
"""
if min is None and max is None:
raise ValueError("A min or max value must be specified")
def validate_within(value):
return (min is None or value >= min) and (max is None or value <= max)
return validate_within |
hueyyeng/AssetsBrowser | ui/window/ui_applications_list.py | Python | mit | 7,328 | 0.003412 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'K:\Library\Python\AssetsBrowser\ui\window\applications_list.ui'
#
# Created by: PyQt5 UI code generator 5.14.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_AppListDialog(object):
def setupUi(self, AppListDialog):
AppListDialog.setObjectName("AppListDialog")
AppListDialog.resize(480, 360)
AppListDialog.setMinimumSize(QtCore.QSize(480, 360))
self.verticalLayout = QtWidgets.QVBoxLayout(AppListDialog)
self.verticalLayout.setObjectName("verticalLayout")
self.btnPushLayout = QtWidgets.QHBoxLayout()
self.btnPushLayout.setSizeConstraint(QtWidgets.QLayout.SetFixedSize)
self.btnPushLayout.setSpacing(12)
self.btnPushLayout.setObjectName("btnPushLayout")
self.btnPushAdd = QtWidgets.QPushButton(AppListDialog)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.btnPushAdd.sizePolicy().hasHeightForWidth())
self.btnPushAdd.setSizePolicy(sizePolicy)
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(":/resources/plus-circle.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.btnPushAdd.setIcon(icon)
self.btnPushAdd.setIconSize(QtCore.QSize(30, 30))
self.btnPushAdd.setCheckable(False)
self.btnPushAdd.setDefault(False)
self.btnPushAdd.setFlat(False)
self.btnPushAdd.setObjectName("btnPushAdd")
self.btnPushLayout.addWidget(self.btnPushAdd)
self.btnPushRemove = QtWidgets.QPushButton(AppListDialog)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.btnPushRemove.sizePolicy().hasHeightForWidth())
self.btnPushRemove.setSizePolicy(sizePolicy)
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap(":/resources/minus-circle.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.btnPushRemove.setIcon(icon1)
self.btnPushRemove.setIconSize(QtCore.QSize(30, 30))
self.btnPushRemove.setCheckable(False)
self.btnPushRemove.setObjectName("btnPushRemove")
self.btnPushLayout.addWidget(self.btnPushRemove)
self.btnPushClear = QtWidgets.QPushButton(AppListDialog)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.btnPushClear.sizePolicy().hasHeightForWidth())
self.btnPushClear.setSizePolicy(sizePolicy)
icon2 = QtGui.QIcon()
icon2.addPixmap(QtGui.QPixmap(":/resources/x-circle.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.btnPushClear.setIcon(icon2)
self.btnPushClear.setIconSize(QtCore.QSize(30, 30))
self.btnPushClear.setCheckable(False)
self.btnPushClear.setObjectName("btnPushClear")
self.btnPushLayout.addWidget(self.btnPushClear)
spacerItem = QtWidgets.QSpacerItem(539, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.btnPushLayout.addItem(spacerItem)
self.verticalLayout.addLayout(self.btnPushLayout)
self.tableAppList = QtWidgets.QTableWidget(AppListDialog)
self.tableAppList.setSizeAdjustPolicy(QtWidgets.QAbstractScrollArea.AdjustToContents)
self.tableAppList.setObjectName("tableAppList")
self.tableAppList.setColumnCount(2)
self.tableAppList.setRowCount(1)
item = QtWidgets.QTableWidgetItem()
self.tableAppList.setVerticalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.tableAppList.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.tableAppList.setHorizontalHeaderItem(1, item)
self.verticalLayout.addWidget(self.tableAppList)
self.label = QtWidgets.QLabel(AppListDialog)
self.label.setObjectName("label")
self.verticalLayout.addWidget(self.label)
spacerItem1 = QtWidgets.QSpacerItem(20, 15, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
self.verticalLayout.addItem(spacerItem1)
self.btnExportLayout = QtWidgets.QHBoxLayout()
self.btnExportLayout.setSpacing(6)
self.btnExportLayout.setObjectName("btnExportLayout")
self.btnExportCSV = QtWidgets.QPushButton(AppListDialog)
self.btnExportCSV.setEnabled(False)
self.btnExportCSV.setFlat(False)
self.btnExportCSV.setObjectName("btnExportCSV")
self.btnExportLayout.addWidget(self.btnExportCSV)
self.btnExportJSON = QtWidgets.QPushButton(AppListDialog)
self.btnExportJSON.setEnabled(False)
self.btnExportJSON.setObjectName("btnExportJSON")
self.btnExportLayout.addWidget(self.btnExportJSON)
spacerItem2 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.btnExportLayout.addItem(spacerItem2)
self.verticalLayout.addLayout(self.btnExportLayout)
self.btnDialogBox = QtWidgets.QDialogButtonBox(AppListDialog)
self.btnDialogBox.setOrientation(QtCore.Qt.Horizontal)
self.btnDialogBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Ok)
self.btnDialogBox.setObjectName("btnDialogBox")
self.verticalL | ayout.addWidget(self.btnDialogBox)
self.tableAppList.raise_()
self.btnDialogBox.raise_()
self.label.raise_()
self.retranslateUi(AppListDialog)
self.btnDialogBox.accepte | d.connect(AppListDialog.accept)
self.btnDialogBox.rejected.connect(AppListDialog.reject)
QtCore.QMetaObject.connectSlotsByName(AppListDialog)
def retranslateUi(self, AppListDialog):
_translate = QtCore.QCoreApplication.translate
AppListDialog.setWindowTitle(_translate("AppListDialog", "Applications List"))
self.btnPushAdd.setToolTip(_translate("AppListDialog", "Add Asset Item Format"))
self.btnPushRemove.setToolTip(_translate("AppListDialog", "Remove selected Asset Item Formats"))
self.btnPushClear.setToolTip(_translate("AppListDialog", "Clear all Asset Item Formats"))
item = self.tableAppList.verticalHeaderItem(0)
item.setText(_translate("AppListDialog", "1"))
item = self.tableAppList.horizontalHeaderItem(0)
item.setText(_translate("AppListDialog", "Name"))
item = self.tableAppList.horizontalHeaderItem(1)
item.setText(_translate("AppListDialog", "Application Path"))
self.label.setText(_translate("AppListDialog", "<html><head/><body><p>Use full path for Application Path.</p><p>E.g. <span style=\" font-weight:600;\">C:\\Program Files\\Autodesk\\Maya2020\\bin\\maya.exe</span></p></body></html>"))
self.btnExportCSV.setText(_translate("AppListDialog", " Export to CSV "))
self.btnExportJSON.setText(_translate("AppListDialog", " Export to JSON "))
from . import icons_rc
|
hetica/webeni | main/forms.py | Python | lgpl-3.0 | 531 | 0.039623 | # -*- coding: utf-8 -*-
from django import forms
class StagiaireForms(for | ms.Form):
_choix = ( (1, 'Stagiaire'), (2, 'Classe'),)
choisir = forms.ChoiceField(choices =_choix, widget = forms.RadioSelect, required = True, initial = '1', label = '')
chercher = forms.CharField(max_length=100, required=False, label = '')
def __init__(self, *args, **kwargs):
super(StagiaireForms, self).__init__(*args, **kwargs)
self.fields['chercher'].widget.attrs.update({'autofocus': 'autofocus', 'placeholder': 'Nom, prénom ou cla | sse'})
|
pdyban/dicombrowser | dicomviewer/streamview.py | Python | apache-2.0 | 1,408 | 0.002131 | from .iview import IView
from .iview import IViewModel
from .model import Model
class StreamView(IView):
def __init__(self, stream):
super(StreamView, self).__init__()
self.stream = stream
self.separator = '\t'
def update(self):
if self.viewmodel is None:
raise AttributeError("A viewmodel has not been connected."
"First connect a viewmodel, then call view.update()")
lines = self.viewmodel.items
# compute column width for optimal presentation
col_width = []
for column in range(len(lines[0])):
col_width.append(max(len(line[column]) for line in lines) + 2)
# output to stream
for line in lines:
self.stream.write(self.separator.join(word.ljust(col_width[column]) for column, word in enumerate(line)))
self.stream.write('\n')
class StreamViewModel(IViewModel):
def __init__(self, model, view):
super(StreamViewModel, self).__init__(model, view)
def build(self):
self.items = []
# header line
heade | rs = ['Filename'] + [tag for tag in self.model.select_tags]
self.items.append(headers)
# for each file
for fname in self.model.items:
line = [fname] + [self.model.items[fname][tag] for tag in self | .model.select_tags]
self.items.append(line)
|
squeaky-pl/pystacia | doc/source/image/desaturate.py | Python | mit | 216 | 0 | from os.path import dirname, join
from pystacia import lena
dest = join(dirname(__file__), ' | ../_static/generated')
image = lena(256)
image.desaturate()
image.write(join(dest, 'lena_desa | turate.jpg'))
image.close()
|
xombiemp/CouchPotatoServer | couchpotato/core/downloaders/nzbget.py | Python | gpl-3.0 | 12,455 | 0.005781 | from base64 import standard_b64encode
from datetime import timedelta
import re
import shutil
import socket
import traceback
import xmlrpclib
from couchpotato.core._base.downloader.main import DownloaderBase, ReleaseDownloadList
from couchpotato.core.helpers.encoding import ss, sp
from couchpotato.core.helpers.variable import tryInt, md5, cleanHost
from couchpotato.core.logger import CPLog
log = CPLog(__name__)
autoload = 'NZBGet'
class NZBGet(DownloaderBase):
protocol = ['nzb']
rpc = 'xmlrpc'
def download(self, data = None, media = None, filedata = None):
""" Send a torrent/nzb file to the downloader
:param data: dict returned from provider
Contains the release information
:param media: media dict with information
Used for creating the filename when possible
:param filedata: downloaded torrent/nzb filedata
The file gets downloaded in the searcher and send to this function
This is done to have failed checking before using the downloader, so the downloader
doesn't need to worry about that
:return: boolean
One faile returns false, but the downloaded should log his own errors
"""
if not media: media = {}
if not data: data = {}
if not filedata:
log.error('Unable to get NZB file: %s', traceback.format_exc())
return False
log.info('Sending "%s" to NZBGet.', data.get('name'))
nzb_name = ss('%s.nzb' % self.createNzbName(data, media))
rpc = self.getRPC()
try:
if rpc.writelog('INFO', 'CouchPotato connected to drop off %s.' % nzb_name):
log.debug('Successfully connected to NZBGet')
else:
log.info('Successfully connected to NZBGet, but unable to send a message')
except socket.error:
log.error('NZBGet is not responding. Please ensure that NZBGet is running and host setting is correct.')
return False
except xmlrpclib.ProtocolError as e:
if e.errcode == 401:
log.error('Password is incorrect.')
else:
log.error('Protocol Error: %s', e)
return False
if re.search(r"^0", rpc.version()):
xml_response = rpc.append(nzb_name, self.conf('category'), False, standard_b64encode(filedata.strip()))
else:
xml_response = rpc.append(nzb_name, self.conf('category'), tryInt(self.conf('priority')), False, standard_b64encode(filedata.strip()))
if xml_response:
log.info('NZB sent successfully to NZBGet')
nzb_id = md5(data['url']) # about as unique as they come ;)
couchpotato_id = "couchpotato=" + nzb_id
groups = rpc.listgroups()
file_id = [item['LastID'] for item in groups if item['NZBFilename'] == nzb_name]
confirmed = rpc.editqueue("GroupSetParameter", 0, couchpotato_id, file_id)
if confirmed:
log.debug('couchpotato parameter set in nzbget download')
return self.downloadReturnId(nzb_id)
else:
log.error('NZBGet could not add %s to the queue.', nzb_name)
return False
def test(self):
""" Check if connection works
:return: bool
"""
rpc = self.getRPC()
try:
if rpc.writelog('INFO', 'CouchPotato connected to test connection'):
log.debug('Successfully connected to NZBGet')
else:
log.info('Successfully connected to NZBGet, but unable to send a message')
except socket.error:
log.error('NZBGet is not responding. Please ensure that NZBGet is running and host setting is correct.')
return False
except xmlrpclib.ProtocolError as e:
if e.errcode == 401:
log.error('Password is incorrect.')
else:
log.error('Protocol Error: %s', e)
return False
return True
def getAllDownloadStatus(self, ids):
""" Get status of all active downloads
:param ids: list of (mixed) downloader ids
Used to match the releases for this downloader as there could be
other downloaders active that it should ignore
:return: list of releases
"""
log.debug('Checking NZBGet download status.')
rpc = self.getRPC()
try:
if rpc.writelog('INFO', 'CouchPotato connected to check status'):
log.debug('Successfully connected to NZBGet')
else:
log.info('Successfully connected to NZBGet, but unable to send a message')
except socket.error:
log.error('NZBGet is not responding. Please ensure that NZBGet is running and host setting is correct.')
return []
except xmlrpclib.ProtocolError as e:
if e.errcode == 401:
log.error('Password is incorrect.')
else:
log.error('Protocol Error: %s', e)
return []
# Get NZBGet data
try:
status = rpc.status()
groups = rpc.listgroups()
queue = rpc.postqueue(0)
history = rpc.history()
except:
log.error('Failed getting data: %s', traceback.format_exc(1))
return []
release_downloads = ReleaseDownloadList(self)
for nzb in groups:
try:
nzb_id = [param['Value'] for param in nzb['Parameters'] if param['Name'] == 'couchpotato'][0]
except:
nzb_id = nzb['NZBID']
if nzb_id in ids:
log.debug('Found %s in NZBGet download queue', nzb['NZBFilename'])
timeleft = -1
try:
if nzb['ActiveDownloads'] > 0 and nzb['DownloadRate'] > 0 and not (status['DownloadPaused'] or status['Download2Paused']):
timeleft = str(timedelta(seconds = nzb['RemainingSizeMB'] / status['DownloadRate'] * 2 ^ 20))
except:
pass
release_downloads.append({
'id': nzb_id,
'name': nzb['NZBFilename'],
'original_status': 'DOWNLOADING' if nzb['ActiveDownloads'] > 0 else 'QUEUED',
# Seems to have no native API function for time left. This will return the time left after NZBGet started downloading this item
'timeleft': timeleft,
})
for nzb in queue: # 'Parameters' is not passed in rpc.postqueue
| if nzb['NZBID'] in ids:
log.debug('Found %s in NZBGet postprocessing queue', nzb['NZBFilename'])
release_downloads.append({
'id': nzb['NZB | ID'],
'name': nzb['NZBFilename'],
'original_status': nzb['Stage'],
'timeleft': str(timedelta(seconds = 0)) if not status['PostPaused'] else -1,
})
for nzb in history:
try:
nzb_id = [param['Value'] for param in nzb['Parameters'] if param['Name'] == 'couchpotato'][0]
except:
nzb_id = nzb['NZBID']
if nzb_id in ids:
log.debug('Found %s in NZBGet history. TotalStatus: %s, ParStatus: %s, ScriptStatus: %s, Log: %s', (nzb['NZBFilename'] , nzb['Status'], nzb['ParStatus'], nzb['ScriptStatus'] , nzb['Log']))
release_downloads.append({
'id': nzb_id,
'name': nzb['NZBFilename'],
'status': 'completed' if 'SUCCESS' in nzb['Status'] else 'failed',
'original_status': nzb['Status'],
'timeleft': str(timedelta(seconds = 0)),
'folder': sp(nzb['DestDir'])
})
return release_downloads
def removeFailed(self, release_download):
log.info('%s failed downloading, deleting...', release_download['name'])
rpc = self.getRPC()
try:
if rpc.writelog('IN |
vvoland/py3status | py3status/modules/kdeconnector.py | Python | bsd-3-clause | 7,909 | 0.000253 | # -*- coding: utf-8 -*-
"""
Display information about your smartphone with KDEConnector.
Configuration parameters:
cache_timeout: how often we refresh this module in seconds (default 30)
device: the device name, you need this if you have more than one device
connected to your PC (default None)
device_id: alternatively to the device name you can set your device id here
(default None)
format: see placeholders below
(default '{name}{notif_status} {bat_status} {charge}%')
format_disconnected: text if device is disconnected
(default 'device disconnected')
low_threshold: percentage value when text is twitch to color_bad
(default 20)
status_bat: text when battery is discharged (default '⬇')
status_chr: text when device is charged (default '⬆')
status_full: text when battery is full (default '☻')
status_no_notif: text when you have no notifications (default '')
status_notif: text when notifications are available (default ' ✉')
Format placeholders:
{bat_status} battery state
{charge} the battery charge
{name} name of the device
{notif_size} number of notifications
{notif_status} shows if a notification is available or not
Color options:
color_bad: Device unknown, unavailable
or battery below low_threshold and not charging
color_degraded: Connected and battery not charging
color_good: Connected and battery charging
i3status.conf example:
```
kdeconnector {
device_id = "aa0844d33ac6ca03"
format = "{name} {battery} ⚡ {state}"
low_battery = "10"
}
```
Requires:
pydbus
kdeconnect
@author Moritz Lüdecke
SAMPLE OUTPUT
{'color': '#00FF00', 'full_text': u'Samsung Galaxy S6 \u2709 \u2B06 97%'}
charging
{'color': '#00FF00', 'full_text': u'Samsung Galaxy S6 \u2B06 97%'}
transition
{'color': '#FFFF00', 'full_text': u'Samsung Galaxy S6 \u2B07 93%'}
not-plugged
{'color': '#FF0000', 'full_text': u'Samsung Galaxy S6 \u2B07 92%'}
disconnected
{'color': '#FF0000', 'full_text': u'device disconnected'}
unknown
{'color': '#FF0000', 'full_text': u'unknown device'}
"""
from pydbus import SessionBus
SERVICE_BUS = 'org.kde.kdeconnect'
INTERFACE = SERVICE_BUS + '.device'
INTERFACE_DAEMON = SERVICE_BUS + '.daemon'
INTERFACE_BATTERY = INTERFACE + '.battery'
INTERFACE_NOTIFICATIONS = INTERFACE + '.notifications'
PATH = '/modules/kdeconnect'
DEVICE_PATH = PATH + '/devices'
UNKNOWN = 'Unknown'
UNKNOWN_DEVICE = 'unknown device'
UNKNOWN_SYMBOL = '?'
class Py3status:
"""
"""
# available configuration parameters
cache_timeout = 30
device = None
device_id = None
format = '{name}{notif_status} {bat_status} {charge}%'
format_disconnected = 'device disconnected'
low_threshold = 20
status_bat | = u'⬇'
status_chr = u'⬆'
status | _full = u'☻'
status_no_notif = ''
status_notif = u' ✉'
def post_config_hook(self):
self._dev = None
def _init_dbus(self):
"""
Get the device id
"""
_bus = SessionBus()
if self.device_id is None:
self.device_id = self._get_device_id(_bus)
if self.device_id is None:
return False
try:
self._dev = _bus.get(SERVICE_BUS,
DEVICE_PATH + '/%s' % self.device_id)
except Exception:
return False
return True
def _get_device_id(self, bus):
"""
Find the device id
"""
_dbus = bus.get(SERVICE_BUS, PATH)
devices = _dbus.devices()
if self.device is None and self.device_id is None and len(devices) == 1:
return devices[0]
for id in devices:
self._dev = bus.get(SERVICE_BUS, DEVICE_PATH + '/%s' % id)
if self.device == self._dev.name:
return id
return None
def _get_isTrusted(self):
if self._dev is None:
return False
try:
# New method which replaced 'isPaired' in version 1.0
return self._dev.isTrusted()
except AttributeError:
try:
# Deprecated since version 1.0
return self._dev.isPaired()
except AttributeError:
return False
def _get_device(self):
"""
Get the device
"""
try:
device = {
'name': self._dev.name,
'isReachable': self._dev.isReachable,
'isTrusted': self._get_isTrusted(),
}
except Exception:
return None
return device
def _get_battery(self):
"""
Get the battery
"""
try:
battery = {
'charge': self._dev.charge(),
'isCharging': self._dev.isCharging() == 1,
}
except Exception:
return None
return battery
def _get_notifications(self):
"""
Get notifications
"""
try:
notifications = {
'activeNotifications': self._dev.activeNotifications()
}
except Exception:
return None
return notifications
def _get_battery_status(self, battery):
"""
Get the battery status
"""
if battery['charge'] == -1:
return (UNKNOWN_SYMBOL, UNKNOWN, '#FFFFFF')
if battery['isCharging']:
status = self.status_chr
color = self.py3.COLOR_GOOD
else:
status = self.status_bat
color = self.py3.COLOR_DEGRADED
if not battery['isCharging'] and battery['charge'] <= self.low_threshold:
color = self.py3.COLOR_BAD
if battery['charge'] > 99:
status = self.status_full
return (battery['charge'], status, color)
def _get_notifications_status(self, notifications):
"""
Get the notifications status
"""
if notifications:
size = len(notifications['activeNotifications'])
else:
size = 0
status = self.status_notif if size > 0 else self.status_no_notif
return (size, status)
def _get_text(self):
"""
Get the current metadatas
"""
device = self._get_device()
if device is None:
return (UNKNOWN_DEVICE, self.py3.COLOR_BAD)
if not device['isReachable'] or not device['isTrusted']:
return (self.py3.safe_format(self.format_disconnected,
{'name': device['name']}),
self.py3.COLOR_BAD)
battery = self._get_battery()
(charge, bat_status, color) = self._get_battery_status(battery)
notif = self._get_notifications()
(notif_size, notif_status) = self._get_notifications_status(notif)
return (self.py3.safe_format(self.format,
dict(name=device['name'],
charge=charge,
bat_status=bat_status,
notif_size=notif_size,
notif_status=notif_status)),
color)
def kdeconnector(self):
"""
Get the current state and return it.
"""
if self._init_dbus():
(text, color) = self._get_text()
else:
text = UNKNOWN_DEVICE
color = self.py3.COLOR_BAD
response = {
'cached_until': self.py3.time_in(self.cache_timeout),
'full_text': text,
'color': color
}
return response
if __name__ == "__main__":
"""
Run module in test mode.
"""
from py3status.module_test import module_test
module_test(Py3status)
|
tonyseek/simple-rbac | rbac/acl.py | Python | mit | 5,469 | 0 | from __future__ import absolute_import
import itertools
__all__ = ["Registry"]
class Registry(object):
"""The registry of access control list."""
def __init__(self):
self._roles = {}
self._resources = {}
self._allowed = {}
self._denied = {}
# to allow additional short circuiting, track roles that only
# ever deny access
self._denial_only_roles = set()
self._children = {}
def add_role(self, role, parents=[]):
"""Add a role or append parents roles to a special role.
All added roles should be hashable.
(http://docs.python.org/glossary.html#term-hashable)
"""
self._roles.setdefault(role, set())
self._roles[role].update(parents)
for p in parents:
self._children.setdefault(p, set())
self._children[p].add(role)
# all roles start as deny-only (unless one of its parents
# isn't deny-only)
if not parents or self._roles_are_deny_only(parents):
self._denial_only_roles.add(role)
def add_resource(self, resource, parents=[]):
"""Add a resource or append parents resources to a special resource.
All added resources should be hashable.
(http://docs.python.org/glossary.html#term-hashable)
"""
self._resources.setdefault(resource, set())
self._resources[resource].update(parents)
def allow(self, role, operation, resource, assertion=None):
"""Add a allowed rule.
The added rule will allow the role and its all children roles to
operate the resource.
"""
assert not role or role in self._roles
assert not resource or resource in self._resources
self._allowed[role, operation, resource] = assertion
# since we just allowed a permission, role and any children aren't
# denied-only
for r in itertools.chain([role], get_family(self._children, role)):
self._denial_only_roles.discard(r)
def deny(self, role, operation, resource, assertion=None):
"""Add a denied rule.
The added rule will deny the role and its all children roles to
operate the resource.
"""
assert not role or role in self._roles
assert not resource or resource in self._resources
self._denied[role, operation, resource] = assertion
def is_allowed(self, role, operation, resource, check_allowed=True,
**assertion_kwargs):
"""Check the permission.
If the access is denied, this method will return False; if the access
is allowed, this method will return True; if there is not any rule
for the access, this method will return None.
"""
assert not role o | r role in self._roles
assert not resource or resource in self._resources
roles = set(get_family(self._roles, role))
operations = {None, operation}
resources = set(get_family(self._resources, resource))
def DefaultAssertion(*args, **kwargs):
return True
is_allowed = None
default_assertion = DefaultAssertion
for permission in itertools.product(roles, operations, | resources):
if permission in self._denied:
assertion = self._denied[permission] or default_assertion
if assertion(self, role, operation, resource,
**assertion_kwargs):
return False # denied by rule immediately
if check_allowed and permission in self._allowed:
assertion = self._allowed[permission] or default_assertion
if assertion(self, role, operation, resource,
**assertion_kwargs):
is_allowed = True # allowed by rule
return is_allowed
def is_any_allowed(self, roles, operation, resource, **assertion_kwargs):
"""Check the permission with many roles."""
is_allowed = None # no matching rules
for i, role in enumerate(roles):
# if access not yet allowed and all remaining roles could
# only deny access, short-circuit and return False
if not is_allowed and self._roles_are_deny_only(roles[i:]):
return False
check_allowed = not is_allowed
# if another role gave access,
# don't bother checking if this one is allowed
is_current_allowed = self.is_allowed(role, operation, resource,
check_allowed=check_allowed,
**assertion_kwargs)
if is_current_allowed is False:
return False # denied by rule
elif is_current_allowed is True:
is_allowed = True
return is_allowed
def _roles_are_deny_only(self, roles):
return all(r in self._denial_only_roles for r in roles)
def get_family(all_parents, current):
"""Iterate current object and its all parents recursively."""
yield current
for parent in get_parents(all_parents, current):
yield parent
yield None
def get_parents(all_parents, current):
"""Iterate current object's all parents."""
for parent in all_parents.get(current, []):
yield parent
for grandparent in get_parents(all_parents, parent):
yield grandparent
|
leemac/JellyfishRss | rss/settings/development.py | Python | mit | 947 | 0.00528 | from rss.settings.base import *
DEBUG = True
ALLOWED_HOSTS = [
]
DATABASES = {
'default': {
| 'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'jellyfish', # leafreader path to database file if using sqlite3.
# The following settings are not used with sqlite3:
'USER': 'postgres',
'PASSWORD': 'test',
'HOST': 'localhost', # Empty for localhost through domain soc | kets or '127.0.0.1' for localhost through TCP.
'PORT': '5432', # Set to empty string for default.
}
}
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = 'http://localhost:8000/media/'
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'REPLACE_ME' |
yu-peng/english-pddl-translator | getElement.py | Python | mit | 11,844 | 0.01773 | '''
Created on Dec 7, 2012
@author: yupeng
'''
class ActionGroup:
def __init__(self):
self.Actions = []
self.PPs = []
self.NPs = []
def addAction(self,actionString):
self.Actions.append(actionString)
def addActions(self,actionList):
self.Actions = self.Actions + actionList
def addPP(self,PP):
self.PPs.append(PP)
def addPPs(self,PPList):
self.PPs = self.PPs + PPList
def addNP(self,NP):
self.NPs.append(NP)
def addNPs(self,NPList):
self.NPs = self.NPs + NPList
def getActions(self):
return self.Actions
def getPPs(self):
return self.PPs
def getNPs(self):
return self.NPs
def getString(self):
AGString = 'Action: '
for action in self.Actions:
AGString = AGString + action + '; '
AGString = AGString + ' NPs: '
for NP in self.NPs:
AGString = AGString + NP.getString() + '; '
AGString = AGString + ' | PPs: '
for PP in self.PPs:
AGString = AGString + PP.getString() + '; '
return AGString
def getActionString(self):
ActionString = ''
for action in self.Actions:
ActionString = ActionString + action
return ActionString
def getPPString(self):
PPString = ''
for PP in self.PPs: |
PPString = PPString + '-' + PP.getOriginalString()
return PPString[1:]
def getTargetString(self):
TargetString = ''
for NP in self.NPs:
TargetString = TargetString + '-' + NP.getNounString()
return TargetString[1:]
class NounGroup:
def __init__(self):
self.NPs = []
self.PPs = []
def addPP(self,PP):
self.PPs.append(PP)
def addPPs(self,PPList):
self.PPs = self.PPs + PPList
def addNP(self,NPString):
self.NPs.append(NPString)
def addNPs(self,NPList):
self.NPs = self.NPs + NPList
def getPPs(self):
return self.PPs
def getNPs(self):
return self.NPs
def getString(self):
NGString = 'NounPhrase: '
for NP in self.NPs:
NGString = NGString + NP + '; '
NGString = NGString + ' PPs: '
for PP in self.PPs:
NGString = NGString + PP.getString() + '; '
return NGString
def getNounString(self):
NounString = ''
for NP in self.NPs:
NounString = NounString + '-' + NP
return NounString[1:]
def getPPString(self):
PPString = ''
for PP in self.PPs:
PPString = PPString + '-' + PP.getOriginalString()
return PPString[1:]
class PPGroup:
def __init__(self):
self.Propositions = []
self.NPs = []
self.String = ''
def addProp(self,PPString):
self.Propositions.append(PPString)
def addProps(self,PPList):
self.Propositions = self.Propositions + PPList
def addNP(self,NP):
self.NPs.append(NP)
def addNPs(self,NPList):
self.NPs = self.NPs + NPList
def setString(self,PPString):
self.String = PPString
def getOriginalString(self):
return self.String
def getProps(self):
return self.Propositions
def getNPs(self):
return self.NPs
def getString(self):
PropString = 'Propositions: '
for Proposition in self.Propositions:
PropString = PropString + Proposition + '; '
PropString = PropString + ' NPs: '
for NP in self.NPs:
PropString = PropString + NP.getString() + '; '
return PropString
def getPropString(self):
PropString = ''
for Proposition in self.Propositions:
PropString = PropString+'-'+Proposition
return PropString[1:]
def getAgent(tree):
# Extract the agent from a noun phrase
if (tree.node != 'NP'):
print 'The given substree is not rooted at a NP'
return None
else:
# Determine the number of agents
agent_count = 0
for subtree1 in tree:
if (subtree1.node == 'NP'):
agent_count = agent_count + 1
if (agent_count < 2):
#single agent
agent = ''
for leave in tree.leaves():
agent = agent+'-'+str(leave)
return [agent[1:]]
else:
#multiple agent
agent = []
for subtree1 in tree:
if (subtree1.node == 'NP'):
NewAgent = getAgent(subtree1)
print 'adding ',NewAgent,' to ',agent
if NewAgent != None:
agent = agent + NewAgent
return agent
def getAction(tree):
# Extract the action from a verb phrase
VPList = ['VP','VB','VBD','VBG','VBN','VBP','VBZ']
TerminalList = ['VB','VBD','VBG','VBN','VBP','VBZ']
PPList = ['PP','ADVP','ADVP']
NPList = ['NP']
if not any(tree.node == tag for tag in VPList):
print 'The given substree is not rooted at a verb phrase'
return None
else:
# Determine the number of agents
PPs = []
NPs = []
NewActions = []
ChildrenActions = []
actionList = []
for subtree in tree:
if any(subtree.node == tag for tag in PPList):
PPs.append(getPPforVP(subtree))
elif any(subtree.node == tag for tag in NPList):
NPs = NPs + getTargets(subtree)
elif any(subtree.node == tag for tag in TerminalList):
NewAction = ''
for leave in subtree.leaves():
NewAction = NewAction+' '+str(leave)
NewActions.append(NewAction[1:])
elif subtree.node == 'VP':
ChildrenActions = ChildrenActions + getAction(subtree)
for action in NewActions:
NewAction = ActionGroup()
NewAction.addAction(action)
NewAction.addPPs(PPs)
NewAction.addNPs(NPs)
actionList.append(NewAction)
for action in ChildrenActions:
action.addPPs(PPs)
action.addNPs(NPs)
actionList.append(action)
return actionList
def getTargets(tree):
# Extract the agent from a noun phrase
if (tree.node != 'NP'):
print 'The given substree is not rooted at a NP'
return None
else:
PPs = []
ChildrenTargets = []
targetList = []
for subtree in tree:
if subtree.node == 'NP':
ChildrenTargets = ChildrenTargets + getTargets(subtree)
elif subtree.node == 'PP':
PPs.append(getPPforNP(subtree))
if (len(ChildrenTargets) < 1):
#This is a bottom level NP
TargetString = ''
for leave in tree.leaves():
if leave == 'and' or leave == 'or':
NewTarget = NounGroup()
NewTarget.addNP(TargetString[1:])
targetList.append(NewTarget)
TargetString = ''
else:
TargetString = TargetString+' '+str(leave)
NewTarget = NounGroup()
NewTarget.addNP(TargetStri |
idlesign/makeapp | makeapp/app_templates/pytestplugin/makeappconf.py | Python | bsd-3-clause | 145 | 0 | from makeapp.appconfig import Config
class PytestPluginConfig(Conf | ig):
parent_template | = ['pytest']
makeapp_config = PytestPluginConfig
|
Vitiate/ShellScripts | ElastAlert/elastalert_modules/servicenow_alert.py | Python | gpl-2.0 | 1,758 | 0.008532 | from alerts import Alerter, BasicMatchString
import requests
import json
class ServiceNowAlerter(Alerter):
required_options = set(['username', 'password', 'servicenow_rest_url', 'short_description', 'comments', 'assignment_group', 'category', 'subcategory', 'cmdb_ci', 'caller_id'])
# Alert is called
def alert(self, matches):
for match in matches:
| # Parse everything into description.
description = str(BasicMatchString(self.rule, match))
# Set proper headers
headers = {
"Content-Type":"application/json",
"Accept":"application/json;charset=utf-8"
}
data = {
"description": description,
"short_description": self.rule['short_description'],
"comments": self.rule['comments'],
"assignment_group": | self.rule['assignment_group'],
"category": self.rule['category'],
"subcategory": self.rule['subcategory'],
"cmdb_ci": self.rule['cmdb_ci'],
"caller_id": self.rule["caller_id"]
}
response = requests.post(self.rule['servicenow_rest_url'], auth=(self.rule['username'], self.rule['password']), headers=headers , data=json.dumps(data))
if response.status_code != 201:
print('Status:', response.status_code, 'Headers:', response.headers, 'Error Response:',response.json())
exit()
# get_info is called after an alert is sent to get data that is written back
# to Elasticsearch in the field "alert_info"
# It should return a dict of information relevant to what the alert does
def get_info(self):
return {'type': 'Awesome Alerter',
'SN_description': self.rule['description']} |
tjm-1990/blueking | blueking/component/apis/job.py | Python | gpl-3.0 | 1,608 | 0.004717 | # -*- coding: utf-8 -*-
from ..base import ComponentAPI
class CollectionsJOB(object):
"""Collections of JOB APIS"""
def __init__(self, client):
self.client = client
self.execute_task = ComponentAPI(
client=self.client, method='POST', path='/api/c/compapi/job/execute_task/',
description=u'根据作业模板ID启动作业',
)
self.fast_execute_script = ComponentAPI(
client=self.client, method='POST', path='/api/c/compapi/job/fast_execute_script/',
description=u'快速执行脚本',
)
self.fast_push_file = ComponentAPI(
client=self.client, method='POST', path='/api/c/compapi/job/fast_push_file/',
description=u'快速分发文件',
)
self.get_agent_status = ComponentAPI(
client=self.client, method='POST', path | ='/api/c/compapi/job/get_agent_sta | tus/',
description=u'查询Agent状态',
)
self.get_task_detail = ComponentAPI(
client=self.client, method='GET', path='/api/c/compapi/job/get_task_detail/',
description=u'查询作业模板详情',
)
self.get_task_ip_log = ComponentAPI(
client=self.client, method='GET', path='/api/c/compapi/job/get_task_ip_log/',
description=u'根据作业实例ID查询作业执行日志',
)
self.get_task_result = ComponentAPI(
client=self.client, method='GET', path='/api/c/compapi/job/get_task_result/',
description=u'根据作业实例 ID 查询作业执行状态',
)
|
uyaly/test | utils/location.py | Python | gpl-2.0 | 1,635 | 0.005758 | # coding:utf-8
import csv,xlrd
from selenium import webdriver
import time
file_name = r'D:\PycharmProjects\test_hpk2017\data\testdata.xlsx'
def getCsv(file_name):
rows=[]
with open(file_name, 'rb') as f:
readers = csv.reader(f, delimiter=',', quotechar='|')
next(readers, None)
for row in readers:
rows.append(row)
return rows
def getExcel(rowValue, colValue, file_name):
'''
:param rowValue:表格的行
:param colVal | ue:表格的列
:param file_name:excel文件
:return:
'''
book = xlrd.open_workbook(file_name)
sheet = book.sheet_by_index(0)
return sheet.cell_value(rowValue, colValue)
def getDdExcel(File_name):
rows = []
book = xlrd.open_workbook(File_name)
sheet = book.sheet_by_index(0)
for row in range(1, sheet.nrows):
rows.append(list(sheet.row_valu | es(row, 0, sheet.ncols)))
return rows
def getData(rowValue, colname):
'''
:param File_name: excel文件
:param rowValue: 表格的行
:param element: 表格的列
:return:
'''
book = xlrd.open_workbook(file_name)
sheet = book.sheet_by_index(0)
for colValue in range(0, sheet.ncols):
if sheet.cell_value(0, colValue) == colname:
return sheet.cell_value(rowValue, colValue)
colValue = colValue + 1
# 获取返回的错误信息
def getText(driver):
return driver.find_element_by_xpath("").text
# 执行测试主函数
if __name__ == '__main__':
# print getDdExcel(file_name)
# print getExcel(0, 0, file_name)
# print getData(file_name, 1, "HZ_ZSHY_original")
pass |
SwissTPH/odk_planner | docs/_static/get_gonts.py | Python | gpl-2.0 | 408 | 0.007353 |
import sys, re
x = re.compile(r"^(?P<before>.*local\('(?P<name>.*?)'\), url\()(?P<u | rl>.*?)(?P<after>\).*)$")
css = ''
sh = ''
for line in sys.stdin.readlines():
m = x.match(line)
if m:
gd = m.groupdict()
css += gd['before'] + gd['name'] + '.woff' + gd['after']
sh += 'wget -O %s.woff %s' % (gd['name'], gd['url']) + '\n'
else:
css += line
|
print(css)
print(sh)
|
romanlv/trml2pdf | trml2pdf.py | Python | lgpl-2.1 | 672 | 0 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# trml2pdf - An RML to PDF converter
# Copyright (C) 2003, Fabien Pinckaers, UCL, FSA
# Contributors
# Richard Waid <richard@iopen.net>
# Klaas Freitag <freitag@kde.org>
import os
import sys
import trml2pdf
__help = \
"Usage: tr | ml2pdf input.rml >output.pdf\n\
Render the standard input (RML) and output a PDF file"
def main():
if len(sys.argv) == 1 or sys.argv[1] == '--help':
print(__help)
sys.exit(0)
else:
| # print(parseString(open(sys.argv[1], 'r').read()))
os.write(1, trml2pdf.parseString(open(sys.argv[1], 'rt').read()))
if __name__ == "__main__":
main()
|
kytos/kytos | tests/unit/test_core/test_switch.py | Python | mit | 12,374 | 0 | """Test kytos.core.switch module."""
import asyncio
import json
from datetime import datetime
from unittest import TestCase
from unittest.mock import MagicMock, Mock, patch
from kytos.core import Controller
from kytos.core.config import KytosConfig
from kytos.core.constants import FLOOD_TIMEOUT
from kytos.core.interface import Interface
from kytos.core.switch import Switch
def get_date():
"""Return date with FLOOD_TIMEOUT+1 microseconds."""
return datetime(2000, 1, 1, 0, 0, 0, FLOOD_TIMEOUT+1)
# pylint: disable=protected-access, too-many-public-methods
class TestSwitch(TestCase):
"""Switch tests."""
def setUp(self):
"""Instantiate a controller."""
self.loop = asyncio.new_event_loop()
asyncio.set_event_loop(None)
self.options = KytosConfig().options['daemon']
self.controller = Controller(self.options, loop=self.loop)
self.controller.log = Mock()
self.switch = self.create_switch()
@staticmethod
def create_switch():
"""Create a new switch."""
connection = MagicMock()
connection.address = 'addr'
connection.port = 'port'
connection.protocol.version = 0x04
switch = Switch('00:00:00:00:00:00:00:01', connection)
switch._enabled = True
return switch
def test_repr(self):
"""Test repr() output."""
expected_repr = "Switch('00:00:00:00:00:00:00:01')"
self.assertEqual(repr(self.switch), expected_repr)
def test_id(self):
"""Test id property."""
self.assertEqual(self.switch.id, '00:00:00:00:00:00:00:01')
def test_ofp_version(self):
"""Test ofp_version property."""
self.assertEqual(self.switch.ofp_version, '0x04')
def test_ofp_version__none(self):
"""Test ofp_version property when connection is none."""
self.switch.connection = None
self.assertIsNone(self.switch.ofp_version)
def tearDown(self):
"""TearDown."""
self.loop.close()
def test_switch_vlan_pool_default(self):
"""Test default vlan_pool value."""
self.assertEqual(self.options.vlan_pool, {})
def test_switch_vlan_pool_options(self):
"""Test switch with the example from kytos.conf."""
dpid = "00:00:00:00:00:00:00:01"
vlan_pool = {"00:00:00:00:00:00:00:01":
{"1": [[1, 2], [5, 10]], "4": [[3, 4]]}}
self.controller.switches[dpid] = self.switch
self.options.vlan_pool = vlan_pool
self.controller.get_switch_or_create(dpid, self.switch.connection)
port_id = 1
intf = self.controller.switches[dpid].interfaces[port_id]
tag_values = [tag.value for tag in intf.available_tags]
self.assertEqual(tag_values, [1, 5, 6, 7, 8, 9])
port_id = 4
intf = self.controller.switches[dpid].interfaces[port_id]
tag_values = [tag.value for tag in intf.available_tags]
self.assertEqual(tag_values, [3])
# this port number doesn't exist yet.
port_7 = 7
intf = Interface("test", port_7, self.switch)
# no attr filters, so should associate as it is
self.controller.switches[dpid].update_interface(intf)
intf_obj = self.controller.switches[dpid].interfaces[port_7]
self.assertEqual(intf_obj, intf)
# assert default vlan_pool range (1, 4096)
tag_values = [tag.value for tag in intf_obj.available_tags]
self.assertEqual(tag_values, list(range(1, 4096)))
def test_update_description(self):
"""Test update_description method."""
desc = MagicMock()
desc.mfr_desc.value = 'mfr_de | sc'
desc.hw_desc.value = 'hw_desc'
desc.sw_desc.value = 'sw_desc'
desc.serial_num.value = 'serial_num'
desc.dp_desc.value = 'dp_desc'
self.switch.update_description(desc)
self.assertEqual(self.switch.description['manufacturer'], 'mfr_desc')
self.assertEqual(self.switch.description['hardware'], 'hw_desc')
self.assertEqual(self.switch.description | ['software'], 'sw_desc')
self.assertEqual(self.switch.description['serial'], 'serial_num')
self.assertEqual(self.switch.description['data_path'], 'dp_desc')
def test_disable(self):
"""Test disable method."""
interface = MagicMock()
self.switch.interfaces = {"1": interface}
self.switch.disable()
interface.disable.assert_called()
self.assertFalse(self.switch._enabled)
def test_disconnect(self):
"""Test disconnect method."""
self.switch.disconnect()
self.assertIsNone(self.switch.connection)
def test_get_interface_by_port_no(self):
"""Test get_interface_by_port_no method."""
interface_1 = MagicMock(port_number='1')
interface_2 = MagicMock(port_number='2')
self.switch.interfaces = {'1': interface_1, '2': interface_2}
expected_interface_1 = self.switch.get_interface_by_port_no('1')
expected_interface_2 = self.switch.get_interface_by_port_no('3')
self.assertEqual(expected_interface_1, interface_1)
self.assertIsNone(expected_interface_2)
def test_get_flow_by_id(self):
"""Test get_flow_by_id method."""
flow_1 = MagicMock(id='1')
flow_2 = MagicMock(id='2')
self.switch.flows = [flow_1, flow_2]
expected_flow_1 = self.switch.get_flow_by_id('1')
expected_flow_2 = self.switch.get_flow_by_id('3')
self.assertEqual(expected_flow_1, flow_1)
self.assertIsNone(expected_flow_2)
def test_is_connected__true(self):
"""Test is_connected method."""
connection = MagicMock()
connection.is_alive.return_value = True
connection.is_established.return_value = True
self.switch.connection = connection
self.switch.is_active = MagicMock()
self.switch.is_active.return_value = True
self.assertTrue(self.switch.is_connected())
def test_is_connected__not_connection(self):
"""Test is_connected method when connection does not exist."""
self.switch.connection = None
self.switch.is_active = MagicMock()
self.switch.is_active.return_value = True
self.assertFalse(self.switch.is_connected())
def test_is_connected__not_alive(self):
"""Test is_connected method when switch is not active."""
connection = MagicMock()
connection.is_alive.return_value = True
connection.is_established.return_value = True
self.switch.connection = connection
self.switch.is_active = MagicMock()
self.switch.is_active.return_value = False
self.assertFalse(self.switch.is_connected())
def test_update_connection(self):
"""Test update_connection method."""
connection = MagicMock()
self.switch.update_connection(connection)
self.assertEqual(self.switch.connection, connection)
self.assertEqual(self.switch.connection.switch, self.switch)
def test_update_features(self):
"""Test update_features method."""
self.switch.update_features('features')
self.assertEqual(self.switch.features, 'features')
def test_send(self):
"""Test send method."""
self.switch.send('buffer')
self.switch.connection.send.assert_called_with('buffer')
@patch('kytos.core.switch.now', return_value=get_date())
def test_update_lastseen(self, mock_now):
"""Test update_lastseen method."""
self.switch.update_lastseen()
self.assertEqual(self.switch.lastseen, mock_now.return_value)
def test_update_interface(self):
"""Test update_interface method."""
interface = MagicMock(port_number=1)
self.switch.update_interface(interface)
self.assertEqual(self.switch.interfaces[1], interface)
def test_remove_interface(self):
"""Test remove_interface method."""
interface = MagicMock(port_number=1)
self.switch.interfaces[1] = interface
self.switch.remove_interface(interface)
self.assertEqual(self.switch.interfaces, {})
def tes |
TalLinzen/russian-preps | code/python/yandex_capped.py | Python | bsd-3-clause | 2,238 | 0.002241 | # -*- coding: utf-8 -*-
# Author: Tal Linzen <linzen@nyu.edu>
# License: BSD (3-clause)
# Linzen, Kasyanenko, & Gouskova (2013). (Lexical and phonological
# variation in Russian prepositions, Phonology 30(3).)
import re
import random
import time
import sys
import urllib
import urllib2
from gzip import GzipFile
from cStringIO import StringIO
from httptools import FirefoxRequest
class YandexCapped(object):
'''
Send a query to the Yandex search engine (yandex.ru), going straight to
the 100th page of results, and extract the number of matches (if lower
than 1000, then hopefully exact; otherwise it's probably an estimate).
| This is the code used in 2013 paper; it no longer works as of January 2014,
due to some changes in HTML documents generated by the search engine,
though it looks like it would require only minimal adjustments to work.
Usage example:
>>> y = YandexCapped()
>>> y.run('ответов')
'''
results_per_page = 10
max_results = 999
| def __init__(self, delay=0, just_count=True):
self.item_number_re = re.compile(
'<b class="b-serp-item__number">(\d+)</b>')
self.no_results = 'Sorry, there are no results for this search query.'
self.delay = delay
self.just_count = just_count
def run(self, query):
page_number = self.max_results / self.results_per_page
response = self.send_request(query, page_number)
data = self.unzip(response.read())
if self.no_results in data:
return 0
else:
item_numbers = self.item_number_re.findall(data)
assert item_numbers > 0
return max(map(int, item_numbers))
def send_request(self, query, page_number):
d = {'text': query,
'p': page_number,
'lr': 202,
'noreask': 1}
req_str = 'http://yandex.com/yandsearch?' + urllib.urlencode(d)
req = FirefoxRequest(req_str)
time.sleep(self.delay * (1 + random.random()))
return urllib2.urlopen(req)
def unzip(self, s):
return GzipFile(fileobj=StringIO(s)).read()
def extract_links(self, data):
return self.links_re.findall(data)
|
slashdd/sos | sos/cleaner/archives/generic.py | Python | gpl-2.0 | 1,463 | 0 | # Copyright 2020 Red Hat, Inc. Jake Hunsaker <jhunsake@redhat.com>
# This file is part of the sos project: https://github.com/sosreport/sos
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# version 2 of the GNU General Public License.
#
# See the LICENSE file in the source distribution for further information.
from sos.cleaner.archives import SoSObfuscationArchive
import os
import tarfile
class DataDirArchive(SoSObfuscationArchive):
"""A plain directory on the filesystem that is not directly associated with
any known or supported collection utility
"""
type_name = 'data_dir'
description = 'unassociated directory'
@classmethod
def check_is_type(cls, arc_path):
| return os.path.isdir(arc_path)
def set_archive_root(self):
return os.path.abspath(self.archive_path)
class TarballArchive(SoSObfuscationArchive):
"""A generic tar archive that is not associated with any known or supported
collection utility
"""
type_name = 'tarball'
description = 'unassociated tarball'
@classmethod
def check_is_type(cls, arc_path):
try:
return tarfile.is_ | tarfile(arc_path)
except Exception:
return False
def set_archive_root(self):
if self.tarobj.firstmember.isdir():
return self.tarobj.firstmember.name
return ''
|
jvrana/Pillowtalk | tests/test_with_larger_examples/test_sequence_example/test_examples.py | Python | mit | 5,684 | 0.010204 | import pytest
from pillowtalk import *
# TODO: test when where returns [] or None or [None]
# TODO: test update() (or autoupdate?)
@pytest.fixture
def folder_json():
return {'count' : 59,
'created_at' : '2013-10-01T20:07:18+00:00',
'description' : '', 'id': 'lib_pP6d50rJn1',
'modified_at' : '2017-01-20T21:57:55.991758+00:00',
'name' : 'Plasmids',
'owner' : 'ent_A7BlnCcJTU',
'permissions' : {'admin' : True,
'appendable': True,
'owner' : False,
'readable' : True,
'writable' : True},
'sequences' : [
{'id': 'seq_Nv6wYspV', 'name': 'FAR1-mut-87aa-TP'}, {'id': 'seq_0FmHFzJe', 'name': 'pMODT4-pGAL1-attB1-GAVNY'},
{'id': 'seq_usn0K27s', 'name': 'pMODU6-pGALZ4-BleoMX'},
{'id': 'seq_Na2oNxzs', 'name': 'pMODU6-pGALZ4-FAR1-mut-87aa'},
{'id': 'seq_AyQ7ToIn', 'name': 'pBR322 (Sample Sequence)'},
{'id': 'seq_QuWMpfRK', 'name': 'pMODT4-pGAL1-attB1-GVNY'}, {'id': 'seq_K5hwGNwg', 'name': 'pMODU6-pGAL1-BleoMX'},
{'id': 'seq_2rKmILGU', 'name': 'pMODU6-pGAL1-NatMX'}, {'id': 'seq_5HcRWKi8', 'name': 'pMODU6-pGALZ4-P1G1-HygMX'},
{'id': 'seq_tMz0Xv3g', 'name': 'pMODU6-pGAL1-FAR1-L1-IAA17T2'},
{'id': 'seq_k0MuYdIM', 'name': 'pMODU6-pGAL1-IAA17T2-FAR1'},
{'id': 'seq_fkFjzKkb', 'name': 'v63_pGP8zGAL-STE5(-)RING-SNC2 C-term'},
{'id': 'seq_WQ0wqb9f', 'name': 'pMODU6-pGALZ4-iaaH'}, {'id': 'seq_hhI5TTbO', 'name': 'pMODU6-pGAL1-FAR1-IAA17T2'},
{'id': 'seq_beOWphBv', 'name': 'pMODKan-HO-pACT1-ZEV4'},
{'id': 'seq_QteKmJdS', 'name': 'pGPT4-pGAL1-GAVNY_mutated_library'},
{'id': 'seq_w2IZPFzd', 'name': 'pMODOK-pACT1-GAVNY'}, {'id': 'seq_AgQ1w9ak', 'name': 'pLAB2'},
{'id': 'seq_kKtPZ1Rs', 'name': 'pMODT4-pGAL1-P1G1-GAVNY'}, {'id': 'seq_4ccBmI1j', 'name': 'pGPU6-pGAL1-AFB2'},
{'id': 'seq_wHiaXdFM', 'name': 'pGPT4-pGAL1-G(m)AVNY'}, {'id': 'seq_QGfqobtP', 'name': 'pGPT4-pG | AL1-AVNY'},
{'id': 'seq_VazadBJw', 'name': 'pGPT4-pGAL1-GAVNY'}, {'id': 'seq_Qc6f2Kii', 'name': 'pMOD4G-NLS_dCas9_VP64'},
{'id': 'seq_SGfG2YeB', 'name': 'pMODU6-pGALZ4-HygMX'} | , {'id': 'seq_i0Yl6uzk', 'name': 'pMODH8-pGPD-TIR1_DM'},
{'id': 'seq_ri07UntS', 'name': 'pMODU6-pGPD-EYFP'}, {'id': 'seq_F4tEc0XU', 'name': 'pMODU6-pGALZ4-STE5(-)RING'},
{'id': 'seq_qihkmlW4', 'name': 'pMODU6-pGAL1-AlphaFactor'},
{'id': 'seq_2MFFshfl', 'name': 'pYMOD2Kmx_pGAL1-HYG_ZEV4-cassette'},
{'id': 'seq_bw3XWuZU', 'name': 'pMODT4-pGALZ4-AVNY'}, {'id': 'seq_D1iAdKMz', 'name': 'pGPL5G-pGAL1-URA3'},
{'id': 'seq_rzQGBzv2', 'name': 'pGP5G-ccdB'}, {'id': 'seq_9ph0SnJV', 'name': 'AmpR-T4-pGAL1-GAL4DBD-L1'},
{'id': 'seq_PKJNfuZA', 'name': 'pGPH8-pGAL1-GAVNY_v2'}, {'id': 'seq_m42PVReQ', 'name': 'pMODT4-pGALZ4-Z4AVNY'},
{'id': 'seq_5bmPzcKN', 'name': 'pMODU6-pGALZ4-NatMX'}, {'id': 'seq_mfMW58Dd', 'name': 'pGPL5G-pGALZ4-URA3'},
{'id': 'seq_l5VHTc8Z', 'name': 'pGPU6-pGAL1-TIR1_DM'}, {'id': 'seq_tFGIIL0C', 'name': 'pMODU6-pGAL1-FAR1'},
{'id': 'seq_y9xdtVx7', 'name': 'pMODKan-HO-pACT1GEV'}, {'id': 'seq_t77GYXRB', 'name': 'pGPT4-pGAL1-EGFP'},
{'id': 'seq_TWAJLtvz', 'name': 'pMODU6-pGAL1-P1G1-HygMX'}, {'id': 'seq_ztl4dnOW', 'name': 'pLAB1'},
{'id': 'seq_TsTM0B8q', 'name': 'pMOD4-pGAL1Z3(P3)-MF(AL'}, {'id': 'seq_UbsucV1t', 'name': 'pMODU6-pGAL1-HygMX'},
{'id': 'seq_7O7ThYSI', 'name': 'pMODU6-pGALZ4-Z4AVNY'}, {'id': 'seq_iGdjEEx4', 'name': 'pGPT4-pGAL1-P1G1-GEV'},
{'id': 'seq_2xGw2yCj', 'name': 'pGPH8-pGAL1-GAVNY'}, {'id': 'seq_okitCPyx', 'name': 'pGPT4-pGAL1-GAVNY(VP64)'},
{'id': 'seq_rwDoRd9Q', 'name': 'pMODU6-pGALZ4-FAR1'},
{'id': 'seq_f4GgnFdY', 'name': 'pGPT4-pGAL1-GAVNY_seq_verified'},
{'id': 'seq_5AXMlSvB', 'name': 'pYMOD2Kmx_pGAL1-HYG_pGAL1-iaah'},
{'id': 'seq_6VN5FDpP', 'name': 'pMODOK-pACT1-GAVN'}, {'id': 'seq_etTsAfD4', 'name': 'pGPU6-pGALZ4-eYFP'},
{'id': 'seq_IyZI9bEh', 'name': 'pMODU6-pGAL1-FAR1-L1-IAA17T1_opt'}, {'id': 'seq_7yXay7Ep', 'name': 'pGP8G-TIR1-Y'},
{'id': 'seq_GuqSGBXY', 'name': 'pGPT4-pGAL1-GAVNY(VP64) new design'},
{'id': 'seq_vA5dxrqd', 'name': 'pMODU6-pGALZ4-AlphaFactor'}], 'type' : 'ALL'}
def test_folder(folder_json):
class MyBase(PillowtalkBase):
def find(self, *args, **kwargs):
pass
def where(self, *args, **kwargs):
pass
@add_schema
class Folder(MyBase):
items = {}
FIELDS = ["id", "name"]
RELATIONSHIPS = [
Many("sequences", "where Folder.id <> Sequence.folder")
]
@add_schema
class Sequence(MyBase):
items = {}
FIELDS = ["id", "name", "bases"]
RELATIONSHIPS = [
One("folder", "find Sequence.folder <> Folder.id"),
]
f = Folder.load(folder_json)
assert len(f.sequences) > 1 |
lonetwin/botosh | botosh/aws_admin.py | Python | bsd-2-clause | 3,229 | 0.004336 | #!/usr/bin/python
import os
import sys
import cmd
import boto
from utils import info, error, prompt
from utils import green
_context_cache = {}
class AWSAdmin(object, cmd.Cmd):
def __init__(self):
cmd.Cmd.__init__(self)
self.context = None
@property
def _ready(self):
return os.environ.get('AWS_ACCESS_KEY_ID', boto.config.get('Credentials', 'AWS_ACCESS_KEY_ID'))
@property
def region(self):
return self.context.conn.region.name if getattr(self.context.conn, 'region', None) else 'global'
@property
def prompt(self):
return prompt('%s > ' % (
error('bo | to not configured') if not self._ready
else self.context or error('context not set'))
)
def do_set_context(self, context):
""" Set/Switc | h to a different context """
from botosh import available_contexts
available_contexts_str = green(', '.join(available_contexts.keys()))
if not self._ready:
print error("boto has not been configured with sufficient credentials. "
"Please configure boto first")
if not context or not self._ready:
print error('No context provided. Please `set_context` to one of: %s' % available_contexts_str)
elif context in available_contexts:
if context not in _context_cache:
old_region = self.context.region if self.context else None
new_context = available_contexts[context]()
_context_cache[context] = new_context
new_context.context = new_context
if old_region and old_region in new_context._valid_regions:
new_context.conn = new_context.region_switcher(old_region)
_context_cache[context].cmdloop()
else:
print error('Invalid context `%s`. Please `set_context` to one of: %s' % (context, available_contexts_str))
def do_switch_region(self, region):
""" Switch to a different region """
from botosh import available_contexts
if not self._ready:
print error("boto has not been configured with sufficient credentials. "
"Please configure boto first")
if not region:
print error('No region provided.')
if self.context is None:
print error('No context provided. Please `set_context` to one of: %s' % green(', '.join(available_contexts.keys())))
else:
if self.context.region == region:
return
regions = self.context._valid_regions
if region not in regions:
print error('Invalid region `%s`. Please `switch_region` to one of: %s' %
(region, green(', '.join(regions))))
else:
self.context.conn = self.context.region_switcher(region)
def do_quit(self, ignored):
sys.exit(0)
def do_list_contexts(self, ignored):
""" List all available contexts """
from botosh import available_contexts
print "Available contexts:\n%s" % green('\n'.join(available_contexts.keys()))
do_exit = do_quit
|
trunglq7/horizon | openstack_dashboard/dashboards/admin/users/views.py | Python | apache-2.0 | 4,417 | 0.00317 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import operator
from django.core.urlresolvers import reverse, reverse_lazy
from django.utils.decorators import method_decorator
from django.utils.translation import ugettext_lazy as _
from django.views.decorators.debug import sensitive_post_parameters
from horizon import exceptions
from horizon import forms
from horizon import tables
from openstack_dashboard import api
from .forms import CreateUserForm, UpdateUserForm
from .tables import UsersTable
class IndexView(tables.DataTableView):
table_class = UsersTable
template_name = 'admin/users/index.html'
def get_data(self):
users = []
try:
users = api.keystone.user_list(self.request)
except:
exceptions.handle(self.request,
_('Unable to retrieve user list.'))
return users
class UpdateView(forms.ModalFormView):
form_class = UpdateUserForm
template_name = 'admin/users/update.html'
success_url = reverse_lazy('horizon:admin:users:index')
@method_decorator(sensitive_post_parameters('password',
'confirm_password'))
def dispatch(self, *args, **kwargs):
return super(UpdateView, self).dispatch(*args, **kwargs)
def get_object(self):
if not hasattr(self, "_object"):
try:
self._object = api.keystone.user_get(self.request,
self.kwargs['user_id'],
admin=True)
except:
redirect = reverse("horizon:admin:users:index")
exceptions.handle(self.request,
_('Unable to update user.'),
redirect=redirect)
return self._object
def get_context_data(self, **kwargs):
context = super(UpdateView, self).get_context_data(**kwargs)
context['user'] = self.get_object()
return context
#trunglq add
def get_initial(self):
user = self.get_object()
try:
print user.secretkey
return {'id': user.id,
'name': user.name,
'tenant_id': getattr(user, 'tenantId', None),
'email': user.email,
'secretkey': user.secretkey}
except Exception as err:
return {'id': user.id,
'name': user.name,
'tenant_id': getattr(user, 'tenantId', None),
'email': user.email}
#end
class CreateView(forms.ModalFormView):
form_class = CreateUserForm
template_name = 'admin/users/create.html'
success_url = reverse_lazy('horizon:admin:users:index')
@method_decorator(sensitive_post_parameters('password',
'confirm_password'))
def dispatch(self, *args, **kwargs):
return super(CreateView, self).dispatch(*args, **kwargs)
def get_form_kwargs(self):
kwargs = super(CreateView, self).get_form_kwargs()
try:
roles = api.keystone.role_list(self.request)
except:
redirect = reverse("horizon:admin:users:index")
exceptions.handle(self.request,
| _("Unable to retrieve user roles."),
| redirect=redirect)
roles.sort(key=operator.attrgetter("id"))
kwargs['roles'] = roles
return kwargs
def get_initial(self):
default_role = api.keystone.get_default_role(self.request)
return {'role_id': getattr(default_role, "id", None)}
|
MichalKononenko/python-qinfer | src/qinfer/ipy.py | Python | agpl-3.0 | 3,141 | 0.004459 | #!/usr/bin/python
# -*- coding: utf-8 -*-
##
# ipy.py: Interaction with IPython and Jupyter.
##
# © 2016 Chris Ferrie (csferrie@gmail.com) and
# Christopher E. Granade (cgranade@gmail.com)
#
# This file is a part of the Qinfer project.
# Licensed under the AGPL version 3.
##
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
##
## FEATURES ###################################################################
from __future__ import | absolute_import
from __future__ import division
## EXPORTS ###################################################################
__all__ = ['IPythonProgressBar']
## IMPORTS ################################################################ | ####
try:
from IPython.display import display
import ipywidgets as ipw
except:
display = None
ipw = None
## CLASSES ###################################################################
class IPythonProgressBar(object):
"""
Represents a progress bar as an IPython widget. If the widget
is closed by the user, or by calling ``finalize()``, any further
operations will be ignored.
.. note::
This progress bar is compatible with QuTiP progress bar
classes.
"""
def __init__(self):
if ipw is None:
raise ImportError("IPython support requires the ipywidgets package.")
self.widget = ipw.FloatProgress(
value=0.0, min=0.0, max=100.0, step=0.5,
description=""
)
@property
def description(self):
"""
Text description for the progress bar widget,
or ``None`` if the widget has been closed.
:type: `str`
"""
try:
return self.widget.description
except:
return None
@description.setter
def description(self, value):
try:
self.widget.description = value
except:
pass
def start(self, max):
"""
Displays the progress bar for a given maximum value.
:param float max: Maximum value of the progress bar.
"""
try:
self.widget.max = max
display(self.widget)
except:
pass
def update(self, n):
"""
Updates the progress bar to display a new value.
"""
try:
self.widget.value = n
except:
pass
def finished(self):
"""
Destroys the progress bar.
"""
try:
self.widget.close()
except:
pass
|
diego0020/PySurfer | surfer/_commandline.py | Python | bsd-3-clause | 3,496 | 0 | """
This module defines the command-line interface for PySurfer.
It is defined here instead of in either the top level or
intermediate start-up scripts, as it is used in both.
There should be no reason to import this module in an
interpreter session.
"""
from argparse import ArgumentParser, RawDescriptionHelpFormatter
help_text = """
PySurfer is a package for visualization and interaction with cortical
surface representations of neuroimaging data from Freesurfer.
The command-line program pysurfer is designed to largely replicate
Freesufer's tksurfer command-line interface in the format and style
of arguments it accepts, and, like tksurfer, invoking it will initialize
a visualization in an external window and begin an IPython session in the
terminal, through which the visualization can be manipulated.
The visualization interface is exp | osed through methods on th | e `brain'
variable that will exist in IPython namespace when the program finishes
loading. Please see the PySurfer documentation for more information
about how to interact with the Brain object.
"""
parser = ArgumentParser(prog='pysurfer',
usage='%(prog)s subject_id hemisphere surface '
'[options]',
formatter_class=RawDescriptionHelpFormatter,
description=help_text)
parser.add_argument("subject_id",
help="subject id as in subjects dir")
parser.add_argument("hemi", metavar="hemi", choices=["lh", "rh",
"both", "split"],
help="hemisphere to load")
parser.add_argument("surf",
help="surface mesh (e.g. 'pial', 'inflated')")
parser.add_argument("-no-curv", action="store_false", dest="curv",
help="do not display the binarized surface curvature")
parser.add_argument("-morphometry", metavar="MEAS",
help="load morphometry file (e.g. thickness, curvature)")
parser.add_argument("-annotation", metavar="ANNOT",
help="load annotation (by name or filepath)")
parser.add_argument("-label",
help="load label (by name or filepath")
parser.add_argument("-borders", action="store_true",
help="only show label/annot borders")
parser.add_argument("-overlay", metavar="FILE",
help="load scalar overlay file")
parser.add_argument("-range", metavar=('MIN', 'MAX'), nargs=2,
help="overlay threshold and saturation point")
parser.add_argument("-min", type=float,
help="overlay threshold")
parser.add_argument("-max", type=float,
help="overlay saturation point")
parser.add_argument("-sign", default="abs", choices=["abs", "pos", "neg"],
help="overlay sign")
parser.add_argument("-name",
help="name to use for the overlay")
parser.add_argument("-size", default=800,
help="size of the display window (in pixels)")
parser.add_argument("-background", metavar="COLOR", default="black",
help="background color for display")
parser.add_argument("-cortex", metavar="COLOR", default="classic",
help="colormap for binary cortex curvature")
parser.add_argument("-title",
help="title to use for the figure")
parser.add_argument("-views", nargs="*", default=['lat'],
help="view list (space-separated) to use")
|
SublimeHaskell/SublimeHaskell | fly_check.py | Python | mit | 7,654 | 0.003266 | import threading
import time
import sublime
import sublime_plugin
import SublimeHaskell.autocomplete as Autocomplete
import SublimeHaskell.event_common as EventCommon
import SublimeHaskell.internals.backend_mgr as BackendManager
import SublimeHaskell.internals.settings as Settings
import SublimeHaskell.internals.utils as Utils
import SublimeHaskell.sublime_haskell_common as Common
import SublimeHaskell.types as Types
WAIT_TIMEOUT = 30.0 # secs
class FlyCheckViewEventListener(sublime_plugin.ViewEventListener):
    '''The heart of fly-check support. As a view event listener, there will be an instance of this view listener
    attached to each Haskell source view.

    Two per-view worker threads are managed here, both driven by the same
    idle-deadline pattern: ``on_modified`` pushes a deadline timestamp forward
    and sets a wake-up flag; the worker waits on that flag (with a timeout)
    and fires its action once the deadline has passed. A separate "loop"
    event asks a worker to exit.

    * ``inspect`` pushes the modified buffer contents to the backend
      (``inspect_modified`` setting).
    * ``fly_check`` runs check/lint after the buffer has been idle
      (``lint_check_fly`` setting).
    '''
    @classmethod
    def is_applicable(cls, settings):
        # Attach this listener only to views that hold Haskell sources.
        return Common.settings_has_haskell_source(settings)
    @classmethod
    def applies_to_primary_view_only(cls):
        # One listener per buffer, not one per clone view.
        return True
    def __init__(self, view):
        super().__init__(view)
        self.autocompleter = Autocomplete.AutoCompleter()
        # Guards the two idle-deadline timestamps (next_flycheck/next_inspect).
        self.fly_lock = threading.RLock()
        # "loop" event = request worker exit; "flag" event = wake the worker.
        self.fly_check_loop = threading.Event()
        self.fly_check_flag = threading.Event()
        self.fly_check_thread = None
        self.next_flycheck = time.time() + Settings.PLUGIN.lint_check_fly_idle
        self.inspect_loop = threading.Event()
        self.inspect_flag = threading.Event()
        self.inspect_thread = None
        self.next_inspect = time.time() + Settings.PLUGIN.inspect_modified_idle
        # They should start out as clear. Paranoia.
        self.fly_check_loop.clear()
        self.fly_check_flag.clear()
        self.inspect_loop.clear()
        self.inspect_flag.clear()
    def on_activated(self):
        # View became active: (re)start the workers enabled in settings.
        # NOTE(review): any previously running thread is replaced without
        # being stopped/joined -- confirm Sublime always pairs this with
        # on_deactivated.
        if Settings.PLUGIN.inspect_modified:
            with self.fly_lock:
                self.inspect_thread = threading.Thread(
                    target=self.inspect,
                    name='inspect-{0}'.format(self.view.file_name())
                )
                self.inspect_loop.clear()
                self.inspect_flag.clear()
                self.next_inspect = time.time() + Settings.PLUGIN.inspect_modified_idle
                self.inspect_thread.start()
        else:
            self.inspect_thread = None
        if Settings.PLUGIN.lint_check_fly:
            with self.fly_lock:
                self.fly_check_thread = threading.Thread(target=self.fly_check,
                                                         name='fly-{0}'.format(self.view.file_name()))
                self.fly_check_loop.clear()
                self.fly_check_flag.clear()
                self.next_flycheck = time.time() + Settings.PLUGIN.lint_check_fly_idle
                self.fly_check_thread.start()
        else:
            self.fly_check_thread = None
    def on_deactivated(self):
        # Ask the fly-check worker to exit and wait for it.
        # NOTE(review): the inspect thread is NOT stopped here, so it keeps
        # running after deactivation -- confirm whether that is intended.
        if self.fly_check_thread is not None:
            self.fly_check_loop.set()
            self.fly_check_flag.set()
            self.fly_check_thread.join()
            self.fly_check_thread = None
    def on_modified(self):
        # Buffer changed: push each worker's idle deadline forward and wake it.
        if Settings.PLUGIN.inspect_modified:
            with self.fly_lock:
                self.next_inspect = time.time() + Settings.PLUGIN.inspect_modified_idle
                self.inspect_flag.set()
        if Settings.PLUGIN.lint_check_fly:
            with self.fly_lock:
                self.next_flycheck = time.time() + Settings.PLUGIN.lint_check_fly_idle
                self.fly_check_flag.set()
    def inspect(self):
        # Worker loop: re-inspect the buffer once it has been idle long enough.
        # NOTE(review): tmo_event is created but never used (same in fly_check).
        tmo_event = threading.Event()
        tmo_event.clear()
        delta_t = None
        while not self.inspect_loop.is_set():
            # Wait for the on_modified method to set the flag to let us know that there's something
            # for which we need to take action.
            if Settings.COMPONENT_DEBUG.fly_mode:
                print('fly: waiting for check flag, timeout {0}'.format(delta_t))
            self.inspect_flag.wait(delta_t)
            if not self.inspect_loop.is_set():
                with self.fly_lock:
                    delta_t = self.next_inspect - time.time()
                if Settings.COMPONENT_DEBUG.fly_mode:
                    print('fly: delt | a_t = {0}'.format(delta_t))
                if delta_t <= 0:
                    # Deadline passed: run the inspection off-thread and wait.
                    done_inspect = threading.Event()
                    done_inspect.clear()
                    Utils.run_async('fly-inspect' | , self.do_inspect, done_inspect)
                    # Timeout shouldn't be needed... but paranoia is a good thing.
                    done_inspect.wait(WAIT_TIMEOUT)
                    delta_t = None
                    self.inspect_flag.clear()
    def fly_check(self):
        # Worker loop: run check/lint once the buffer has been idle long enough.
        tmo_event = threading.Event()
        tmo_event.clear()
        delta_t = None
        while not self.fly_check_loop.is_set():
            # Wait for the on_modified method to set the flag to let us know that there's something
            # for which we need to take action.
            if Settings.COMPONENT_DEBUG.fly_mode:
                print('fly: waiting for check flag, timeout {0}'.format(delta_t))
            self.fly_check_flag.wait(delta_t)
            if not self.fly_check_loop.is_set():
                with self.fly_lock:
                    delta_t = self.next_flycheck - time.time()
                if Settings.COMPONENT_DEBUG.fly_mode:
                    print('fly: delta_t = {0}'.format(delta_t))
                if delta_t <= 0:
                    # Deadline passed: run the check off-thread and wait for it.
                    done_check = threading.Event()
                    done_check.clear()
                    Utils.run_async('fly-check', self.do_fly, done_check)
                    # Timeout shouldn't be needed... but paranoia is a good thing.
                    done_check.wait(WAIT_TIMEOUT)
                    delta_t = None
                    self.fly_check_flag.clear()
    def scan_contents(self):
        # Re-scan the current file in the backend, then refresh completions.
        current_file_name = self.view.file_name()
        status_msg = Common.status_message_process("Scanning {0}".format(current_file_name))
        status_msg.start()
        def scan_resp(_resp):
            status_msg.result_ok()
            _project_dir, project_name = Common.locate_cabal_project_from_view(self.view)
            EventCommon.update_completions_async(self.autocompleter, project_name, files=[current_file_name])
        def scan_err(_err, _details):
            status_msg.result_fail()
        BackendManager.active_backend().scan_file(
            file=current_file_name,
            build_tool=Settings.PLUGIN.haskell_build_tool,
            on_response=scan_resp,
            on_error=scan_err,
        )
    def do_inspect(self, done_inspect):
        # Push the unsaved buffer contents to the backend, then signal done.
        current_file_name = self.view.file_name()
        BackendManager.active_backend().set_file_contents(
            file=current_file_name,
            contents=self.view.substr(sublime.Region(0, self.view.size())),
        )
        done_inspect.set()
    def do_fly(self, done_check):
        ## Do the flycheck...
        def on_done(successful_build):
            # Always release the waiter, then schedule a rescan on the UI thread.
            if done_check:
                done_check.set()
            sublime.set_timeout(self.scan_contents, 0)
            # Types.refresh_view_types(self.view)
        def on_error(_view):
            # Make sure to release the event, even if an error happens.
            if done_check:
                done_check.set()
        if self.view.is_dirty():
            # Unsaved edits: sync the buffer to the backend before checking.
            current_file_name = self.view.file_name()
            BackendManager.active_backend().set_file_contents(file=current_file_name,
                                                              contents=self.view.substr(sublime.Region(0, self.view.size())))
        EventCommon.do_check_lint(self.view, continue_success=on_done, error_handler=on_error)
|
erikr/weerapi | weerapi/config.py | Python | mit | 157 | 0 | MEMCACHE_KEY = 'weerapi-cache'
MEMCACHE_EXPIRY = 120
MEMCACHE_SERVERS = ['127.0.0.1:11211']
URL = 'http://m.knmi.nl/inde | x.php?i=Actueel&s=tabel_10min_da | ta'
|
wavefront-mike/python-client | wavefront_client/api_client.py | Python | apache-2.0 | 20,266 | 0.000444 | # coding: utf-8
"""
Copyright 2016 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
ref: https://github.com/swagger-api/swagger-codegen
"""
from __future__ import absolute_import
from . import models
from .rest import RESTClientObject
from .rest import ApiException
import os
import re
import sys
import urllib
import json
import mimetypes
import random
import tempfile
import threading
from datetime import datetime
from datetime import date
# python 2 and python 3 compatibility library
from six import iteritems
|
try:
# for python3
from urllib.parse import quote
except ImportError:
# for python2
| from urllib import quote
from .configuration import Configuration
class ApiClient(object):
"""
Generic API client for Swagger client library builds.
Swagger generic API client. This client handles the client-
server communication, and is invariant across implementations. Specifics of
the methods and models for each application are generated from the Swagger
templates.
NOTE: This class is auto generated by the swagger code generator program.
Ref: https://github.com/swagger-api/swagger-codegen
Do not edit the class manually.
:param host: The base path for the server to call.
:param header_name: a header to pass when making calls to the API.
:param header_value: a header value to pass when making calls to the API.
"""
    def __init__(self, host=None, header_name=None, header_value=None, cookie=None):
        """
        Constructor of the class.

        :param host: base URL of the API server; falls back to the global
            ``Configuration().host`` when not given.
        :param header_name: optional name of a header to send with every call.
        :param header_value: value for ``header_name``.
        :param cookie: optional Cookie header value to send with every call.
        """
        self.rest_client = RESTClientObject()
        # Headers merged into every outgoing request (see __call_api).
        self.default_headers = {}
        if header_name is not None:
            self.default_headers[header_name] = header_value
        if host is None:
            self.host = Configuration().host
        else:
            self.host = host
        self.cookie = cookie
        # Set default User-Agent.
        self.user_agent = 'Swagger-Codegen/1.0.0/python'
@property
def user_agent(self):
"""
Gets user agent.
"""
return self.default_headers['User-Agent']
@user_agent.setter
def user_agent(self, value):
"""
Sets user agent.
"""
self.default_headers['User-Agent'] = value
def set_default_header(self, header_name, header_value):
self.default_headers[header_name] = header_value
    def __call_api(self, resource_path, method,
                   path_params=None, query_params=None, header_params=None,
                   body=None, post_params=None, files=None,
                   response_type=None, auth_settings=None, callback=None):
        """
        Perform one API call: assemble headers, substitute path parameters,
        serialize query/post data and the body, apply auth settings, issue
        the HTTP request and deserialize the reply.

        Returns the deserialized response (``None`` when no ``response_type``
        is given). When ``callback`` is provided, the result is passed to it
        instead of being returned.
        """
        # headers parameters
        header_params = header_params or {}
        header_params.update(self.default_headers)
        if self.cookie:
            header_params['Cookie'] = self.cookie
        if header_params:
            header_params = self.sanitize_for_serialization(header_params)
        # path parameters: substitute each {name} placeholder, URL-quoted.
        if path_params:
            path_params = self.sanitize_for_serialization(path_params)
            for k, v in iteritems(path_params):
                replacement = quote(str(self.to_path_value(v)))
                resource_path = resource_path.\
                    replace('{' + k + '}', replacement)
        # query parameters
        if query_params:
            query_params = self.sanitize_for_serialization(query_params)
            query_params = {k: self.to_path_value(v)
                            for k, v in iteritems(query_params)}
        # post parameters (form fields and file uploads)
        if post_params or files:
            post_params = self.prepare_post_parameters(post_params, files)
            post_params = self.sanitize_for_serialization(post_params)
        # auth setting (mutates header/query params in place)
        self.update_params_for_auth(header_params, query_params, auth_settings)
        # body
        if body:
            body = self.sanitize_for_serialization(body)
        # request url
        url = self.host + resource_path
        # perform request and return response
        response_data = self.request(method, url,
                                     query_params=query_params,
                                     headers=header_params,
                                     post_params=post_params, body=body)
        # Keep the raw response around for callers that need headers/status.
        self.last_response = response_data
        # deserialize response data
        if response_type:
            deserialized_data = self.deserialize(response_data, response_type)
        else:
            deserialized_data = None
        if callback:
            callback(deserialized_data)
        else:
            return deserialized_data
def to_path_value(self, obj):
"""
Takes value and turn it into a string suitable for inclusion in
the path, by url-encoding.
:param obj: object or string value.
:return string: quoted value.
"""
if type(obj) == list:
return ','.join(obj)
else:
return str(obj)
    def sanitize_for_serialization(self, obj):
        """
        Builds a JSON POST object.
        If obj is None, return None.
        If obj is str, int, float, bool, return directly.
        If obj is datetime.datetime, datetime.date
        convert to string in iso8601 format.
        If obj is list, sanitize each element in the list.
        If obj is dict, return the dict.
        If obj is swagger model, return the properties dict.
        :param obj: The data to serialize.
        :return: The serialized form of data.
        """
        # Scalars (and tuples) pass through untouched; on Python 2,
        # `unicode` is accepted as well.
        types = (str, int, float, bool, tuple)
        if sys.version_info < (3,0):
            types = types + (unicode,)
        if isinstance(obj, type(None)):
            return None
        elif isinstance(obj, types):
            return obj
        elif isinstance(obj, list):
            # Recurse element-wise.
            return [self.sanitize_for_serialization(sub_obj)
                    for sub_obj in obj]
        elif isinstance(obj, (datetime, date)):
            return obj.isoformat()
        else:
            if isinstance(obj, dict):
                obj_dict = obj
            else:
                # Convert model obj to dict except
                # attributes `swagger_types`, `attribute_map`
                # and attributes which value is not None.
                # Convert attribute name to json key in
                # model definition for request.
                obj_dict = {obj.attribute_map[attr]: getattr(obj, attr)
                            for attr, _ in iteritems(obj.swagger_types)
                            if getattr(obj, attr) is not None}
            # Recurse over the values of the (model or plain) dict.
            return {key: self.sanitize_for_serialization(val)
                    for key, val in iteritems(obj_dict)}
    def deserialize(self, response, response_type):
        """
        Deserializes response into an object.
        :param response: RESTResponse object to be deserialized.
        :param response_type: class literal for
            deserialized object, or string of class name;
            the special value "file" downloads to a temp file instead.
        :return: deserialized object.
        """
        # handle file downloading
        # save response body into a tmp file and return the instance
        if "file" == response_type:
            return self.__deserialize_file(response)
        # fetch data from response object; fall back to the raw payload
        # when the body is not valid JSON.
        try:
            data = json.loads(response.data)
        except ValueError:
            data = response.data
        return self.__deserialize(data, response_type)
def __deserialize(self, data, klass):
"""
Deserializes dict, list, str into an object.
:param data: dict, list or str.
:param klass: class literal, or string of class name |
nitin-cherian/Webapps | TalkPython/P4E/my_web_app/my_web_app/static_cache.py | Python | mit | 1,889 | 0.000529 | """
Use this cache utility to compute the hash of files server as static resources
on your website. As written, it must be in your '/' home folder, but you can
adjust the computation of *fullname* to allow you to put it in subfolders.
Set recompute_caches_every_request based on use case / deployment:
recompute_caches_every_request = False in production
recompute_caches_every_request = True in development
Use in your templates as:
<link
href="/static/css/site.css?cacheId=${build_cache_id('/static/css/site.css')}"
rel="stylesheet">
"""
import hashlib
import os
# Absolute directory containing this module; static file paths are
# resolved relative to it (see build_cache_id).
__full_path = os.path.dirname(os.path.abspath(__file__))
# Memoized relative-URL -> digest map, kept for the process lifetime.
__hash_lookup = dict()
# Set this to False in production, True in development
recompute_caches_every_request = False
# Gate for the __trace() debug output below.
enable_tracing = True
def build_cache_id(relative_file_url: str):
    """Return a cache-busting digest for the static file at *relative_file_url*.

    Digests are memoized in ``__hash_lookup`` unless
    ``recompute_caches_every_request`` is enabled (development mode).
    Sentinel strings are returned for empty or missing inputs.
    """
    if not relative_file_url:
        return "ERROR_NO_FILE_SPECIFIED"
    key = relative_file_url
    # Serve the memoized digest unless we recompute on every request.
    if not recompute_caches_every_request and key in __hash_lookup:
        __trace("Using cached lookup for {} -> {}".format(key, __hash_lookup[key]))
        return __hash_lookup[key]
    resolved = os.path.abspath(
        os.path.join(__full_path, relative_file_url.lstrip('/')))
    if not os.path.exists(resolved):
        return "ERROR_MISSING_FILE"
    digest = __get_file_hash(resolved)
    __hash_lookup[key] = digest
    __trace("Computed digest for {} -> {}".format(key, __hash_lookup[key]))
    return digest
def __get_file_hash(filename):
    """Return the hex MD5 digest of *filename*'s contents.

    Reads in fixed-size chunks so large static assets are hashed without
    loading the whole file into memory. MD5 is acceptable here: the digest
    is only a cache-busting fingerprint, not a security measure.
    """
    md5 = hashlib.md5()
    with open(filename, 'rb') as fin:
        for chunk in iter(lambda: fin.read(65536), b''):
            md5.update(chunk)
    return md5.hexdigest()
def __trace(text):
    # This is really for just seeing things in action.
    # You might want real logging...
    """Print *text* when tracing is enabled; otherwise do nothing."""
    if enable_tracing:
        print(text)
|
Antiun/odoo | openerp/fields.py | Python | agpl-3.0 | 75,603 | 0.001958 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013-2014 OpenERP (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
""" High-level objects for fields. """
from collections import OrderedDict
from datetime import date, datetime
from functools import partial
from operator import attrgetter
from types import NoneType
import logging
import pytz
import xmlrpclib
from openerp.tools import float_round, frozendict, html_sanitize, ustr, OrderedSet
from openerp.tools import DEFAULT_SERVER_DATE_FORMAT as DATE_FORMAT
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT as DATETIME_FORMAT
# Lengths of the serialized server date / datetime string formats.
DATE_LENGTH = len(date.today().strftime(DATE_FORMAT))
DATETIME_LENGTH = len(datetime.now().strftime(DATETIME_FORMAT))
# Shared immutable empty mapping.
EMPTY_DICT = frozendict()
# Module-level logger.
_logger = logging.getLogger(__name__)
class SpecialValue(object):
    """Wrapper stored in the record cache in place of a plain value;
    reading goes through :meth:`get`."""

    def __init__(self, value):
        self.value = value

    def get(self):
        """Return the wrapped value."""
        return self.value
class FailedValue(SpecialValue):
    """Cache entry wrapping an exception: reading it re-raises the error."""

    def __init__(self, exception):
        self.exception = exception

    def get(self):
        """Re-raise the wrapped exception instead of returning a value."""
        raise self.exception
def _check_value(value):
    """Unwrap ``value``: call its getter when it is a :class:`SpecialValue`
    (which re-raises for failed entries), otherwise return it unchanged."""
    if isinstance(value, SpecialValue):
        return value.get()
    return value
def resolve_all_mro(cls, name, reverse=False):
    """Yield every definition of attribute ``name`` along ``cls``'s MRO.

    Walks from the most derived class outwards (or from the root inwards
    when ``reverse`` is true), yielding one value for each class that
    defines ``name`` in its own ``__dict__``.
    """
    if reverse:
        order = reversed(cls.__mro__)
    else:
        order = cls.__mro__
    for base in order:
        attrs = base.__dict__
        if name in attrs:
            yield attrs[name]
class MetaField(type):
    """ Metaclass for field classes.

    Responsibilities:

    * merge inherited ``_slots`` defaults and derive ``__slots__`` for each
      new field class;
    * register each concrete field class in ``by_type`` keyed by its
      ``type`` attribute (first registration wins);
    * precompute the ``_column_*`` / ``_related_*`` / ``_description_*``
      attribute name lists so field instances never need ``dir()``.
    """
    by_type = {}
    def __new__(meta, name, bases, attrs):
        """ Combine the ``_slots`` dict from parent classes, and determine
            ``__slots__`` for them on the new class.
        """
        base_slots = {}
        # Walk bases in reverse so the leftmost base's entries win on conflict.
        for base in reversed(bases):
            base_slots.update(getattr(base, '_slots', ()))
        slots = dict(base_slots) |
        slots.update(attrs.get('_slots', ()))
        # Only slots not already provided by a base go into __slots__.
        attrs['__slots__'] = set(slots) - set(base_ | slots)
        attrs['_slots'] = slots
        return type.__new__(meta, name, bases, attrs)
    def __init__(cls, name, bases, attrs):
        super(MetaField, cls).__init__(name, bases, attrs)
        # Register the first class seen for each field type name.
        if cls.type and cls.type not in MetaField.by_type:
            MetaField.by_type[cls.type] = cls
        # compute class attributes to avoid calling dir() on fields
        cls.column_attrs = []
        cls.related_attrs = []
        cls.description_attrs = []
        for attr in dir(cls):
            if attr.startswith('_column_'):
                cls.column_attrs.append((attr[8:], attr))
            elif attr.startswith('_related_'):
                cls.related_attrs.append((attr[9:], attr))
            elif attr.startswith('_description_'):
                cls.description_attrs.append((attr[13:], attr))
class Field(object):
""" The field descriptor contains the field definition, and manages accesses
and assignments of the corresponding field on records. The following
attributes may be provided when instanciating a field:
:param string: the label of the field seen by users (string); if not
set, the ORM takes the field name in the class (capitalized).
:param help: the tooltip of the field seen by users (string)
:param readonly: whether the field is readonly (boolean, by default ``False``)
:param required: whether the value of the field is required (boolean, by
default ``False``)
:param index: whether the field is indexed in database (boolean, by
default ``False``)
:param default: the default value for the field; this is either a static
value, or a function taking a recordset and returning a value
:param states: a dictionary mapping state values to lists of UI attribute-value
pairs; possible attributes are: 'readonly', 'required', 'invisible'.
Note: Any state-based condition requires the ``state`` field value to be
available on the client-side UI. This is typically done by including it in
the relevant views, possibly made invisible if not relevant for the
end-user.
:param groups: comma-separated list of group xml ids (string); this
restricts the field access to the users of the given groups only
:param bool copy: whether the field value should be copied when the record
is duplicated (default: ``True`` for normal fields, ``False`` for
``one2many`` and computed fields, including property fields and
related fields)
:param string oldname: the previous name of this field, so that ORM can rename
it automatically at migration
.. _field-computed:
.. rubric:: Computed fields
One can define a field whose value is computed instead of simply being
read from the database. The attributes that are specific to computed
fields are given below. To define such a field, simply provide a value
for the attribute ``compute``.
:param compute: name of a method that computes the field
:param inverse: name of a method that inverses the field (optional)
:param search: name of a method that implement search on the field (optional)
:param store: whether the field is stored in database (boolean, by
default ``False`` on computed fields)
:param compute_sudo: whether the field should be recomputed as superuser
to bypass access rights (boolean, by default ``False``)
The methods given for ``compute``, ``inverse`` and ``search`` are model
methods. Their signature is shown in the following example::
upper = fields.Char(compute='_compute_upper',
inverse='_inverse_upper',
search='_search_upper')
@api.depends('name')
def _compute_upper(self):
for rec in self:
rec.upper = rec.name.upper() if rec.name else False
def _inverse_upper(self):
for rec in self:
rec.name = rec.upper.lower() if rec.upper else False
def _search_upper(self, operator, value):
if operator == 'like':
operator = 'ilike'
return [('name', operator, value)]
The compute method has to assign the field on all records of the invoked
recordset. The decorator :meth:`openerp.api.depends` must be applied on
the compute method to specify the field dependencies; those dependencies
are used to determine when to recompute the field; recomputation is
automatic and guarantees cache/database consistency. Note that the same
method can be used for several fields, you simply have to assign all the
given fields in the method; the method will be invoked once for all
those fields.
By default, a computed |
codingforentrepreneurs/ecommerce-2-api | src/ecommerce2/api_tests.py | Python | mit | 3,349 | 0.023589 | import requests
import json
cart_url = 'http://127.0.0.1:8000/api/cart/'
def create_cart():
    """Create a new server-side cart and return its token.

    Issues a GET against the cart endpoint; the API responds with a JSON
    body whose "token" field identifies the newly created cart.
    """
    response = requests.get(cart_url)
    return response.json()["token"]
def do_api_test(email=None, user_auth=None):
    """End-to-end smoke test against a locally running API.

    Flow: create a cart, add an item, obtain a user-checkout token (when
    *email* is given), ensure billing and shipping addresses exist, then
    POST the checkout. Requires the dev server at 127.0.0.1:8000.

    NOTE(review): *user_auth* is accepted but never used, and the trailing
    ``print order.text`` is Python 2 syntax, so this script targets py2.
    """
    cart_token = create_cart()
    # add items to cart
    new_cart_url = cart_url + "?token=" + cart_token + "&item=10&qty=3"
    new_cart_r = requests.get(new_cart_url)
    #print new_cart_r.text
    #get user_checkout token
    user_checkout_url = 'http://127.0.0.1:8000/api/user/checkout/'
    if email:
        data = {
            "email": email
        }
        u_c_r = requests.post(user_checkout_url, data=data)
        user_checkout_token = u_c_r.json().get("user_checkout_token")
        #print user_checkout_token
        addresses = "http://127.0.0.1:8000/api/user/address/?checkout_token=" + user_checkout_token
        #address = "http://127.0.0.1:8000/api/user/address/?checkout_token=eydicmFpbnRyZWVfaWQnOiB1JzY0ODM | xMzkzJywgJ3VzZXJfY2hlY2tvdXRfaWQnOiAxMSwgJ3N1Y2Nlc3MnOiBUcnVlfQ=="
        addresses_r = requests.get(addresses)
        addresses_r_data = addresses_r.json()
        # Reuse the first two stored addresses, or create a billing and a
        # shipping address when fewer than two exist.
        if addresses_r_data["count"] >= 2:
            b_id = addresses_r_data["results"][0]["id"]
            s_id = addresses_r_data["results"][1]["id"]
        else:
            addresses_create = "http://127.0.0.1:8000/api/user/address/create/"
            user_id = 11 |
            data = {
                "user": user_id,
                "type": "billing",
                "street": "12423 Test",
                "city": "Newport Beach",
                "zipcode": 92304,
            }
            addresses_create_r = requests.post(addresses_create, data=data)
            b_id = addresses_create_r.json().get("id")
            data = {
                "user": user_id,
                "type": "shipping",
                "street": "12423 Test",
                "city": "Newport Beach",
                "zipcode": 92304,
            }
            addresses_create_s_r = requests.post(addresses_create, data=data)
            s_id = addresses_create_s_r.json().get("id")
        """
        do checkout
        """
        checkout_url = "http://127.0.0.1:8000/api/checkout/"
        data = {
            "billing_address": b_id,
            "shipping_address": s_id,
            "cart_token": cart_token,
            "checkout_token": user_checkout_token
        }
        #print data
        order = requests.post(checkout_url, data=data)
        #print order.headers
        print order.text
do_api_test(email='abc1234@gmail.com')
# base_url = "http://127.0.0.1:8000/api/"
# login_url = base_url + "auth/token/"
# products_url = base_url + "products/"
# refresh_url = login_url + "refresh/"
# cart_url = base_url + "cart/"
# #requests.get
# #requests.post(login_url, data=None, headers=None, params=None)
# data = {
# "username": "jmitchel3",
# "password": "123"
# }
# login_r = requests.post(login_url, data=data)
# login_r.text
# json_data = login_r.json()
# import json
# print(json.dumps(json_data, indent=2))
# token = json_data["token"]
# print token
# headers = {
# "Content-Type": "application/json",
# "Authorization": "JWT %s" %(token),
# }
# p_r = requests.get(products_url, headers=headers)
# print p_r.text
# print(json.dumps(p_r.json(), indent=2))
# #Refresh URL TOKEN
# data = {
# "token": token
# }
# refresh_r = requests.post(refresh_url, data=data)
# print refresh_r.json()
# token = refresh_r.json()["token"]
# cart_r = requests.get(cart_url)
# cart_token = cart_r.json()["token"]
# new_cart_url = cart_url + "?token=" + cart_token + "&item=10&qty=3&delete=True"
# new_cart_r = requests.get(new_cart_url)
# print new_cart_r.json()
|
simonsfoundation/CaImAn | caiman/utils/stats.py | Python | gpl-2.0 | 9,843 | 0.002845 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Created on Thu Oct 20 13:49:57 2016
@author: agiovann
"""
from builtins import range
from past.utils import old_div
import logging
import numpy as np
import scipy
try:
import numba
except:
pass
from scipy.linalg.lapack import dpotrf, dpotrs
from scipy import fftpack
#%%
def mode_robust_fast(inputData, axis=None):
    """
    Robust estimator of the mode of a data set using the half-sample mode.
    .. versionadded: 1.0.3
    """
    if axis is None:
        # Flatten and sort (the half-sample recursion requires sorted data),
        # then estimate the mode.
        flat = np.sort(inputData.ravel())
        return _hsm(flat)
    # Apply the scalar estimator independently along the requested axis.
    return np.apply_along_axis(lambda vec: mode_robust_fast(vec), axis, inputData)
#%%
def mode_robust(inputData, axis=None, dtype=None):
    """
    Robust estimator of the mode of a data set using the half-sample mode.
    .. versionadded: 1.0.3
    """
    if axis is not None:
        def fnc(x):
            return mode_robust(x, dtype=dtype)
        dataMode = np.apply_along_axis(fnc, axis, inputData)
    else:
        # Create the function that we can use for the half-sample mode:
        # recursively keep the densest half-window of the sorted sample
        # until <= 3 points remain.
        def _hsm(data):
            if data.size == 1:
                return data[0]
            elif data.size == 2:
                return data.mean()
            elif data.size == 3:
                i1 = data[1] - data[0]
                i2 = data[2] - data[1]
                if i1 < i2:
                    return data[:2].mean()
                elif i2 < i1:
                    # BUGFIX: was `elif i2 > i1`, which is unreachable after
                    # the `i1 < i2` test above -- the denser *upper* pair was
                    # never averaged and data[1] was returned instead.
                    return data[1:].mean()
                else:
                    return data[1]
            else:
                wMin = np.inf
                N = data.size // 2 + data.size % 2
                for i in range(0, N):
                    w = data[i + N - 1] - data[i]
                    if w < wMin:
                        wMin = w
                        j = i
                return _hsm(data[j:j + N])
        data = inputData.ravel()
        if type(data).__name__ == "MaskedArray":
            data = data.compressed()
        if dtype is not None:
            data = data.astype(dtype)
        # The data need to be sorted for this to work
        data = np.sort(data)
        # Find the mode
        dataMode = _hsm(data)
    return dataMode
#%%
#@numba.jit("void | (f4[:])")
def _hsm(data):
    """Half-sample mode of *data*.

    *data* must be a sorted, flattened numpy array; callers such as
    mode_robust_fast sort before delegating here.
    """
    if data.size == 1:
        return data[0]
    elif data.size == 2:
        return data.mean()
    elif data.size == 3:
        i1 = data[1] - data[0]
        i2 = data[2] - data[1]
        if i1 < i2:
            return data[:2].mean()
        elif i2 < i1:
            # BUGFIX: was `elif i2 > i1`, which is unreachable after the
            # `i1 < i2` test above -- the denser upper pair never got averaged.
            return data[1:].mean()
        else:
            return data[1]
    else:
        # Keep the densest half-window and recurse.
        wMin = np.inf
        # Integer floor division; replaces past.utils.old_div for consistency
        # with the nested copy inside mode_robust (identical result for ints).
        N = data.size // 2 + data.size % 2
        for i in range(0, N):
            w = data[i + N - 1] - data[i]
            if w < wMin:
                wMin = w
                j = i
        return _hsm(data[j:j + N])
def compressive_nmf(A, L, R, r, X=None, Y=None, max_iter=100, ls=0):
"""Implements compressive NMF using an ADMM method as described in
Tepper and Shapiro, IEEE TSP 2015
min_{U,V,X,Y} ||A - XY||_F^2 s.t. U = LX >= 0 and V = YR >=0
"""
#r_ov = L.shape[1]
m = L.shape[0]
n = R.shape[1]
U = np.random.rand(m, r)
V = np.random.rand(r, n)
Y = V.dot(R.T)
Lam = np.zeros(U.shape)
Phi = np.zeros(V.shape)
l = 1
f = 1
x = 1
I = np.eye(r)
it = 0
while it < max_iter:
it += 1
X = np.linalg.solve(Y.dot(Y.T) + l*I, Y.dot(A.T) + (l*U.T - Lam.T).dot(L)).T
Y = np.linalg.solve(X.T.dot(X) + f*I, X.T.dot(A) + (f*V - Phi - ls).dot(R.T))
LX = L.dot(X)
U = LX + Lam/l
U = np.where(U>0, U, 0)
YR = Y.dot(R)
V = YR + Phi/f
V = np.where(V>0, V, 0)
Lam += x*l*(LX - U)
Phi += x*f*(YR - V)
print(it)
return X, Y
#%% kernel density estimation
def mode_robust_kde(inputData, axis=None):
    """Estimate the mode of a data set via kernel density estimation.

    Runs the Botev diffusion KDE (see :func:`kde`) and returns the mesh
    point where the estimated density peaks. With ``axis`` given, the
    estimator is applied independently along that axis.
    """
    if axis is not None:
        def fnc(x):
            return mode_robust_kde(x)
        dataMode = np.apply_along_axis(fnc, axis, inputData)
    else:
        bandwidth, mesh, density, cdf = kde(inputData)
        # BUGFIX: np.argamax does not exist (AttributeError at runtime);
        # np.argmax is the intended peak locator (cf. df_percentile below).
        dataMode = mesh[np.argmax(density)]
    return dataMode
def df_percentile(inputData, axis=None):
    """
    Extracting the percentile of the data where the mode occurs and its value.
    Used to determine the filtering level for DF/F extraction. Note that
    computation can be inaccurate for short traces.
    """
    if axis is not None:
        # Apply per-slice along the requested axis; each call yields a
        # (percentile, value) pair.
        def fnc(x):
            return df_percentile(x)
        result = np.apply_along_axis(fnc, axis, inputData)
        data_prct = result[:, 0]
        val = result[:, 1]
    else:
        # Run the diffusion KDE; on failure, duplicate the data and retry.
        err = True
        while err:
            try:
                bandwidth, mesh, density, cdf = kde(inputData)
                err = False
            except:
                # NOTE(review): bare except hides real errors; kde() can also
                # return None (unpack raises and retries through this handler).
                logging.warning('Percentile computation failed. Duplicating ' + 'and trying again.')
                if not isinstance(inputData, list):
                    inputData = inputData.tolist()
                inputData += inputData
        # The mode's position in the CDF gives the percentile; its mesh
        # point gives the value.
        data_prct = cdf[np.argmax(density)] * 100
        val = mesh[np.argmax(density)]
        if data_prct >= 100 or data_prct < 0:
            logging.warning('Invalid percentile computed possibly due ' + 'short trace. Duplicating and recomuputing.')
            if not isinstance(inputData, list):
                inputData = inputData.tolist()
            inputData *= 2
            err = True
            # NOTE(review): setting err here does not re-enter the while loop
            # above, so the announced recomputation never actually runs.
        if np.isnan(data_prct):
            logging.warning('NaN percentile computed. Reverting to median.')
            data_prct = 50
            val = np.median(np.array(inputData))
    return data_prct, val
"""
An implementation of the kde bandwidth selection method outlined in:
Z. I. Botev, J. F. Grotowski, and D. P. Kroese. Kernel density
estimation via diffusion. The Annals of Statistics, 38(5):2916-2957, 2010.
Based on the implementation in Matlab by Zdravko Botev.
Daniel B. Smith, PhD
Updated 1-23-2013
"""
def kde(data, N=None, MIN=None, MAX=None):
    """Botev et al. (2010) diffusion kernel density estimate of *data*.

    Returns ``(bandwidth, mesh, density, cdf)`` on an ``N``-point mesh
    (default 2**12, rounded up to a power of two) spanning ``[MIN, MAX]``
    (the data range padded by 10% when not given). Returns ``None`` when
    the fixed-point bandwidth search fails.
    """
    # Local import: only brentq is needed, and `import scipy` alone does not
    # guarantee the scipy.optimize submodule is loaded.
    from scipy import optimize
    # Parameters to set up the mesh on which to calculate.
    # BUGFIX: the NumPy-alias functions formerly exposed on the scipy
    # namespace (scipy.ceil, scipy.histogram, scipy.exp, ...) were deprecated
    # and removed; use their numpy equivalents instead.
    N = 2**12 if N is None else int(2**np.ceil(np.log2(N)))
    if MIN is None or MAX is None:
        minimum = min(data)
        maximum = max(data)
        Range = maximum - minimum
        MIN = minimum - Range / 10 if MIN is None else MIN
        MAX = maximum + Range / 10 if MAX is None else MAX
    # Range of the data
    R = MAX - MIN
    # Histogram the data to get a crude first approximation of the density
    M = len(data)
    DataHist, bins = np.histogram(data, bins=N, range=(MIN, MAX))
    DataHist = DataHist / M
    DCTData = fftpack.dct(DataHist, norm=None)
    I = [iN * iN for iN in range(1, N)]
    SqDCTData = (DCTData[1:] / 2)**2
    # The fixed point calculation finds the bandwidth = t_star
    guess = 0.1
    try:
        t_star = optimize.brentq(fixed_point, 0, guess, args=(M, I, SqDCTData))
    except ValueError:
        print('Oops!')
        return None
    # Smooth the DCT-transformed data using t_star
    SmDCTData = DCTData * np.exp(-np.arange(N)**2 * np.pi**2 * t_star / 2)
    # Inverse DCT to get density
    density = fftpack.idct(SmDCTData, norm=None) * N / R
    mesh = [(bins[i] + bins[i + 1]) / 2 for i in range(N)]
    bandwidth = np.sqrt(t_star) * R
    # np.trapz was renamed np.trapezoid in NumPy 2.0; support both.
    _trapz = getattr(np, 'trapezoid', None) or np.trapz
    density = density / _trapz(density, mesh)
    cdf = np.cumsum(density) * (mesh[1] - mesh[0])
    return bandwidth, mesh, density, cdf
def fixed_point(t, M, I, a2):
l = 7
I = scipy.float64(I)
M = scipy.float64(M)
a2 = scipy.float64(a2)
f = 2 * scipy.pi**(2 * l) * scipy.sum(I**l * a2 * scipy.exp(-I * scipy.pi**2 * t))
for s |
trupty/aerospike-client-python | examples/client/get_key_digest.py | Python | apache-2.0 | 4,371 | 0.00366 | # -*- coding: utf-8 -*-
################################################################################
# Copyright 2013-2015 Aerospike, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
from __future__ import print_function
import aerospike
import sys
from optparse import OptionParser
################################################################################
# Options Parsing
################################################################################
usage = "usage: %prog [options] key"
optparser = OptionParser(usage=usage, add_help_option=False)
optparser.add_option(
"--help", dest="help", action="store_true",
help="Displays this message.")
optparser.add_option(
"-U", "--username", dest="username", type="string", metavar="<USERNAME>",
help="Username to connect to database.")
optparser.add_option(
"-P", "--password", dest="password", type="string", metavar="<PASSWORD>",
help="Password to connect to database.")
optparser.add_option(
"-h", "--host", dest="host", type="string", default="127.0.0.1", metavar="<ADDRESS>",
help="Address of Aerospike server.")
optparser.add_option(
"-p", "--port", dest="port", type="int", default=3000, metavar="<PORT>",
help="Port of the Aerospike server.")
optparser.add_option(
"--timeout", dest="timeout", type="int", default=1000, metavar="<MS>",
help="Client timeout")
optparser.add_option(
"-n", "--namespace", dest="namespace", type="string", default="test", metavar="<NS>",
help="Port of the Aerospike server.")
optparser.add_option(
"-s", "--set", dest="set", type="string", default="demo", metavar="<SET>",
help="Port of the Aerospike server.")
(options, args) = optparser.parse_args()
if options.help:
optparser.print_help()
| print()
sys.exit(1)
if len(args) != 1:
optparse | r.print_help()
print()
sys.exit(1)
################################################################################
# Client Configuration
################################################################################
config = {
'hosts': [ (options.host, options.port) ],
'policies': {
'timeout': options.timeout
}
}
################################################################################
# Application
################################################################################
exitCode = 0
try:
# ----------------------------------------------------------------------------
# Connect to Cluster
# ----------------------------------------------------------------------------
client = aerospike.client(config).connect(options.username, options.password)
# ----------------------------------------------------------------------------
# Perform Operation
# ----------------------------------------------------------------------------
try:
namespace = options.namespace if options.namespace and options.namespace != 'None' else None
set = options.set if options.set and options.set != 'None' else None
key = args.pop(0)
digest = client.get_key_digest(namespace, set, key)
print("---")
print("Digest is: ", digest)
except Exception as e:
print("error: {0}".format(e), file=sys.stderr)
exitCode = 2
# ----------------------------------------------------------------------------
# Close Connection to Cluster
# ----------------------------------------------------------------------------
client.close()
except Exception as e:
print("error: {0}".format(e), file=sys.stderr)
exitCode = 3
################################################################################
# Exit
################################################################################
sys.exit(exitCode)
|
GNS3/gns3-legacy | src/GNS3/Link/PipeCapture.py | Python | gpl-2.0 | 3,431 | 0.004372 | # vim: expandtab ts=4 sw=4 sts=4 fileencoding=utf-8:
#
# Copyright (C) 2007-2010 GNS3 Development Team (http://www.gns3.net/team).
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation;
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# http://www.gns3.net/contact
#
import os, subprocess, time
from GNS3.Utils import debug
from PyQt4 import QtCore
try:
import win32pipe, win32file
except:
pass
class PipeCapture(QtCore.QThread):
    def __init__(self, input_capture_file_path, capture_cmd, wireshark_pipe):
        """Thread that feeds a capture file into a Windows named pipe for Wireshark.

        input_capture_file_path -- path of the pcap file being written by the link
        capture_cmd -- command line used to launch Wireshark (appears to be
                       already %p-expanded by the caller; see __main__ below)
        wireshark_pipe -- name of the Windows named pipe Wireshark reads from
        """
        self.input_capture_file_path = input_capture_file_path
        self.capture_cmd = capture_cmd
        self.wireshark_pipe = wireshark_pipe
        # Populated by run(): the Wireshark subprocess and the named pipe handle.
        self.process = None
        self.pipe = None
        QtCore.QThread.__init__(self)
    def __del__(self):
        """Close the named pipe handle (if any) when the thread is destroyed."""
        debug("Deleting pipe thread ...")
        if self.pipe:
            win32file.CloseHandle(self.pipe)
def run(self):
try:
in_file = open(self.input_capture_file_path, 'rb')
except IOError, e:
debug("Cannot open capture file: %s") % unicode(e)
self.exit()
return
try:
self.process = subprocess.Popen(self.capture_cmd.strip())
except (OSError, IOError), e:
debug("Cannot start Wireshark: %s") % unicode(e)
self.exit()
return
try:
self.pipe = win32pipe.CreateNamedPipe(self.wireshark_pipe, win32pipe.PIPE_ACCESS_OUTBOUND, win32pipe.PIPE_TYPE_MESSAGE | win32pipe.PIPE_WAIT, 1, 65536, 65536, 300, None)
win32pipe.ConnectNamedPipe(self.pipe, None)
except win32pipe.error:
debug("Error while creating and connecting the pipe ...")
win32file.CloseHandle(self.pipe)
self.exit()
return
while True:
data = in_file.read()
if not self.process or self.process.returncode != None:
win32file.CloseHandle(self.pipe)
debug("Wireshark is not running, deleting pipe ...")
self.exit()
return
if data:
try:
win32file.WriteFile(self.pipe, data)
except:
win32file.CloseHan | dle(self.pipe)
debug("Wireshark has been closed, deleting pipe ...")
self.exit()
return
else:
time.sleep(0.5) #FIXME: find a better way to wake-up the thread only when there is | data to read
if __name__ == '__main__':
capture_cmd = "C:\Program Files (x86)\Wireshark\wireshark.exe -k -i %p"
pipe = r'\\.\pipe\GNS3\R1_to_R2'
capture_file = "capture.pcap"
path = unicode(capture_cmd.replace("%p", "%s") % pipe)
t = PipeCapture(capture_file, path, pipe)
t.setDaemon(True)
t.start()
t.join(10) # let run the thread for 10 seconds and stop it
|
cessor/gameoflife | config.py | Python | mit | 2,820 | 0.013121 | from collections import namedtuple
Resolution = namedtuple('Resolution', ['x', 'y'])
class Resolutions(object):
    """Registry of supported window resolutions.

    `resolutions` lists the supported (width, height) pairs; `parse`
    validates a requested pair and wraps it in a `Resolution` namedtuple.
    """
    resolutions = [
        (1920, 1200),
        (1920, 1080),
        (1680, 1050),
        (1440, 900),
        (1360, 768),
        (1280, 800),
        (1024, 640)
    ]
    @classmethod
    def parse(cls, x, y):
        """Return Resolution(x, y) if supported.

        Raises ValueError (a subclass of the bare Exception raised
        previously, so existing handlers keep working) with a message
        listing the supported modes when (x, y) is unknown.
        """
        if (x, y) not in cls.resolutions:
            supported = ', '.join(['%sx%s' % (a, b) for a, b in cls.resolutions])
            raise ValueError('Resolution %s x %s not supported. Available resolutions: %s' % (x, y, supported))
        return Resolution(x, y)
class Color(object):
gray | = (0.15, 0.15, 0.13, 1.0)
black = (0.0, 0.0, 0.0, 1.0)
white = (1.0, 1.0, 1.0, 1.0)
red = (1.0, 0.2, 0.0, 1.0)
orange = (1.0, 0.4, 0.0, 1.0)
yellow = (1.0, 0.9, 0.0, 1.0)
light_green = (0.4, 1.0, 0.0, 1.0)
green = (0.0, 1.0, 0.2, 1.0)
cyan = (0.0, 1.0, 0.4, 1.0)
light_blue = (0.0, 0.6, 1.0, 1.0)
blue = (0.0, 0.2, 1.0, 1.0)
purple = (0.4, 0.0, 1.0, 1.0)
pink = (1.0, 0.0, 0.8 | , 1.0)
@classmethod
def __colors(self):
return [key for key in self.__dict__.keys() if not key.startswith('_') and key != 'named']
@classmethod
def named(self, name):
if not hasattr(self, name):
colors = ', '.join(self.__colors())
raise Exception('Unknown color %s. Available colors are: %s' % (name, colors))
return getattr(self, name)
def try_parse(value):
    """Coerce a raw config string: int if numeric, True/False for the
    (case-insensitive) words 'true'/'false', otherwise the string itself."""
    try:
        return int(value)
    # Narrowed from a bare except: int() raises ValueError for non-numeric
    # strings and TypeError for non-string/number inputs; anything else
    # (e.g. KeyboardInterrupt) should propagate.
    except (TypeError, ValueError):
        return {'true': True, 'false': False}.get(value.lower(), value)
def read_config():
    """Parse config.cfg (in the working directory) into a {key: value} dict.

    Blank lines and lines without '=' are ignored; all spaces are stripped
    and values are coerced with try_parse().
    """
    with open('config.cfg', 'r') as cfg_file:
        lines = cfg_file.readlines()
    pairs = [
        # Split on the FIRST '=' only, so values may themselves contain '='
        # (the old split('=') crashed the dict unpacking on such lines).
        line.strip().replace(' ', '').split('=', 1)
        for line in lines
        if line.strip() and '=' in line
    ]
    return {key: try_parse(value) for key, value in pairs}
cfg = read_config()
# Number of cells along one edge of the (square) grid.
NUM_CELLS = cfg.get('CELLS', 100)
RESOLUTION = Resolutions.parse(cfg.get('WINDOW_WIDTH', 1280), cfg.get('WINDOW_HEIGHT', 800))
# The grid is square, so size it to the smaller window dimension.
limit = min(RESOLUTION)
PIXEL_PER_CELL = limit / NUM_CELLS
# Center the grid inside the window.
OFFSET_X = (RESOLUTION.x - (NUM_CELLS * PIXEL_PER_CELL)) / 2
OFFSET_Y = (RESOLUTION.y - (NUM_CELLS * PIXEL_PER_CELL)) / 2
SHOW_FULLSCREEN = cfg.get('FULLSCREEN', False)
SHOW_GRID = cfg.get('SHOW_GRID', True)
# Colors fall back to named defaults when absent from config.cfg.
BACKGROUND_COLOR = Color.named(cfg.get('BACKGROUND_COLOR', 'black'))
GRID_BACKDROP_COLOR = Color.named(cfg.get('GRID_BACKDROP_COLOR', 'gray'))
GRID_LINE_COLOR = Color.named(cfg.get('GRID_LINE_COLOR', 'black'))
CELL_COLOR = Color.named(cfg.get('CELL_COLOR', 'green'))
CURSOR_COLOR = Color.named(cfg.get('CURSOR_COLOR', 'red')) |
vasnake/fb2tools | fb2tools/fb2desc.py | Python | gpl-3.0 | 24,174 | 0.0069 | #!/usr/bin/env python
# -*- mode: python; coding: utf-8; -*-
# (c) Con Radchenko mailto:lankier@gmail.com
#
# $Id: fb2desc.py,v 1.10 2008/09/15 04:18:45 con Exp con $
#
import sys, os
import locale
import getopt
import codecs
import zipfile
from cStringIO import StringIO
import xml.sax
import shutil
import traceback
def get_filename(authors_list, sequence_name, sequence_number, title):
'''Форматы:
1 - "полные имена авторов, разделенные запятой - название (серия #номер)"
2 - тоже, но преобразованное в транслит и с заменой пробелов
3 - "фамилии авторов, разделенные зап | ятой - название"
4 - тоже, но преобразованное в транслит и с заменой пробелов
5 - "первая буква автора в нижнем регистре/авторы, разделенные запятой, в нижнем регистре/авторы, разделенные запятой - название (серия #номер)"
6 - тоже, но преобразованное в транслит и с заменой пробелов
'''
format = options['fn-format']
out = []
authors = []
full_authors = []
for a in authors_lis | t:
if a[0]:
authors.append(a[0])
fa = ' '.join(i for i in a if i)
if fa:
full_authors.append(fa)
authors = ', '.join(authors)
if not authors:
authors = 'unknown'
full_authors = ', '.join(full_authors)
if not full_authors:
full_authors = 'unknown'
if not title:
title = 'unknown'
seq = ''
if sequence_name:
if sequence_number:
seq = '(%s #%s)' % (sequence_name, sequence_number)
else:
seq = '(%s)' % sequence_name
if format == 3:
out.append(authors)
out.append('-')
out.append(title)
out = ' '.join(out)
else:
out.append(full_authors)
out.append('-')
out.append(title)
if seq:
out.append(seq)
out = ' '.join(out)
if format in (2, 4, 6):
out = translit(out)
full_authors = translit(full_authors)
#out = out.replace('/', '%').replace('\0', '').replace('?', '')
for c in '|\\?*<":>+[]/': # invalid chars in VFAT
out = out.replace(c, '')
if format in (4, 5):
full_authors = full_authors.replace(c, '')
fn_max = 240
if format in (5, 6):
fl = full_authors[0]
if not fl.isalpha():
fl = full_authors[1] # FIXME
out = os.path.join(
fl.lower().encode(options['charset']),
full_authors.lower().encode(options['charset'])[:fn_max],
out.encode(options['charset'])[:fn_max])
else:
out = out.encode(options['charset'])[:fn_max]
return out
##----------------------------------------------------------------------
options = {
'format' : '',
'charset' : 'utf-8',
'zip-charset' : 'cp866',
'elements' : [],
'replace' : False,
'rename' : False,
'slink' : False,
'copy' : False,
'fn-format' : 2,
'show-cover' : False,
'show-content' : False,
'show-tree' : False,
'image-viewer' : 'xv',
'quiet' : False,
'dest-dir' : None,
#
'suffix' : None,
}
##----------------------------------------------------------------------
class StopParsing(Exception):
pass
##----------------------------------------------------------------------
# u'\u2013' -> '--'
# u'\u2014' -> '---'
# u'\xa0' -> неразрывный пробел
# u'\u2026' -> dots...
# u'\xab' -> '<<'
# u'\xbb' -> '>>'
# u'\u201c' -> ``
# u'\u201d' -> ''
# u'\u201e' -> ,,
def replace_chars(s):
return (s
.replace(u'\u2013', u'--')
.replace(u'\u2014', u'---')
.replace(u'\xa0' , u' ')
.replace(u'\u2026', u'...')
.replace(u'\xab' , u'<<')
.replace(u'\xbb' , u'>>')
.replace(u'\u201c', u'``')
.replace(u'\u201d', u'\'\'')
.replace(u'\u201e', u',,')
)
def translit(s):
trans_tbl = {
u'\u0430': 'a', #а
u'\u0431': 'b', #б
u'\u0432': 'v', #в
u'\u0433': 'g', #г
u'\u0434': 'd', #д
u'\u0435': 'e', #е
u'\u0451': 'yo', #ё
u'\u0436': 'zh', #ж
u'\u0437': 'z', #з
u'\u0438': 'i', #и
u'\u0439': 'y', #й
u'\u043a': 'k', #к
u'\u043b': 'l', #л
u'\u043c': 'm', #м
u'\u043d': 'n', #н
u'\u043e': 'o', #о
u'\u043f': 'p', #п
u'\u0440': 'r', #р
u'\u0441': 's', #с
u'\u0442': 't', #т
u'\u0443': 'u', #у
u'\u0444': 'f', #ф
u'\u0445': 'h', #х
u'\u0446': 'c', #ц
u'\u0447': 'ch', #ч
u'\u0448': 'sh', #ш
u'\u0449': 'sh', #щ
u'\u044a': '', #ъ
u'\u044b': 'y', #ы
u'\u044c': '', #ь
u'\u044d': 'e', #э
u'\u044e': 'ju', #ю
u'\u044f': 'ya', #я
}
alnum = 'abcdefghijklmnopqrstuvwxyz0123456789'
out = []
out_s = ''
for i in s.lower():
if i.isalnum():
if i in trans_tbl:
out_s += trans_tbl[i]
elif i in alnum:
out_s += i
else:
if out_s: out.append(out_s)
out_s = ''
if out_s: out.append(out_s)
return '_'.join(out)
def wrap_line(s):
if len(s) <= 70:
return u' '+s
ss = u' '
sl = []
for word in s.split():
if len(ss+word) > 72:
sl.append(ss)
ss = word
elif ss:
ss += u' ' + word
else:
ss = word
sl.append(ss)
return '\n'.join(sl)
##----------------------------------------------------------------------
def show_cover(filename, data, content_type):
if not data:
print >> sys.stderr, '%s: sorry, cover not found' % filename
return
import base64, tempfile
data = base64.decodestring(data)
if content_type and content_type.startswith('image/'):
suffix = '.'+content_type[6:]
else:
suffix = ''
tmp_id, tmp_file = tempfile.mkstemp(suffix)
try:
open(tmp_file, 'w').write(data)
os.system(options['image-viewer']+' '+tmp_file)
finally:
os.close(tmp_id)
os.remove(tmp_file)
def show_content(filename, titles):
for secttion_level, data in titles:
if options['replace']: data = replace_chars(data)
print ' '*secttion_level+data.encode(options['charset'], 'replace')
print
def rename(filename, zipfilename, desc, data):
to = pretty_format(filename, zipfilename, len(data), desc, 'filename')
##filename = os.path.abspath(filename)
to += options['suffix']
if options['dest-dir']:
to = os.path.join(options['dest-dir'], to)
to = os.path.abspath(to)
if os.path.exists(to):
print >> sys.stderr, 'file %s already exists' % to
return
dir_name = os.path.dirname(to)
if not os.path.exists(dir_name):
os.makedirs(dir_name)
if options['slink']:
os.symlink(filename, to)
return
elif options['copy']:
shutil.copy(filename, to)
return
os.rename(filename, to)
def pretty_format(filename, zipfilename, filesize, desc, format='pretty'):
ann = []
title = ''
authors_list = []
# [last-name, first-name, middle-name, nick-name]
author_name = [None, None, None, None]
genres = []
sequence_name = ''
sequence_number = ''
for elem, data in desc:
## data = data.strip()
## if not data:
## continue
if elem.startswith('/description/title-info/annotation/'):
if not elem.endswith('href'):
ann.append(data) #wrap_line(data))
if elem.endswith('/p'):
ann.append('\n')
elif elem == '/description/title-info/book-title':
title = data
elif elem == '/description/title-info/author/first-name':
author_name[1] = data
elif elem == '/description/title-info/author/middle-name':
author_name[2] = data
elif elem == '/description/title-info/author/last-name':
author_name[0] = data
authors_list.append(author_ |
lotusronin/KronosEngine | editor/mapentity.py | Python | mit | 592 | 0.045608 | #!/usr/bin/python
# File: mapentity.py
# import pygtk
# pygtk.require('2.0')
from gi.repository import Gtk, Gdk
class MapEntity:
# self.x = None
# self.y = None
# self.name = None
# self.texture = None
def getCoords(self):
return self.x,self.y
def getx(self): |
return self.x
def gety(self):
return self.y
def setCoords(self,xcoord,ycoord):
self.x = xcoord
self.y = ycoord
def getName(self):
return self.name
def setName(self, strname):
self.name = strname
def __init__(self, xarg, yarg, namearg):
self.x = xarg
self.y = yarg
self.name = nam | earg
return |
nickburlett/feincms_gallery | gallery/admin.py | Python | bsd-3-clause | 5,892 | 0.004073 | #coding=utf-8
import json
from django import forms
from django.contrib import admin
from django.core.exceptions import FieldError, ObjectDoesNotExist
from django.http import (HttpResponse, HttpResponseRedirect,
HttpResponseBadRequest, HttpResponseForbidden)
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from django.utils.html import escapejs
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _, ungettext
from django.views.decorators.csrf import csrf_exempt
from feincms.module.medialibrary.models import Category, MediaFile
from feincms.templatetags import feincms_thumbnail
from models import Gallery, GalleryMediaFile
class MediaFileWidget(forms.TextInput):
    """
    TextInput widget, shows a link to the current value if there is one.
    """
    def render(self, name, value, attrs=None):
        # Render the plain text input first; it is embedded in (or returned
        # instead of) the preview markup below.
        inputfield = super(MediaFileWidget, self).render(name, value, attrs)
        if value:
            try:
                mf = MediaFile.objects.get(pk=value)
            except MediaFile.DoesNotExist:
                # Stale FK value: fall back to the bare input.
                return inputfield
            try:
                caption = mf.translation.caption
            # AttributeError covers a missing translation object as well.
            except (ObjectDoesNotExist, AttributeError):
                caption = _('(no caption)')
            if mf.type == 'image':
                # 188x142 thumbnail shown as the div's CSS background.
                image = feincms_thumbnail.thumbnail(mf.file.name, '188x142')
                image = u'background: url(%(url)s) center center no-repeat;' % {'url': image}
            else:
                image = u''
            return mark_safe(u"""
            <div style="%(image)s" class="admin-gallery-image-bg absolute">
                <p class="admin-gallery-image-caption absolute">%(caption)s</p>
                %(inputfield)s</div>""" % {
                'image': image,
                'caption': caption,
                'inputfield': inputfield})
        return inputfield
class ThumbnailForm(forms.Form):
    """Validates the AJAX thumbnail request handled by admin_thumbnail."""
    # Note: field name 'id' shadows the builtin but is part of the POST API.
    id = forms.ModelChoiceField(
        queryset=MediaFile.objects.filter(type='image')
    )
    width = forms.IntegerField(min_value=0)
    height = forms.IntegerField(min_value=0)
@csrf_exempt
def admin_thumbnail(request):
content = u''
if request.method == 'POST' and request.is_ajax():
form = ThumbnailForm(request.POST)
if not form.is_valid():
return HttpResponseB | adRequest(form.errors)
data = form.cleaned_data
obj = data['id']
dimensions = '%sx%s' % (data['width'], data['height'])
if obj.type == 'image':
image = None
try:
image | = feincms_thumbnail.thumbnail(obj.file.name, dimensions)
except:
pass
if image:
content = json.dumps({
'url': image.url,
'name': escapejs(obj.translation.caption)
})
return HttpResponse(content, mimetype='application/json')
else:
return HttpResponseForbidden()
admin_thumbnail.short_description = _('Image')
admin_thumbnail.allow_tags = True
class MediaFileAdminForm(forms.ModelForm):
    """GalleryMediaFile form limiting the FK to images and rendering a preview."""
    mediafile = forms.ModelChoiceField(queryset=MediaFile.objects.filter(type='image'),
            widget=MediaFileWidget(attrs={'class': 'image-fk'}), label=_('media file'))
    class Meta:
        model = GalleryMediaFile
        fields = ['mediafile']
class GalleryMediaFileAdmin(admin.ModelAdmin):
    """Standalone admin for gallery/media-file links with a thumbnail column."""
    form = MediaFileAdminForm
    model = GalleryMediaFile
    list_display = ['__unicode__', admin_thumbnail]
    classes = ['sortable']
class GalleryMediaFileInline(admin.StackedInline):
    """Inline used on the Gallery admin page; sortable, ordered by `ordering`."""
    model = GalleryMediaFile
    raw_id_fields = ('mediafile',)
    extra = 0
    form = MediaFileAdminForm
    classes = ['sortable']
    ordering = ['ordering']
    template = 'admin/gallery/gallery/stacked.html'
class GalleryAdmin(admin.ModelAdmin):
    """Gallery admin with a bulk action that attaches every image of a
    media-library category to the selected galleries."""
    inlines = (GalleryMediaFileInline,)
    list_display = ['title', 'verbose_images']

    class AddCategoryForm(forms.Form):
        # Carries the admin checkbox selection through the intermediate page.
        _selected_action = forms.CharField(widget=forms.MultipleHiddenInput)
        category = forms.ModelChoiceField(Category.objects)

    def assign_category(self, request, queryset):
        """Intermediate-page admin action: pick a category, link its files."""
        form = None
        if 'apply' in request.POST:
            form = self.AddCategoryForm(request.POST)
            if form.is_valid():
                category = form.cleaned_data['category']
                mediafiles = MediaFile.objects.filter(categories=category)
                count = 0
                for gallery in queryset:
                    for mediafile in mediafiles:
                        try:
                            GalleryMediaFile.objects.create(gallery=gallery, mediafile=mediafile)
                        except FieldError:
                            pass
                        count += 1
                message = ungettext(
                    'Successfully added %(count)d mediafiles in %(category)s Category.',
                    'Successfully added %(count)d mediafiles in %(category)s Categories.',
                    count) % {'count': count, 'category': category}
                self.message_user(request, message)
                return HttpResponseRedirect(request.get_full_path())
        if not form:
            selected = request.POST.getlist(admin.ACTION_CHECKBOX_NAME)
            form = self.AddCategoryForm(initial={'_selected_action': selected})
        context = {'mediafiles': queryset, 'category_form': form}
        return render_to_response('admin/gallery/add_category.html', context,
                                  context_instance=RequestContext(request))
    assign_category.short_description = _('Assign Images from a Category to this Gallery')
    actions = [assign_category]
admin.site.register(Gallery, GalleryAdmin)
|
joelsmith/openshift-tools | ansible/roles/lib_openshift_3.2/library/oc_pvc.py | Python | apache-2.0 | 39,870 | 0.002558 | #!/usr/bin/env python # pylint: disable=too-many-lines
# ___ ___ _ _ ___ ___ _ _____ ___ ___
# / __| __| \| | __| _ \ /_\_ _| __| \
# | (_ | _|| .` | _|| / / _ \| | | _|| |) |
# \___|___|_|\_|___|_|_\/_/_\_\_|_|___|___/_ _____
# | \ / _ \ | \| |/ _ \_ _| | __| \_ _|_ _|
# | |) | (_) | | .` | (_) || | | _|| |) | | | |
# |___/ \___/ |_|\_|\___/ |_| |___|___/___| |_|
'''
OpenShiftCLI class that wraps the oc commands in a subprocess
'''
# pylint: disable=too-many-lines
import atexit
import json
import os
import re
import shutil
import subprocess
import ruamel.yaml as yaml
#import yaml
#
## This is here because of a bug that causes yaml
## to incorrectly handle timezone info on timestamps
#def timestamp_constructor(_, node):
# '''return timestamps as strings'''
# return str(node.value)
#yaml.add_constructor(u'tag:yaml.org,2002:timestamp', timestamp_constructor)
class OpenShiftCLIError(Exception):
'''Exception class for openshiftcli'''
pass
# pylint: disable=too-few-public-methods
class OpenShiftCLI(object):
''' Class to wrap the command line tools '''
    def __init__(self,
                 namespace,
                 kubeconfig='/etc/origin/master/admin.kubeconfig',
                 verbose=False,
                 all_namespaces=False):
        ''' Constructor for OpenshiftCLI

            namespace      -- namespace passed as -n to oc commands
            kubeconfig     -- kubeconfig path, exported as KUBECONFIG when
                              running oc/oadm
            verbose        -- when True, print each command before running it
            all_namespaces -- when True, _get queries with --all-namespaces
        '''
        self.namespace = namespace
        self.verbose = verbose
        self.kubeconfig = kubeconfig
        self.all_namespaces = all_namespaces
# Pylint allows only 5 arguments to be passed.
# pylint: disable=too-many-arguments
    def _replace_content(self, resource, rname, content, force=False, sep='.'):
        ''' replace the current object with the content

            Fetches the named resource, applies each key/value pair from
            `content` through Yedit (keys use `sep` as the path separator)
            and, only if something actually changed, writes the result to a
            temp file and runs `oc replace` on it.
        '''
        res = self._get(resource, rname)
        if not res['results']:
            return res
        fname = '/tmp/%s' % rname
        yed = Yedit(fname, res['results'][0], separator=sep)
        changes = []
        for key, value in content.items():
            changes.append(yed.put(key, value))
        # yed.put appears to return a tuple whose first element flags a
        # change; only run `oc replace` when at least one key changed.
        if any([change[0] for change in changes]):
            yed.write()
            # remove the temp file at interpreter exit
            atexit.register(Utils.cleanup, [fname])
            return self._replace(fname, force)
        return {'returncode': 0, 'updated': False}
def _replace(self, fname, force=False):
'''return all pods '''
cmd = ['-n', self.namespace, 'replace', '-f', fname]
if force:
cmd.append('--force')
return self.openshift_cmd(cmd)
def _create_from_content(self, rname, content):
'''return all pods '''
fname = '/tmp/%s' % rname
yed = Yedit(fname, content=content)
yed.write()
atexit.register(Utils.cleanup, [fname])
return self._create(fname)
    def _create(self, fname):
        ''' create a resource from the given file via `oc create -f` '''
        return self.openshift_cmd(['create', '-f', fname, '-n', self.namespace])
def _delete(self, resource, rname, selector=None):
'''return all pods '''
cmd = ['delete', resource, rname, '-n', self.namespace]
if selector:
cmd.append('--selector=%s' % selector)
return self.openshift_ | cmd(cmd)
def _process(self, template_name, create=False, params=None, template_data=None):
'''return all pods '''
cmd = ['process', '-n', self.namespace]
if template_data:
cmd.extend(['-f', '-'])
else:
cmd.append(template_name)
if params:
param_str = ["%s=%s" % (key, value) for key, value in p | arams.items()]
cmd.append('-v')
cmd.extend(param_str)
results = self.openshift_cmd(cmd, output=True, input_data=template_data)
if results['returncode'] != 0 or not create:
return results
fname = '/tmp/%s' % template_name
yed = Yedit(fname, results['results'])
yed.write()
atexit.register(Utils.cleanup, [fname])
return self.openshift_cmd(['-n', self.namespace, 'create', '-f', fname])
def _get(self, resource, rname=None, selector=None):
'''return a resource by name '''
cmd = ['get', resource]
if selector:
cmd.append('--selector=%s' % selector)
if self.all_namespaces:
cmd.extend(['--all-namespaces'])
elif self.namespace:
cmd.extend(['-n', self.namespace])
cmd.extend(['-o', 'json'])
if rname:
cmd.append(rname)
rval = self.openshift_cmd(cmd, output=True)
# Ensure results are retuned in an array
if rval.has_key('items'):
rval['results'] = rval['items']
elif not isinstance(rval['results'], list):
rval['results'] = [rval['results']]
return rval
    def _schedulable(self, node=None, selector=None, schedulable=True):
        ''' perform oadm manage-node --schedulable

            node -- list of node names (extended onto the command line);
                    when falsy, `selector` is used instead
        '''
        cmd = ['manage-node']
        if node:
            cmd.extend(node)
        else:
            cmd.append('--selector=%s' % selector)
        # NOTE(review): %s on a bool renders 'True'/'False'; confirm oadm
        # accepts this capitalization for --schedulable.
        cmd.append('--schedulable=%s' % schedulable)
        return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
    def _list_pods(self, node=None, selector=None, pod_selector=None):
        ''' perform oadm manage-node --list-pods

            node -- list of node names; when falsy, `selector` is used.
            pod_selector -- optional label selector restricting the pods.
        '''
        cmd = ['manage-node']
        if node:
            cmd.extend(node)
        else:
            cmd.append('--selector=%s' % selector)
        if pod_selector:
            cmd.append('--pod-selector=%s' % pod_selector)
        cmd.extend(['--list-pods', '-o', 'json'])
        return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
#pylint: disable=too-many-arguments
    def _evacuate(self, node=None, selector=None, pod_selector=None, dry_run=False, grace_period=None, force=False):
        ''' perform oadm manage-node --evacuate

            node -- list of node names; when falsy, `selector` is used.
            pod_selector -- optional label selector restricting the pods
            dry_run -- show what would be evacuated without doing it
            grace_period -- pod termination grace period in seconds
            force -- force the evacuation
        '''
        cmd = ['manage-node']
        if node:
            cmd.extend(node)
        else:
            cmd.append('--selector=%s' % selector)
        if dry_run:
            cmd.append('--dry-run')
        if pod_selector:
            cmd.append('--pod-selector=%s' % pod_selector)
        if grace_period:
            cmd.append('--grace-period=%s' % int(grace_period))
        if force:
            cmd.append('--force')
        cmd.append('--evacuate')
        return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
def _import_image(self, url=None, name=None, tag=None):
''' perform image import '''
cmd = ['import-image']
image = '{0}'.format(name)
if tag:
image += ':{0}'.format(tag)
cmd.append(image)
if url:
cmd.append('--from={0}/{1}'.format(url, image))
cmd.append('-n{0}'.format(self.namespace))
cmd.append('--confirm')
return self.openshift_cmd(cmd)
#pylint: disable=too-many-arguments
def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json', input_data=None):
'''Base command for oc '''
cmds = []
if oadm:
cmds = ['/usr/bin/oadm']
else:
cmds = ['/usr/bin/oc']
cmds.extend(cmd)
rval = {}
results = ''
err = None
if self.verbose:
print ' '.join(cmds)
proc = subprocess.Popen(cmds,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env={'KUBECONFIG': self.kubeconfig})
stdout, stderr = proc.communicate(input_data)
rval = {"returncode": proc.returncode,
"results": results,
"cmd": ' '.join(cmds),
}
if proc.returncode == 0:
if output:
if output_type == 'json':
try:
rval['results'] = json.loads(stdout)
except ValueError as err:
if "No JSON object could be decoded" in err.message:
err = err.message
elif output_type == 'raw':
|
eduNEXT/edunext-platform | import_shims/lms/discussion/tests/test_views.py | Python | agpl-3.0 | 404 | 0.009901 | """Deprecated | import support. Auto-generated by import_shims/generate_shims.sh."""
# pylint: disable=redefined-builtin,wrong-import-position,wildcard-import,useless-suppression,line-too-long
from import_shims.warn import warn_deprecated_import
warn_deprecated_import('discussion.tests.test_views', 'lms.djangoapps.discussion.tes | ts.test_views')
from lms.djangoapps.discussion.tests.test_views import *
|
cmusatyalab/opendiamond | opendiamond/scopeserver/gigapan/views.py | Python | epl-1.0 | 4,425 | 0.002486 | #
# The OpenDiamond Platform for Interactive Search
#
# Copyright (c) 2009-2019 Carnegie Mellon University
# All rights reserved.
#
# This software is distributed under the terms of the Eclipse Public
# License, Version 1.0 which can be found in the file named LICENSE.
# ANY USE, REPRODUCTION OR DISTRIBUTION OF THIS SOFTWARE CONSTITUTES
# RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT
#
from future import standard_library
standard_library.install_aliases()
from builtins import str
from urllib.parse import quote_plus
from urllib.request import urlopen
from urllib.error import HTTPError
from django.contrib.auth.decorators import permission_required
from django.conf import settings
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import redirect, render
from django.urls import reverse
from opendiamond.scope import generate_cookie_django
from .forms import GigaPanSearchForm, GigaPanChoiceForm
try:
import json
except ImportError:
import simplejson as json
def generate_cooki | e(ids):
def full_url(gigapan):
return 'http://%s:5873%s' % (settings.GIGAPAN_SERVER, gigapan)
gigapans = ["/gigapan/%d" % int(gigap | an_id) for gigapan_id in ids]
proxies = settings.GIGAPAN_PROXIES
blaster = getattr(settings, 'GIGAPAN_BLASTER', None)
if len(proxies) > len(gigapans):
mapping = {} # gigapan -> [proxy]
gigapan_index = 0
for proxy in proxies:
mapping.setdefault(gigapans[gigapan_index], []).append(proxy)
gigapan_index = (gigapan_index + 1) % len(gigapans)
cookies = []
for gigapan in gigapans:
if len(mapping[gigapan]) > 1:
cookies.append(generate_cookie_django(
[gigapan], servers=[settings.GIGAPAN_SERVER],
proxies=mapping[gigapan], blaster=blaster))
else:
cookies.append(generate_cookie_django(
[full_url(gigapan)], mapping[gigapan], blaster=blaster))
cookie = ''.join(cookies)
else:
mapping = {} # proxy -> [gigapan]
proxy_index = 0
for gigapan in gigapans:
mapping.setdefault(proxies[proxy_index],
[]).append(full_url(gigapan))
proxy_index = (proxy_index + 1) % len(proxies)
cookie = ''.join([generate_cookie_django(mapping[proxy], [proxy],
blaster=blaster) for proxy in proxies])
return HttpResponse(cookie, content_type='application/x-diamond-scope')
@permission_required("gigapan.search")
def generate(request):
    '''Validate the POSTed gigapan selection and emit the scope cookie.'''
    # Feed the submitted ids back in as the valid choices so that
    # set-membership validation always passes (intentional hack).
    chosen = request.POST.getlist('gigapan_choice')
    form = GigaPanChoiceForm(request.POST or None, ids=chosen)
    if not form.is_valid():
        return redirect('index')
    return generate_cookie(form.cleaned_data['gigapan_choice'])
@permission_required("gigapan.search")
def browse(request):
    '''Parse search form, perform search

    A numeric query is treated as a gigapan id and validated against the
    API; anything else is a text search against most_popular.json. On any
    hit, render the choice page; otherwise redirect back with ?error=True.
    '''
    form = GigaPanSearchForm(request.GET or None)
    if form.is_valid():
        query = form.cleaned_data.get('search')
        if query.isdigit():
            api_url = ('http://api.gigapan.org/beta/gigapans/%d.json' %
                       int(query))
            try:
                # Check that the ID is valid
                urlopen(api_url)
                ids = [query]
            except HTTPError:
                ids = []
        else:
            url = "http://api.gigapan.org/beta/gigapans/page/1/matching/"
            url += "%s/most_popular.json" % quote_plus(query)
            # NOTE(review): str() on the bytes returned by urlopen() yields
            # "b'...'" on Python 3 and would break json.loads — confirm this
            # path is exercised only under the python2/future setup above.
            text = str(urlopen(url).read())
            data = json.loads(text)
            ids = [id for id, _ in data[u'items']]
        if ids:
            choiceform = GigaPanChoiceForm(ids=ids)
            return render(request, 'scopeserver/gigapan_browse.html', {
                'form': choiceform
            })
        return HttpResponseRedirect(reverse('index') + "?error=True")
    return redirect('index')
@permission_required("gigapan.search")
def index(request):
    '''Render the search page; a GET query reaching here means no results.'''
    context = {'form': GigaPanSearchForm()}
    if request.GET:
        context['errors'] = "No results found"
    return render(request, 'scopeserver/gigapan_search.html', context)
|
20tab/upy | upy/contrib/rosetta/views.py | Python | bsd-3-clause | 18,100 | 0.003812 | from django.conf import settings
from django.contrib.auth.decorators import user_passes_test
from django.core.paginator import Paginator
from django.core.urlresolvers import reverse
from django.http import Http404, HttpResponseRedirect, HttpResponse
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.utils.encoding import smart_unicode, iri_to_uri
from django.utils.translation import ugettext_lazy as _
from django.views.decorators.cache import never_cache
from upy.contrib.rosetta.conf import settings as rosetta_settings
from upy.contrib.rosetta.polib import pofile
from upy.contrib.rosetta.poutil import find_pos, pagination_range
from upy.contrib.rosetta.signals import entry_changed, post_save
from upy.contrib.rosetta.storage import get_storage
import re
from upy.contrib import rosetta
import datetime
import unicodedata
import hashlib
import os
def home(request):
"""
Displays a list of messages to be translated
"""
def fix_nls(in_, out_):
"""Fixes submitted translations by filtering carriage returns and pairing
newlines at the begging and end of the translated string with the original
"""
if 0 == len(in_) or 0 == len(out_):
return out_
if "\r" in out_ and "\r" not in in_:
out_ = out_.replace("\r", '')
if "\n" == in_[0] and "\n" != out_[0]:
out_ = "\n" + out_
elif "\n" != in_[0] and "\n" == out_[0]:
out_ = out_.lstrip()
if "\n" == in_[-1] and "\n" != out_[-1]:
out_ = out_ + "\n"
elif "\n" != in_[-1] and "\n" == out_[-1]:
out_ = out_.rstrip()
return out_
storage = get_storage(request)
version = rosetta.get_version(True)
if storage.has('rosetta_i18n_fn'):
rosetta_i18n_fn = storage.get('rosetta_i18n_fn')
rosetta_i18n_app = get_app_name(rosetta_i18n_fn)
rosetta_i18n_lang_code = storage.get('rosetta_i18n_lang_code')
rosetta_i18n_lang_bidi = rosetta_i18n_lang_code.split('-')[0] in settings.LANGUAGES_BIDI
rosetta_i18n_write = storage.get('rosetta_i18n_write', True)
if rosetta_i18n_write:
rosetta_i18n_pofile = pofile(rosetta_i18n_fn, wrapwidth=rosetta_settings.POFILE_WRAP_WIDTH)
for entry in rosetta_i18n_pofile:
entry.md5hash = hashlib.md5(
entry.msgid.encode("utf8") +
entry.msgstr.encode("utf8") +
(entry.msgctxt and entry.msgctxt.encode("utf8") or "")
).hexdigest()
else:
rosetta_i18n_pofile = storage.get('rosetta_i18n_pofile')
if 'filter' in request.GET:
if request.GET.get('filter') in ('untranslated', 'translated', 'fuzzy', 'all'):
filter_ = request.GET.get('filter')
storage.set('rosetta_i18n_filter', filter_)
return HttpResponseRedirect(reverse('rosetta-home'))
rosetta_i18n_filter = storage.get('rosetta_i18n_filter', 'all')
if '_next' in request.POST:
rx = re.compile(r'^m_([0-9a-f]+)')
rx_plural = re.compile(r'^m_([0-9a-f]+)_([0-9]+)')
file_change = False
for key, value in request.POST.items():
md5hash = None
plural_id = None
if rx_plural.match(key):
md5hash = str(rx_plural.match(key).groups()[0])
# polib parses .po files into unicode strings, but
# doesn't bother to convert plural indexes to int,
# so we need unicode here.
plural_id = unicode(rx_plural.match(key).groups()[1])
elif rx.match(key):
md5hash = str(rx.match(key).groups()[0])
if md5hash is not None:
entry = rosetta_i18n_pofile.find(md5hash, 'md5hash')
# If someone did a makemessage, some entries might
# have been removed, so we need to check.
if entry:
old_msgstr = entry.msgstr
if plural_id is not None:
#plural_string = fix_nls(entry.msgstr_plural[plural_id], value)
plural_string = fix_nls(entry.msgid_plural, value)
entry.msgstr_plural[plural_id] = plural_string
else:
entry.msgstr = fix_nls(entry.msgid, value)
is_fuzzy = bool(request.POST.get('f_%s' % md5hash, False))
old_fuzzy = 'fuzzy' in entry.flags
if old_fuzzy and not is_fuzzy:
entry.flags.remove('fuzzy')
elif not old_fuzzy and is_fuzzy:
entry.flags.append('fuzzy')
file_change = True
if old_msgstr != value or old_fuzzy != is_fuzzy:
entry_changed.send(sender=entry,
user=request.user,
old_msgstr=old_msgstr,
old_fuzzy=old_fuzzy,
pofile=rosetta_i18n_fn,
language_code=rosetta_i18n_lang_code,
)
else:
storage.set('rosetta_last_save_error', True)
if file_change and rosetta_i18n_write:
try:
# Provide defaults in case authorization is not required.
request.user.first_name = getattr(request.user, 'first_name', 'Anonymous')
request.user.last_name = getattr(request.user, 'last_name', 'User')
request.user.email = getattr(request.user, 'email', 'anonymous@user.tld')
rosetta_i18n_pofile.metadata['Last-Translator'] = unicodedata.normalize('NFKD', u"%s %s <%s>" % (request.user.first_name, request.user.last_name, request.user.email)).encode('ascii', 'ignore')
rosetta_i18n_pofile.metadata['X-Translated-Using'] = u"django-rosetta %s" % rosetta.get_version(False)
rosetta_i18n_pofile.metadata['PO-Revision-Date'] = datetime.datetime.now().strftime('%Y-%m-%d %H:%M%z')
except UnicodeDecodeError:
pass
try:
rosetta_i18n_pofile.save()
po_filepath, ext = os.path.splitext(rosetta_i18n_fn)
save_as_mo_filepath = po_filepath + '.mo'
rosetta_i18n_pofile.save_as_mofile(save_as_mo_filepath)
post_save.send(sender=None, language_code=rosetta_i18n_lang_code, request=request)
# Try auto-reloading via the WSGI daemon mode reload mechanism
if rosetta_settings.WSGI_AUTO_RELOAD and \
'mod_wsgi.process_group' in request.environ and \
request.environ.get('mod_wsgi.process_group', None) and \
'SCRIPT_FILENAME' in request.environ and \
int(request.en | viron.get('mod_wsgi.script_reloading', '0')):
try:
os.utime(request.environ.get('SCRIPT_FILENAME'), None)
except OSError:
pass
# Try auto-reloading via uwsgi daemon reload mechanism
if rosetta_settings.UWSGI_AUTO_RELOAD:
try:
import uwsgi
# pretty easy ri | ght?
uwsgi.reload()
except:
# we may not be running under uwsgi :P
pass
except:
storage.set('rosetta_i18n_write', False)
storage.set('rosetta_i18n_pofile', rosetta_i18n_ |
CroceRossaItaliana/jorvik | social/models.py | Python | gpl-3.0 | 4,591 | 0.001307 | from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation
from django.contrib.contenttypes.models import ContentType
from django.db import models
from base.models import ModelloSemplice
from base.tratti im | port ConMarcaTemporale
class Giudizio(ModelloSemplice, ConMarcaTemporale):
"""
Rappresenta un giudizio sociale ad un oggetto generico.
Utilizzare tramite il tratto ConGiudizio ed i suoi metodi.
"""
class Meta:
verbose_name_plural = "Giudizi"
permissions = (
("view_giudizio", "Can view giudizio"),
)
autore = models.ForeignKey("anagrafica.Persona", db_index=True, related_name="giu | dizi", on_delete=models.CASCADE)
positivo = models.BooleanField("Positivo", db_index=True, default=True)
oggetto_tipo = models.ForeignKey(ContentType, db_index=True, on_delete=models.SET_NULL, null=True)
oggetto_id = models.PositiveIntegerField(db_index=True)
oggetto = GenericForeignKey('oggetto_tipo', 'oggetto_id')
class Commento(ModelloSemplice, ConMarcaTemporale):
"""
Rappresenta un commento sociale ad un oggetto generico.
Utilizzare tramite il tratto ConCommento ed i suoi metodi.
"""
class Meta:
verbose_name_plural = "Commenti"
app_label = "social"
abstract = False
permissions = (
("view_commento", "Can view commento"),
)
autore = models.ForeignKey("anagrafica.Persona", db_index=True, related_name="commenti", on_delete=models.CASCADE)
commento = models.TextField("Testo del commento")
oggetto_tipo = models.ForeignKey(ContentType, db_index=True, on_delete=models.SET_NULL, null=True)
oggetto_id = models.PositiveIntegerField(db_index=True)
oggetto = GenericForeignKey('oggetto_tipo', 'oggetto_id')
LUNGHEZZA_MASSIMA = 1024
class ConGiudizio():
"""
Aggiunge le funzionalita' di giudizio, stile social,
positivi o negativi.
"""
class Meta:
abstract = True
giudizi = GenericRelation(
Giudizio,
related_query_name='giudizi',
content_type_field='oggetto_tipo',
object_id_field='oggetto_id'
)
def giudizio_positivo(self, autore):
"""
Registra un giudizio positivo
:param autore: Autore del giudizio
"""
self._giudizio(autore, True)
def giudizio_negativo(self, autore):
"""
Registra un giudizio negativo
:param autore: Autore del giudizio
"""
self._giudizio(autore, False)
def _giudizio(self, autore, positivo):
"""
Registra un giudizio
:param autore: Autore del giudizio
:param positivo: Vero se positivo, falso se negativo
"""
g = self.giudizio_cerca(autore)
if g: # Se gia' esiste un giudizio, modifico il tipo
g.positivo = positivo
else: # Altrimenti, ne registro uno nuovo
g = Giudizio(
oggetto=self,
positivo=positivo,
autore=autore
)
g.save()
@property
def giudizi_positivi(self):
"""
Restituisce il numero di giudizi positivi associati all'oggetto.
"""
return self._giudizi(self, True)
@property
def giudizi_negativi(self):
"""
Restituisce il numero di giudizi negativi associati all'oggetto.
"""
return self._giudizi(self, False)
def _giudizi(self, positivo):
"""
Restituisce il numero di giudizi positivi o negativi associati all'oggetto.
"""
return self.giudizi.filter(positivo=positivo).count()
def giudizio_cerca(self, autore):
"""
Cerca il giudizio di un autore sull'oggetto. Se non presente,
ritorna None.
"""
g = self.giudizi.filter(autore=autore)[:1]
if g:
return g
return None
class ConCommenti(models.Model):
"""
Aggiunge la possibilita' di aggiungere commenti ad
un oggetto.
"""
class Meta:
abstract = True
commenti = GenericRelation(
Commento,
related_query_name='%(class)s',
content_type_field='oggetto_tipo',
object_id_field='oggetto_id'
)
def commento_notifica_destinatari(self, mittente):
"""
SOVRASCRIVIMI!
Ritorna il queryset di persone che devono ricevere
una notifica ogni volta che un commento viene aggiunto
da un dato mittente.
"""
from anagrafica.models import Persona
return Persona.objects.none()
|
rosscdh/django-paymill-redux | dj_paymill/tests/__init__.py | Python | mit | 590 | 0.011864 | # -*- coding: utf-8 -*-
TRANSLATED_WEBHOOK_DATA = {"stamp": {"serial": "DEV-STAMP"}, "receipt": "2DJ2fkRJQdGhaLwLjIZL9Zpz/84=", "secure": False, "created": "2014-04-26 14:35:43.543350"}
WEBHOOK_POSTED_DATA = {'data': u'W1syODAsMjgxXSxbMjY3LDExMF0sWzkzLDU3XSxbMjAzLDUxXSxbMTAzLDI5MV1d' | }
VALID_VIEW_RESPONSE | = {u'serial': u'DEV-STAMP', u'data': {u'stamp': {}, u'receipt': u'2DJ2fkRJQdGhaLwLjIZL9Zpz/84=', u'secure': False, u'created': u'2014-04-26 14:35:43.543350'}, u'detail': u'Paymill Callback recieved'}
from .test_views import *
from .test_services import *
from .test_signals import * |
ApolloAuto/apollo | modules/tools/record_play/rtk_recorder.py | Python | apache-2.0 | 7,449 | 0.000671 | #!/usr/bin/env python3
###############################################################################
# Copyright 2017 The Apollo Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
"""
Record GPS and IMU data
"""
import atexit
import logging
import math
import os
import sys
import time
from cyber.python.cyber_py3 import cyber
from gflags import FLAGS
from modules.tools.common.logger import Logger
import modules.tools.common.proto_utils as proto_utils
from modules.canbus.proto import chassis_pb2
from modules.common.configs.proto import vehicle_config_pb2
from modules.localization.proto import localization_pb2
class RtkRecord(object):
"""
rtk recording class
"""
def write(self, data):
"""Wrap file write function to flush data to disk"""
self.file_handler.write(data)
self.file_handler.flush()
def __init__(self, record_file):
self.firstvalid = False
self.logger = Logger.get_logger("RtkRecord")
self.record_file = record_file
self.logger.info("Record file to: " + record_file)
try:
self.file_handler = open(record_file, 'w')
except IOError:
self.logger.error("Open file %s failed" % (record_file))
self.file_handler.close()
sys.exit(1)
self.write("x,y,z,speed,acceleration,curvature,"
"curvature_change_rate,time,theta,gear,s,throttle,brake,steering\n")
self.localization = localization_pb2.LocalizationEstimate()
self.chassis = chassis_pb2.Chassis()
self.chassis_received = False
self.cars = 0.0
self.startmoving = False
self.terminating = False
self.carcurvature = 0.0
self.prev_carspeed = 0.0
vehicle_config = vehicle_config_pb2.VehicleConfig()
proto_utils.get_pb_from_text_file(
"/apollo/modules/common/data/vehicle_param.pb.txt", vehicle_config)
self.vehicle_param = vehicle_config.vehicle_param
def chassis_callback(self, data):
"""
New message received
"""
if self.terminating is True:
self.logger.info("terminating when receive chassis msg")
return
| self.chassis.CopyFrom(data)
#self.chassis = data
if math.isnan(self.chassis.speed_mps):
self.logger.warning("find nan speed_mps: %s" % str(self.chassis))
if math.isnan(self.chassis.steering_percentage):
self.logger.warning(
"find na | n steering_percentage: %s" % str(self.chassis))
self.chassis_received = True
def localization_callback(self, data):
"""
New message received
"""
if self.terminating is True:
self.logger.info("terminating when receive localization msg")
return
if not self.chassis_received:
self.logger.info(
"chassis not received when localization is received")
return
self.localization.CopyFrom(data)
#self.localization = data
carx = self.localization.pose.position.x
cary = self.localization.pose.position.y
carz = self.localization.pose.position.z
cartheta = self.localization.pose.heading
if math.isnan(self.chassis.speed_mps):
self.logger.warning("find nan speed_mps: %s" % str(self.chassis))
return
if math.isnan(self.chassis.steering_percentage):
self.logger.warning(
"find nan steering_percentage: %s" % str(self.chassis))
return
carspeed = self.chassis.speed_mps
caracceleration = self.localization.pose.linear_acceleration_vrf.y
speed_epsilon = 1e-9
if abs(self.prev_carspeed) < speed_epsilon \
and abs(carspeed) < speed_epsilon:
caracceleration = 0.0
carsteer = self.chassis.steering_percentage
carmax_steer_angle = self.vehicle_param.max_steer_angle
carsteer_ratio = self.vehicle_param.steer_ratio
carwheel_base = self.vehicle_param.wheel_base
curvature = math.tan(math.radians(carsteer / 100
* math.degrees(carmax_steer_angle)) / carsteer_ratio) / carwheel_base
if abs(carspeed) >= speed_epsilon:
carcurvature_change_rate = (curvature - self.carcurvature) / (
carspeed * 0.01)
else:
carcurvature_change_rate = 0.0
self.carcurvature = curvature
cartime = self.localization.header.timestamp_sec
cargear = self.chassis.gear_location
if abs(carspeed) >= speed_epsilon:
if self.startmoving is False:
self.logger.info(
"carspeed !=0 and startmoving is False, Start Recording")
self.startmoving = True
if self.startmoving:
self.cars += carspeed * 0.01
self.write(
"%s, %s, %s, %s, %s, %s, %s, %.4f, %s, %s, %s, %s, %s, %s\n" %
(carx, cary, carz, carspeed, caracceleration, self.carcurvature,
carcurvature_change_rate, cartime, cartheta, cargear,
self.cars, self.chassis.throttle_percentage,
self.chassis.brake_percentage,
self.chassis.steering_percentage))
self.logger.debug(
"started moving and write data at time %s" % cartime)
else:
self.logger.debug("not start moving, do not write data to file")
self.prev_carspeed = carspeed
def shutdown(self):
"""
shutdown node
"""
self.terminating = True
self.logger.info("Shutting Down...")
self.logger.info("File is written into %s" % self.record_file)
self.file_handler.close()
def main(argv):
"""
Main node
"""
node = cyber.Node("rtk_recorder")
argv = FLAGS(argv)
log_dir = "/apollo/data/log"
if len(argv) > 1:
log_dir = argv[1]
if not os.path.exists(log_dir):
os.makedirs(log_dir)
Logger.config(
log_file=log_dir + "rtk_recorder.log",
use_stdout=True,
log_level=logging.DEBUG)
print("runtime log is in %s%s" % (log_dir, "rtk_recorder.log"))
record_file = log_dir + "/garage.csv"
recorder = RtkRecord(record_file)
atexit.register(recorder.shutdown)
node.create_reader('/apollo/canbus/chassis',
chassis_pb2.Chassis,
recorder.chassis_callback)
node.create_reader('/apollo/localization/pose',
localization_pb2.LocalizationEstimate,
recorder.localization_callback)
while not cyber.is_shutdown():
time.sleep(0.002)
if __name__ == '__main__':
cyber.init()
main(sys.argv)
cyber.shutdown()
|
janusnic/py-21v | lambda/3.py | Python | mit | 90 | 0.011111 | # -*- codin | g:utf-8 -*-
print sorted([[3, 4], [3, 5], [1, 2], [7, 3]], key=lambda x: | x[1]) |
AKSW/QuitStore | quit/core.py | Python | gpl-3.0 | 24,569 | 0.001709 | import pygit2
import logging
from copy import copy
from pygit2 import GIT_MERGE_ANALYSIS_UP_TO_DATE
from pygit2 import GIT_MERGE_ANALYSIS_FASTFORWARD
from pygit2 import GIT_MERGE_ANALYSIS_NORMAL
from pygit2 import GIT_SORT_REVERSE, GIT_RESET_HARD, GIT_STATUS_CURRENT
from rdflib import Graph, ConjunctiveGraph, BNode, Literal, URIRef
import re
from quit.conf import Feature, QuitGraphConfiguration
from quit.helpers import applyChangeset
from quit.namespace import RDFS, FOAF, XSD, PROV, QUIT, is_a
from quit.graphs import RewriteGraph, InMemoryAggregatedGraph
from quit.utils import graphdiff, git_timestamp, iri_to_name
from quit.cache import Cache, FileReference
import subprocess
logger = logging.getLogger('quit.core')
class Queryable:
"""A class that represents a querable graph-like object."""
def __init__(self, **kwargs):
pass
def query(self, querystring):
"""Execute a SPARQL select query.
Args:
querystring: A string containing a SPARQL ask or select query.
Returns:
The SPARQL result set
"""
pass
def update(self, querystring):
"""Execute a SPARQL update query and update the store.
This method executes a SPARQL update query and updates and commits all affected files.
Args:
querystring: A string containing a SPARQL upate query.
"""
pass
class Store(Queryable):
"""A class that combines and syncronieses n-quad files and an in-memory quad store.
This class contains information about all graphs, their corresponding URIs and
pathes in the file system. For every Graph (context of Quad-Store) exists a
FileReference object (n-quad) that enables versioning (with git) and persistence.
"""
def __init__(self, store):
"""Initialize a new Store instance."""
self.store = store
return
class MemoryStore(Store):
def __init__(self, additional_bindings=list()):
store = ConjunctiveGraph(identifier='default')
nsBindings = [('quit', QUIT), ('foaf', FOAF), ('prov', PROV)]
for prefix, namespace in nsBindings + additional_bindings:
store.bind(prefix, namespace)
super().__init__(store=store)
class VirtualGraph(Queryable):
def __init__(self, store):
if not isinstance(store, InMemoryAggregatedGraph):
raise Exception()
self.store = store
def query(self, querystring):
return self.store.query(querystring)
def update(self, querystring):
return self.store.update(querystring)
class Quit(object):
"""Quit object which keeps the store syncronised with the repository."""
gcProcess = None
def __init__(self, config, repository, store):
self.config = config
self.repository = repository
self.store = store
self._commits = Cache()
self._blobs = Cache()
self._graphconfigs = Cache()
def _exists(self, cid):
uri = QUIT['commit-' + cid]
for _ in self.store.store.quads((uri, None, None, QUIT.default)):
return True
return False
def getDefaultBranch(self):
"""Get the default branch for the Git repository which should be used in the application.
This will be the default branch as configured, if it is configured or the current HEAD of
the repository if the HEAD is born. Will default to "master"
Returns:
A string containing the branch name.
"""
config_default_branch = self.config.getDefaultBranch()
if config_default_branch:
return config_default_branch
repository_current_head = self.repository.current_head
if repository_current_head:
return repository_current_head
try:
git_config_default_branch = pygit2.Config.get_global_config()['init.defaultBranch']
if git_config_default_branch:
return git_config_default_branch
except KeyError:
pass
return "master"
def rebuild(self):
for context in self.store.contexts():
self.store.remove((None, None, None), context)
self.syncAll()
def syncAll(self):
"""Synchronize store with repository data."""
def traverse(commit, seen):
commits = []
merges = []
while True:
id = commit.id
if id in seen:
break
seen.add(id)
if self._exists(id):
break
commits.append(commit)
parents = commit.parents
if not parents:
break
commit = parents[0]
if len(parents) > 1:
merges.append((len(commits), parents[1:]))
for idx, parents in reversed(merges):
for parent in parents:
commits[idx:idx] = traverse(parent, seen)
return commits
seen = set()
for name in self.repository.tags_or_branches:
initial_commit = self.repository.revision(name)
commits = traverse(initial_commit, seen)
while commits:
commit = commits.pop()
self.syncSingle(commit)
def syncSingle(self, commit):
if not self._exists(commit.id):
self.changeset(commit)
def instance(self, reference, force=False):
"""Create and return dataset for a given commit id.
Args:
reference: commit id or reference of the commit to retrieve
force: force to get the dataset from the git repository instead of the internal cache
Returns:
Instance of VirtualGraph representing the respective dataset
"""
default_graphs = []
commitid = None
if reference:
commit = self.repository.revision(reference)
commitid = commit.id
for blob in self.getFilesForCommit(commit):
try:
(name, oid) = blob
(f, context) = self.getFileReferenceAndContext(blob, commit)
internal_identifier = context.identifier + '-' + str(oid)
if force or not self.config.hasFeature(Feature.Persistence):
g = context
else:
g = RewriteGraph(
self.store.store.store,
internal_identifier,
context.identifier
)
default_graphs.append(g)
except KeyError:
pass
instance = InMemoryAggregatedGraph(
graphs=default_graphs, identifier='default')
return VirtualGraph(instance), commitid
def changeset(self, commit):
if (
| not self.config.hasFeature(Feature.Persistence)
) and (
not self.config.hasFeature(Feature.Provenance)
):
return
g = self.store.store
if self.config.hasFeature(Feature.Provenance):
| role_author_uri = QUIT['Author']
role_committer_uri = QUIT['Committer']
g.add((role_author_uri, is_a, PROV['Role']))
g.add((role_committer_uri, is_a, PROV['Role']))
# Create the commit
i1, commitid = self.instance(commit.id, True)
commit_uri = QUIT['commit-' + commit.id]
if self.config.hasFeature(Feature.Provenance):
g.add((commit_uri, is_a, PROV['Activity']))
if 'Source' in commit.properties.keys():
g.add((commit_uri, is_a, QUIT['Import']))
g.add((commit_uri, is_a, PROV['Usage']))
sources = commit.properties['Source'].strip()
for source in re.findall("<.*?>", sources):
g.add((commit_uri, QUIT['dataSource'], URIRef(source.strip("<>"))))
if 'Query' in commit.properties.keys():
g.add((commit_uri, is_a, QUIT['Transformation']))
g.add((co |
kreuks/liven | nlp/persistor.py | Python | apache-2.0 | 1,267 | 0.001579 | import boto3
import botocore
import tarfile
import os
import shutil
class Persistor(object):
def __init__(self, data_dir, aws_region, bucket_name):
self.data_dir = data_dir
self.s3 = boto3.resource('s3', region_name=aws_region)
self.bucket_name = bucket_name
try:
self.s3.create | _bucket(Bucket=bucket_name, CreateBucketConfiguration={'LocationConstraint': aws_region})
except botocore.exceptions.ClientError, e:
pass # bucket already exists
self.bucket = self.s3.Bucket(bucket_name)
def send_tar_to_s3(self, target_dir):
if not os.path.isdir(target_dir):
raise ValueError('target_dir %r not found.' % target_dir)
base_name = os.path.b | asename(target_dir)
base_dir = os.path.dirname(target_dir)
tarname = shutil.make_archive(base_name, 'gztar', root_dir=base_dir, base_dir=base_name)
filekey = os.path.basename(tarname)
self.s3.Object(self.bucket_name, filekey).put(Body=open(tarname, 'rb'))
def fetch_and_extract(self, filename):
with open(filename, 'wb') as f:
self.bucket.download_fileobj(filename, f)
with tarfile.open(filename, "r:gz") as tar:
tar.extractall(self.data_dir)
|
rockerBOO/aquaticore | aquaticore/references/models.py | Python | mit | 542 | 0.020295 | from django.db import models
from aquati | core.databases.models import Database
from aquaticore.authors.models import Author
class Reference(models.Model):
title = models.CharField(max_length=200)
source = models.CharField(max_length=200)
year = models.IntegerField(4)
author = models.ForeignKey('authors.Author')
database = models.ForeignKey('databases.Database')
created = models.DateTimeField('date | published')
modified = models.DateTimeField(auto_now=True, default='0000-00-00 00:00:00')
def __unicode__(self):
return self.title |
CertainlyUncertain/Kinetic-Gunner-Gunner-of-Angst | inputMgr.py | Python | gpl-3.0 | 8,769 | 0.007527 | # ========================== Start Copyright Notice ========================== #
# #
# Copyright 2014 F.D.I.S. #
# This file is part of Kinetic Gunner: Gunner of Angst #
# #
# For the latest version, please visit: #
# https://github.com/CertainlyUncertain/Kinetic-Gunner-Gunner-of-Angst #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
# =========================== End Copyright Notice =========================== #
# Input Manager -------------------------------------------------------------- #
import ogre.renderer.OGRE as ogre
import ogre.io.OIS as OIS
from vector import Vector3
import os
import time
class InputMgr(OIS.KeyListener, OIS.MouseListener, OIS.JoyStickListener):
''' Manages keyboard and mouse, with buffered and unbuffered input. '''
def __init__(self, engine):
''' Creates Input Listeners and Initializes Variables. '''
se | lf.engine = engine
OIS.KeyListener.__init__(self)
OIS.MouseListener.__init__(self)
OIS.JoyStickListener.__init__(self)
self.move = 1000
self.rotate = 25
self.selectionRadius = 100
self.MB_Left_Down = False
self.MB_Right_Down = False
print "Input Manager Created."
def ini | t(self):
''' Sets the Window and Creates Input System and Objects. '''
windowHandle = 0
renderWindow = self.engine.gfxMgr.root.getAutoCreatedWindow()
windowHandle = renderWindow.getCustomAttributeUnsignedLong("WINDOW")
paramList = [("WINDOW", str(windowHandle))]
if os.name == "nt":
#t = [("w32_mouse","DISCL_FOREGROUND"), ("w32_mouse", "DISCL_NONEXCLUSIVE")]
t = [("w32_mouse","DISCL_FOREGROUND"), ("w32_mouse", "DISCL_EXCLUSIVE")]
else:
t = [("x11_mouse_grab", "true"), ("x11_mouse_hide", "true")]
#t = [("x11_mouse_grab", "false"), ("x11_mouse_hide", "true")]
paramList.extend(t)
self.inputManager = OIS.createPythonInputSystem(paramList)
# Now InputManager is initialized for use. Keyboard and Mouse objects
# must still be initialized separately
self.keyboard = None
self.mouse = None
try:
self.keyboard = self.inputManager.createInputObjectKeyboard(OIS.OISKeyboard, True)
self.mouse = self.inputManager.createInputObjectMouse(OIS.OISMouse, True)
#Joystick
except Exception, e:
print "No Keyboard or mouse!!!!"
raise e
if self.keyboard:
self.keyboard.setEventCallback(self)
if self.mouse:
self.mouse.setEventCallback(self)
self.windowResized( renderWindow )
print "Input Manager Initialized."
def crosslink(self):
''' Links to other Managers. '''
pass
def tick(self, dtime):
''' Update keyboard and mouse. '''
self.keyboard.capture()
self.mouse.capture()
self.handleCamera(dtime)
self.handleModifiers(dtime)
# Quit
if self.keyboard.isKeyDown(OIS.KC_ESCAPE):
self.engine.keepRunning = False
pass
def stop(self):
''' Destory Input Objects and System. '''
self.inputManager.destroyInputObjectKeyboard(self.keyboard)
self.inputManager.destroyInputObjectMouse(self.mouse)
OIS.InputManager.destroyInputSystem(self.inputManager)
self.inputManager = None
print "Input Manager Stopped."
# Keyboard Listener ----------------------------------------------------- #
def keyPressed(self, evt):
'''Handles Toggleable Key Presses'''
# Swap Cameras (Between First-Person and Debug Views)
if self.keyboard.isKeyDown(OIS.KC_G):
self.engine.camMgr.swap()
# Pause ------------------------DEBUG-----------------------------------
if self.keyboard.isKeyDown(OIS.KC_SPACE):
time.sleep(10)
return True
def keyReleased(self, evt):
return True
def handleModifiers(self, dtime):
self.leftShiftDown = self.keyboard.isKeyDown(OIS.KC_LSHIFT)
self.leftCtrlDown = self.keyboard.isKeyDown(OIS.KC_LCONTROL)
pass
def handleCamera(self, dtime):
'''Move the camera using keyboard input.'''
# Forward
if self.keyboard.isKeyDown(OIS.KC_W):
self.engine.camMgr.transVector.z -= self.move
# Backward
if self.keyboard.isKeyDown(OIS.KC_S):
self.engine.camMgr.transVector.z += self.move
# Left
if self.keyboard.isKeyDown(OIS.KC_A):
self.engine.camMgr.transVector.x -= self.move
# Right
if self.keyboard.isKeyDown(OIS.KC_D):
self.engine.camMgr.transVector.x += self.move
# Up
if self.keyboard.isKeyDown(OIS.KC_3):
self.engine.camMgr.transVector.y += self.move
# Down
if self.keyboard.isKeyDown(OIS.KC_4):
self.engine.camMgr.transVector.y -= self.move
# Yaw
if self.keyboard.isKeyDown(OIS.KC_Q):
self.engine.camMgr.yawRot = -self.rotate
# Yaw
if self.keyboard.isKeyDown(OIS.KC_E):
self.engine.camMgr.yawRot = self.rotate
# Pitch
if self.keyboard.isKeyDown(OIS.KC_Z):
self.engine.camMgr.pitchRot = -self.rotate
# Pitch
if self.keyboard.isKeyDown(OIS.KC_X):
self.engine.camMgr.pitchRot = self.rotate
# Roll
if self.keyboard.isKeyDown(OIS.KC_R):
self.engine.camMgr.rollRot = self.rotate
# Roll
if self.keyboard.isKeyDown(OIS.KC_V):
self.engine.camMgr.rollRot = -self.rotate
pass
# MouseListener --------------------------------------------------------- #
def mouseMoved(self, evt):
currMouse = self.mouse.getMouseState()
self.engine.camMgr.yawRot += currMouse.X.rel
self.engine.camMgr.pitchRot += currMouse.Y.rel
return True
def mousePressed(self, evt, id):
#self.mouse.capture()
#self.ms = self.mouse.getMouseState()
#self.ms.width = self.engine.gfxMgr.viewPort.actualWidth
#self.ms.height = self.engine.gfxMgr.viewPort.actualHeight
#self.mousePos = (self.ms.X.abs/float(self.ms.width), self.ms.Y.abs/float(self.ms.height))
if id == OIS.MB_Left:
self.MB_Left_Down = True
elif id == OIS.MB_Right:
self.MB_Right_Down = True
return True
def mouseReleased(self, evt, id):
if id == OIS.MB_Left:
self.MB_Left_Down = False
elif id == OIS.MB_Right:
self.MB_Right_Down = False
return True
# JoystickListener ---------------------------- |
grahamhayes/designate | designate/api/v2/controllers/zones/tasks/exports.py | Python | apache-2.0 | 4,055 | 0 | # Copyright 2015 Rackspace Inc.
#
# Author: Tim Simmons <tim.simmons@rackspae.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pecan
from oslo_log import log as logging
from designate import exceptions
from designate import policy
from designate import utils
from designate.api.v2.controllers import rest
from designate.objects.adapters.api_v2.zone_export \
import ZoneExportAPIv2Adapter
LOG = logging.getLogger(__name__)
class ZoneExportController(rest.RestController):
@pecan.expose(template=None, content_type='text/dns')
@utils.validate_uuid('export_id')
def get_all(self, export_id):
context = pecan.request.environ['context']
target = {'tenant_id': context.tenant}
policy.check('zone_export', context, target)
export = self.central_api.get_zone_export(context, export_id)
if export.location and export.location.startswith('designate://'):
return self.zone_manager_api.\
render_zone(context, export['zone_id'])
else:
msg = 'Zone can not be exported synchronously'
raise exceptions.BadRequest(msg)
class ZoneExportCreateController(rest.RestController):
    """Handles POSTs that kick off a new asynchronous zone export task."""

    @pecan.expose(template='json:', content_type='application/json')
    @utils.validate_uuid('zone_id')
    def post_all(self, zone_id):
        """Create Zone Export"""
        request = pecan.request
        response = pecan.response
        context = request.environ['context']

        # Ask central to create the export task, then render it for the API.
        export = self.central_api.create_zone_export(context, zone_id)
        rendered = ZoneExportAPIv2Adapter.render(
            'API_v2', export, request=request)

        # 202: the export runs asynchronously; Location points at the task.
        response.status_int = 202
        response.headers['Location'] = rendered['links']['self']
        return rendered
class ZoneExportsController(rest.RestController):
    """Collection controller for zone export tasks (list/get/delete)."""

    # Fields clients may sort the collection by.
    SORT_KEYS = ['created_at', 'id', 'updated_at']

    # Sub-controller serving the rendered zonefile at .../export.
    export = ZoneExportController()

    @pecan.expose(template='json:', content_type='application/json')
    @utils.validate_uuid('export_id')
    def get_one(self, export_id):
        """Get a single Zone Export by id."""
        request = pecan.request
        context = request.environ['context']
        return ZoneExportAPIv2Adapter.render(
            'API_v2',
            self.central_api.get_zone_export(
                context, export_id),
            request=request)

    @pecan.expose(template='json:', content_type='application/json')
    def get_all(self, **params):
        """List Zone Exports"""
        request = pecan.request
        context = request.environ['context']
        # Pull paging/sorting out of the query string; SORT_KEYS whitelists
        # the sortable columns.
        marker, limit, sort_key, sort_dir = utils.get_paging_params(
            params, self.SORT_KEYS)
        # Extract any filter params.
        accepted_filters = ('status', 'message', 'zone_id', )
        criterion = self._apply_filter_params(
            params, accepted_filters, {})
        return ZoneExportAPIv2Adapter.render(
            'API_v2',
            self.central_api.find_zone_exports(
                context, criterion, marker, limit, sort_key, sort_dir),
            request=request)

    @pecan.expose(template='json:', content_type='application/json')
    @utils.validate_uuid('zone_export_id')
    def delete_one(self, zone_export_id):
        """Delete Zone Export"""
        request = pecan.request
        response = pecan.response
        context = request.environ['context']
        self.central_api.delete_zone_export(context, zone_export_id)
        # 204: deleted, no body.
        response.status_int = 204
        return ''
|
oocran/vbbu | monitor.py | Python | agpl-3.0 | 1,235 | 0.006478 | # monitor compute resources. Called from 'lib/oocran/oocran.c' with dependencies already imported and variables defined previously
cpu = 'cpu_' + NVF + ' value=%s' % psutil.cpu_percent()
dis | k = 'disk_' + NVF + ' value=%s' % psutil.disk_usage('/').percent
ram = 'ram_' + NVF + ' value=%s' % round(psutil.virtual_memory().used, 2)
last_up_down = up_down
upload = psutil | .net_io_counters(pernic=True)[interface][0]
download = psutil.net_io_counters(pernic=True)[interface][1]
t1 = time.time()
up_down = (upload, download)
try:
ul, dl = [(now - last) / (t1 - t0) / 1024.0
for now, last in zip(up_down, last_up_down)]
t0 = time.time()
except:
pass
network_in = 'network_in_' + NVF + ' value=%s' % ul
network_out = 'network_out_' + NVF + ' value=%s' % dl
requests.post("http://%s:8086/write?db=%s" % (IP, DB), auth=(USER, PASSWORD), data=cpu)
requests.post("http://%s:8086/write?db=%s" % (IP, DB), auth=(USER, PASSWORD), data=disk)
requests.post("http://%s:8086/write?db=%s" % (IP, DB), auth=(USER, PASSWORD), data=ram)
requests.post("http://%s:8086/write?db=%s" % (IP, DB), auth=(USER, PASSWORD), data=network_in)
requests.post("http://%s:8086/write?db=%s" % (IP, DB), auth=(USER, PASSWORD), data=network_out)
|
bcoca/ansible-modules-extras | cloud/amazon/lambda_facts.py | Python | gpl-3.0 | 13,282 | 0.003012 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import datetime
import sys
try:
import boto3
from botocore.exceptions import ClientError
HAS_BOTO3 = True
except ImportError:
HAS_BOTO3 = False
DOCUMENTATION = '''
---
module: lambda_facts
short_description: Gathers AWS Lambda function details as Ansible facts
description:
- Gathers various details related to Lambda functions, including aliases, versions and event source mappings.
Use module M(lambda) to manage the lambda function itself, M(lambda_alias) to manage function aliases and
M(lambda_event) to manage lambda event source mappings.
version_added: "2.2"
options:
query:
description:
- Specifies the resource type for which to gather facts. Leave blank to retrieve all facts.
required: true
choices: [ "aliases", "all", "config", "mappings", "policy", "versions" ]
default: "all"
function_name:
description:
- The name of the lambda function for which facts are requested.
required: false
default: null
aliases: [ "function", "name"]
event_source_arn:
description:
- For query type 'mappings', this is the Amazon Resource Name (ARN) of the Amazon Kinesis or DynamoDB stream.
default: null
required: false
author: Pierre Jodouin (@pjodouin)
requirements:
- boto3
extends_documentation_fragment:
- aws
'''
EXAMPLES = '''
---
# Simple example of listing all info for a function
- name: List all for a specific function
lambda_facts:
query: all
function_name: myFunction
register: my_function_details
# List all versions of a function
- name: List function versions
lambda_facts:
query: versions
function_name: myFunction
register: my_function_versions
# List all lambda function versions
- name: List all function
lambda_facts:
query: all
max_items: 20
- name: show Lambda facts
debug: var=lambda_facts
'''
RETURN = '''
---
lambda_facts:
description: lambda facts
returned: success
type: dict
lambda_facts.function:
description: lambda function list
returned: success
type: dict
lambda_facts.function.TheName:
description: lambda function information, including event, mapping, and version information
returned: success
type: dict
'''
def fix_return(node):
    """Recursively convert datetimes in a result tree to strings.

    Walks dicts and lists, replacing every ``datetime.datetime`` leaf with
    its ``str()`` form so the structure is JSON-serializable; all other
    values pass through unchanged.

    :param node: arbitrary nested structure returned by the AWS API
    :return: the same structure with datetimes stringified
    """
    if isinstance(node, datetime.datetime):
        return str(node)
    if isinstance(node, dict):
        return dict((key, fix_return(value)) for key, value in node.items())
    if isinstance(node, list):
        return [fix_return(element) for element in node]
    return node
def alias_details(client, module):
    """
    Returns list of aliases for a specified function.

    :param client: AWS API client reference (boto3)
    :param module: Ansible module reference
    :return dict:
    """
    function_name = module.params.get('function_name')
    # fail_json() exits the module, so this acts as a guard clause.
    if not function_name:
        module.fail_json(msg='Parameter function_name required for query=aliases.')

    # Optional paging parameters forwarded to the AWS call.
    api_params = dict()
    if module.params.get('max_items'):
        api_params['MaxItems'] = module.params.get('max_items')
    if module.params.get('next_marker'):
        api_params['Marker'] = module.params.get('next_marker')

    facts = dict()
    try:
        response = client.list_aliases(FunctionName=function_name, **api_params)
        facts['aliases'] = response['Aliases']
    except ClientError as e:
        # A missing function simply has no aliases; anything else is fatal.
        if e.response['Error']['Code'] == 'ResourceNotFoundException':
            facts['aliases'] = []
        else:
            module.fail_json(msg='Unable to get {0} aliases, error: {1}'.format(function_name, e))

    return {function_name: camel_dict_to_snake_dict(facts)}
def all_details(client, module):
    """
    Returns all lambda related facts.

    Aggregates config, alias, policy, version and mapping facts for one
    function, or just the per-function configs when no name is given.

    :param client: AWS API client reference (boto3)
    :param module: Ansible module reference
    :return dict:
    """
    # Paging makes no sense when aggregating every fact type at once.
    if module.params.get('max_items') or module.params.get('next_marker'):
        module.fail_json(msg='Cannot specify max_items nor next_marker for query=all.')
    lambda_facts = dict()
    function_name = module.params.get('function_name')
    if function_name:
        # Merge each fact category's per-function dict under one key.
        lambda_facts[function_name] = {}
        lambda_facts[function_name].update(config_details(client, module)[function_name])
        lambda_facts[function_name].update(alias_details(client, module)[function_name])
        lambda_facts[function_name].update(policy_details(client, module)[function_name])
        lambda_facts[function_name].update(version_details(client, module)[function_name])
        lambda_facts[function_name].update(mapping_details(client, module)[function_name])
    else:
        # No function specified: config_details returns facts for all functions.
        lambda_facts.update(config_details(client, module))
    return lambda_facts
def config_details(client, module):
    """
    Returns configuration details for one or all lambda functions.

    :param client: AWS API client reference (boto3)
    :param module: Ansible module reference
    :return dict:
    """
    lambda_facts = dict()
    function_name = module.params.get('function_name')
    if function_name:
        # Single-function mode: fetch just that function's configuration.
        try:
            lambda_facts.update(client.get_function_configuration(FunctionName=function_name))
        except ClientError as e:
            # Missing function yields an empty config; other errors are fatal.
            if e.response['Error']['Code'] == 'ResourceNotFoundException':
                lambda_facts.update(function={})
            else:
                module.fail_json(msg='Unable to get {0} configuration, error: {1}'.format(function_name, e))
    else:
        # List mode: optional paging parameters forwarded to list_functions.
        params = dict()
        if module.params.get('max_items'):
            params['MaxItems'] = module.params.get('max_items')
        if module.params.get('next_marker'):
            params['Marker'] = module.params.get('next_marker')
        try:
            lambda_facts.update(function_list=client.list_functions(**params)['Functions'])
        except ClientError as e:
            if e.response['Error']['Code'] == 'ResourceNotFoundException':
                lambda_facts.update(function_list=[])
            else:
                module.fail_json(msg='Unable to get function list, error: {0}'.format(e))
        # Re-key the flat list by function name, snake_casing each entry.
        functions = dict()
        for func in lambda_facts.pop('function_list', []):
            functions[func['FunctionName']] = camel_dict_to_snake_dict(func)
        return functions
    return {function_name: camel_dict_to_snake_dict(lambda_facts)}
def mapping_details(client, module):
"""
Returns all lambda event source mappings.
:param client: AWS API client reference (boto3)
:param module: Ansible module reference
:return dict:
"""
lambda_facts = dict()
params = dict()
function_name = module.params.get('function_name')
if function_name:
params['FunctionName'] = module.params.get('function_name')
if module.params.get('event_source_arn'):
params['EventSourceArn'] = module.params.get('event_source_arn')
if module.params.get('max_items'):
params['MaxItems'] = module.params.get('max_items')
if module.params.get('next_marker'):
params['Marker'] = module.params.get('next_marker')
try:
lambda_facts.update(mappings=client.list_event_source_mappings(**params)['EventSourceMappings'])
except ClientError as e:
if e.response['Error']['Code'] == 'ResourceNotFoundException':
lambda_facts.update(mappings=[])
else:
module.fail_ |
tailhook/pyzza | examples/pacman/main.py | Python | mit | 5,622 | 0.002134 | from layout import TopLevel, RoundRect, Widget, Layout, State, Rel, Constraint
from string import repr
from flash.display import Shape
from game import Frame, Keys
level = """
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
Xg....................................gX
X.XX.XXXXX.XX.XXX.XXXX.XXX.XX.XXXXX.XX.X
X.XX.XXXXX.XX.XXX.XXXX.XXX.XX.XXXXX.XX.X
X.XX.XXXXX.XX.XXX.XXXX.XXX.XX.XXXXX.XX.X
X.XX.......XX.XXX.XXXX.XXX.XX.......XX.X
X.XX.X.XXX.XX.XXX......XXX.XX.XXX.X.XX.X
X.XX.X.XXX.XX.XXX.XXXX.XXX.XX.XXX.X.XX.X
X.XX.X.XXX.XX.XXX.XXXX.XXX.XX.XXX.X.XX.X
X......................................X
X.XX.X.XXX.XX.XXX.XXXX.XXX.XX.XXX.X.XX.X
X.XX.X.XXX.XX.XXX.XXXX.XXX.XX.XXX.X.XX.X
X.XX.X.XXX.XX.XXX.c....XXX.XX.XXX.X.XX.X
X.XX.......XX.XXX.XXXX.XXX.XX.......XX.X
X.XX.XXXXX.XX.XXX.XXXX.XXX.XX.XXXXX.XX.X
X.XX.XXXXX.XX.XXX.XXXX.XXX.XX.XXXXX.XX.X
X.XX.XXXXX.XX.XXX.XXXX.XXX.XX.XXXXX.XX.X
Xg....................................gX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
"""
class Ghost(Shape):
    # A ghost enemy placed on the maze grid.  NOTE: this file is Pyzza (a
    # python-like dialect compiled to Flash); Shape comes from flash.display
    # and drawing goes through self.graphics.
    __slots__ = ('fx', 'fy')
    def __init__(self, x, y):
        # fx/fy are grid-cell coordinates, not pixels; Field.draw() scales them.
        self.fx = x
        self.fy = y
    def draw(self, width, height):
        # Trace the ghost's outline inside one cell of width x height pixels.
        # NOTE(review): no beginFill/lineStyle is set here, so the path is
        # presumably invisible as-is -- looks like a placeholder; confirm.
        g = self.graphics
        g.moveTo(width/4, height/4)
        g.lineTo(width*3/4, height/4)
        g.lineTo(width*3/4, height*3/8)
        g.lineTo(width/2, height*3/8)
        g.lineTo(width/2, height*5/8)
        g.lineTo(width*3/4, height*5/8)
        g.lineTo(width*3/4, height*3/4)
        g.lineTo(width/4, height*3/4)
class Meal( | Shape):
__slots__ = ('fx', 'fy', 'amount')
def __init__(self, x, y, amount=10):
self.fx = x
self.fy = y
self.amount = amount
def draw(self, width, height):
g = self.graphics
g.clear()
g.beginFill(0xFF0000)
g.drawCircle(width/2, height/2, min(width, height)/4)
g.endFill()
class Pacman(Shape):
    # The player-controlled character (Pyzza/Flash dialect).
    __slots__ = ('fx', 'fy')
    def __init__(self, x, y):
        # fx/fy are grid-cell coordinates; Field.draw() converts to pixels.
        self.fx = x
        self.fy = y
    def start(self):
        # Bind the arrow keys to named key states and hook the frame loop.
        Keys.register(Keys.LEFT, 'pacman_left')
        Keys.register(Keys.RIGHT, 'pacman_right')
        Keys.register(Keys.DOWN, 'pacman_down')
        Keys.register(Keys.UP, 'pacman_up')
        Frame.attach(self.frame)
    def stop(self):
        # Unhook from the per-frame callback.
        Frame.detach(self.frame)
    def frame(self, delta):
        # Move 50 pixels/second in each direction whose key is held.
        # Note this updates the display x/y directly, not fx/fy.
        if Keys.keys.pacman_left:
            self.x -= delta*50
        if Keys.keys.pacman_right:
            self.x += delta*50
        if Keys.keys.pacman_up:
            self.y -= delta*50
        if Keys.keys.pacman_down:
            self.y += delta*50
    def draw(self, width, height):
        # Yellow filled shape with a red outline, same outline path as Ghost.
        g = self.graphics
        g.clear()
        g.beginFill(0xFFFF00)
        g.lineStyle(1, 0xFF0000)
        g.moveTo(width/4, height/4)
        g.lineTo(width*3/4, height/4)
        g.lineTo(width*3/4, height*3/8)
        g.lineTo(width/2, height*3/8)
        g.lineTo(width/2, height*5/8)
        g.lineTo(width*3/4, height*5/8)
        g.lineTo(width*3/4, height*3/4)
        g.lineTo(width/4, height*3/4)
        g.endFill()
class Wall:
    # A solid maze cell.  Not a Shape: walls are drawn onto one shared,
    # bitmap-cached sprite owned by Field (see Field.wallsprite).
    __slots__ = ('fx', 'fy')
    def __init__(self, x, y):
        # fx/fy are grid-cell coordinates.
        self.fx = x
        self.fy = y
    def draw(self, graph, x, y, width, height):
        # Receives the shared Graphics object and the pixel rect explicitly.
        graph.beginFill(0x808080)
        graph.drawRect(x, y, width, height)
        graph.endFill()
class Field(Widget):
    # The maze: parses the ASCII level string and owns all game entities.
    # Legend: 'X' wall, '.' meal, 'g' ghost, 'c' the player.
    def __init__(self, data, name, states):
        super().__init__(name, states)
        self.ghosts = []
        self.pacman = None
        self.walls = {}   # 'pX_Y' -> Wall
        self.meals = {}   # 'pX_Y' -> Meal
        # All walls render into one Shape, cached as a bitmap since they
        # never change after creation.
        self.wallsprite = Shape()
        self.wallsprite.cacheAsBitmap = True
        self.addChild(self.wallsprite)
        self.field_width = 0
        self.field_height = 0
        y = 0
        # Walk the level string cell by cell (Pyzza strings expose the
        # ActionScript API: .length, .charAt, .indexOf).
        for line in values(data.split('\n')):
            x = 0
            for i in range(line.length):
                c = line.charAt(i)
                if ' \n\t\r'.indexOf(c) >= 0:
                    continue
                if c == '.':
                    self.meals['p{}_{}'.format(x, y)] = Meal(x, y)
                elif c == 'g':
                    self.ghosts.push(Ghost(x, y))
                elif c == 'c':
                    self.pacman = Pacman(x, y)
                elif c == 'X':
                    self.walls['p{}_{}'.format(x, y)] = Wall(x, y)
                x += 1
            self.field_width = max(x, self.field_width)
            # Only non-empty lines advance the row counter.
            if x:
                y += 1
        self.field_height = y
        # Add meals first, then ghosts, then the player (display z-order).
        for m in values(self.meals):
            self.addChild(m)
        for g in values(self.ghosts):
            self.addChild(g)
        self.addChild(self.pacman)
    def draw(self, width, height):
        super().draw(width, height)
        # Pixel size of one grid cell.
        w = width/self.field_width
        h = height/self.field_height
        # drawing walls
        wg = self.wallsprite.graphics
        wg.clear()
        for wall in values(self.walls):
            wall.draw(wg, w*wall.fx, h*wall.fy, w, h)
        # drawing other objects
        for m in values(self.meals):
            m.x = w*m.fx
            m.y = h*m.fy
            m.draw(w, h)
        for g in values(self.ghosts):
            g.x = w*g.fx
            g.y = h*g.fy
        p = self.pacman
        p.x = w*p.fx
        p.y = h*p.fy
        self.pacman.draw(w, h)
    def start(self):
        # Begin the game: enable keyboard control of the player.
        self.pacman.start()
@package('pacman')
class Main(TopLevel):
    # Application entry point: builds the layout with a single Field
    # spanning the whole stage, then starts the frame loop and key handling.
    def __init__(self):
        self.layout = Layout([
            Field(level, 'field', {
                'normal': State.parse(
                    'normal:(0,0)-(1,1)'),
                }),
            ])
        super().__init__()
        Frame.start(self, True)
        Keys.start(self.stage)
        # Kick off gameplay on the field widget created above.
        self.layout.mapping.field.start()
|
wrboyce/autolat | autolat/mobileme.py | Python | bsd-3-clause | 8,757 | 0.002626 | from datetime import datetime
import re
import time
import urllib
import simplejson as json
from actions import Action
from webservice import WebService
class MobileMe(WebService):
    """Client for Apple's MobileMe "Find My iPhone" web service (Python 2).

    The WebService base class drives the login form described by the
    loginform_* attributes; this class then discovers the devices on the
    account and exposes locate / message / remote-lock operations.
    """

    # Login form description consumed by the WebService base class.
    loginform_url = 'https://auth.me.com/authenticate'
    loginform_data = {
        'service': 'account',
        'ssoNamespace': 'primary-me',
        'reauthorize': 'Y',
        'returnURL': 'aHR0cHM6Ly9zZWN1cmUubWUuY29tL2FjY291bnQv',
    }
    loginform_id = 'LoginForm'
    loginform_user_field = 'username'
    loginform_pass_field = 'password'
    loginform_persist_field = 'keepLoggedIn'

    class MultipleDevicesFound(Exception):
        """Raised when an operation needs a device id but several exist."""
        pass

    def __init__(self, *args, **kwargs):
        super(MobileMe, self).__init__(*args, **kwargs)
        self.devices = set()
        self._devices = {}  # device id -> {'id', 'type', 'class', 'osver'}
        self._get_devices()

    def _js_post(self, url, data=None, headers=None):
        """POST with the AJAX headers secure.me.com expects.

        BUGFIX: ``data`` and ``headers`` were mutable default arguments and
        ``headers.update(...)`` mutated the shared default dict (and any
        caller-supplied dict) across calls; copy instead.
        """
        data = data if data is not None else {}
        headers = dict(headers) if headers else {}
        headers.update({
            'Accept': 'text/javascript, text/html, application/xml, text/xml, */*',
            'X-Requested-With': 'XMLHTTPRequest',
            'X-Prototype-Version': '1.6.0.3',
            'X-Mobileme-Version': '1.0',
            'X-Mobileme-Isc': self._cookiejar._cookies['.secure.me.com']['/']['isc-secure.me.com'].value,
        })
        return self._post(url, data, headers)

    def _auth(self, passwd):
        # After the base-class login, hit the account page to pick up the
        # Find-My-iPhone session cookies.
        super(MobileMe, self)._auth(passwd)
        data = {
            'anchor': 'findmyiphone',
            'lang': 'en',
        }
        self._get('https://secure.me.com/wo/WebObjects/Account2.woa', data, headers={'X-Mobileme-Version': '1.0'})

    def _get_devices(self):
        """Scrape the device list out of the DeviceMgmt page's javascript."""
        data = {'lang': 'en'}
        url = 'https://secure.me.com/wo/WebObjects/DeviceMgmt.woa?%s' % urllib.urlencode(data)
        html = self._js_post(url).read()
        # Each device appears as a "new Device('...', ...)" constructor call.
        for match in re.findall(r"new Device\(([^)]+)\)", html):
            _, id, type, cls, osver, _, _ = match.replace("'", '').split(', ')
            self._logger.info('Found device "%s"', id)
            self._add_device(id, type, cls, osver)

    def _add_device(self, id, type, cls, osver):
        # Register a device record keyed by its id.
        self._devices[id] = {
            'id': id,
            'type': type,
            'class': cls,
            'osver': osver,
        }

    def get_devices(self):
        """Return the list of known device ids."""
        return self._devices.keys()

    def get_device(self, id=None):
        """Return the device record for `id`.

        With no id, the account's single device is returned; raises
        MultipleDevicesFound when more than one device exists.
        """
        if id is None:
            if len(self._devices) == 1:
                id = self._devices.keys()[0]
            else:
                self._logger.error('Multiple devices found and no ID specified, bailing.')
                raise MobileMe.MultipleDevicesFound('Device ID must be specified.')
        return self._devices[id]

    def locate_device(self, device_id=None):
        """Request the device's location; returns a Location on success."""
        device = self.get_device(device_id)
        self._logger.info('Locating device "%(id)s"', device)
        body = {
            'deviceId': device['id'],
            'deviceOsVersion': device['osver'],
        }
        data = {'postBody': json.dumps(body)}
        resp = self._js_post('https://secure.me.com/wo/WebObjects/DeviceMgmt.woa/wa/LocateAction/locateStatus', data)
        if resp.code == 200:
            return Location(resp.read())
        self._logger.error('Locate device "%s" failed!', device['id'])

    def msg_device(self, msg, alarm=False, device_id=None):
        """Display `msg` on the device, optionally sounding an alarm.

        Returns True on success, None on failure (logged).
        """
        device = self.get_device(device_id)
        self._logger.info('Sending "%s" to device "%s" with%s alarm', msg, device['id'], 'out' if not alarm else '')
        body = {
            'deviceClass': device['class'],
            'deviceId': device['id'],
            'deviceOsVersion': device['osver'],
            'deviceType': device['type'],
            'message': msg,
            'playAlarm': 'Y' if alarm else 'N',
        }
        data = {'postBody': json.dumps(body)}
        resp = self._js_post('https://secure.me.com/wo/WebObjects/DeviceMgmt.woa/wa/SendMessageAction/sendMessage', data)
        resp_data = json.loads(resp.read())
        if resp_data['status'] == 1:
            return True
        self._logger.error('Sending message to device "%s" failed!', device['id'])
        self._logger.debug('%s', resp_data)

    def lock_device(self, pin, device_id=None):
        """Remotely lock the device with a 4-digit PIN.

        Returns True on success, None on failure (logged).
        """
        pin = str(pin)
        if len(pin) != 4 or not pin.isdigit():
            # BUGFIX: previously this only logged the error and then sent
            # the invalid PIN to Apple anyway; bail out instead.
            self._logger.error('PIN must be 4 digits')
            return
        device = self.get_device(device_id)
        self._logger.info('Locking device "%s"', device['id'])
        body = {
            'deviceClass': device['class'],
            'deviceId': device['id'],
            'deviceOsVersion': device['osver'],
            'devicePasscode': pin,
            'devicePinConstraint': 'Y',
            'deviceType': device['type'],
        }
        data = {'postBody': json.dumps(body)}
        resp = self._js_post('https://secure.me.com/wo/WebObjects/DeviceMgmt.woa/wa/SendRemoteLockAction/sendRemoteLock', data)
        resp_data = json.loads(resp.read())
        if resp_data['status'] == 1:
            return True
        self._logger.error('Locking device "%s" failed!', device['id'])
        self._logger.debug('%s', resp_data)
class Location(object):
    """ Holds location data returned from `MobileMe.WebService`
        Attributes:
            * accuracy (meters)
            * datetime
            * is_accurate
            * is_locate_finished
            * is_location_available
            * is_old_location_result
            * is_recent
            * latitude
            * longitude
            * status
            * status_string
            * timestamp
    """
    def __init__(self, json_data):
        # Copy each JSON field onto the instance under a snake_case name;
        # 'date' and 'time' are instead merged into a single datetime below.
        # NOTE: iteritems() is Python 2 only, consistent with this module.
        data = json.loads(json_data)
        for k, v in data.iteritems():
            if k not in ('date', 'time'):
                setattr(self, self._uncamel(k), v)
        # e.g. "January 5, 2011" + "10:30 AM" -> naive local datetime.
        self.datetime = datetime.strptime('%s %s' % (data['date'], data['time']), '%B %d, %Y %I:%M %p')
        self.timestamp = int(time.mktime(self.datetime.timetuple()))
    def __str__(self):
        # "(lat, lon) ~accuracy-m @ dd/mm/yy HH:MM:SS"
        return '(%s, %s) ~%sm @ %s' % (self.latitude, self.longitude, self.accuracy, self.datetime.strftime('%d/%m/%y %H:%M:%S'))
    def _uncamel(self, str):
        # camelCase -> snake_case ("isRecent" -> "is_recent").
        # NOTE(review): the parameter name shadows the builtin `str`.
        return ''.join('_%s' % c.lower() if c.isupper() else c for c in str)
class MobileMeAction(Action):
required_args = (
('m_user', 'MobileMe Username', False),
('m_pass', 'MobileMe Password', True),
)
def __init__(self, *args, **kwargs):
super(MobileMeAction, self).__init__(*args, **kwargs)
self.parser.add_argument('-m', '--mobileme-user', dest='m_user', help='MobileMe username, will be prompted for if not provided', metavar='MOBILEMEUSER')
self.parser.add_argument('-M', '--mobileme-pass', dest='m_pass', help='MobileMe password, will be prompted for if not provided', metavar='MOBILEMEPASS')
def _with_device(self, inst, func, kwargs):
try:
return func(**kwargs)
except MobileMe.MultipleDevicesFound:
print "Error: Multiple devices found in account:"
for id in inst.devices():
print "\t%s" % id
print
kwargs['device_id'] = raw_input("Select a device: ")
return func(**kwargs)
class MsgDeviceAction(MobileMeAction):
    # CLI action: display a message (optionally with alarm) on a device.
    keyword = 'msg_device'
    def setup(self):
        # Action-specific arguments on top of the MobileMe credentials.
        self.parser.add_argument('-D', '--device', dest='device', help='Device ID', metavar='DEVICE')
        self.parser.add_argument('-a', '--alarm', dest='alarm', action='store_true', help='Play a sound for 2 minutes with this message')
        self.parser.add_argument('message', nargs='+', help='Message to be sent to device')
    def main(self):
        # Log in, then send the message; _with_device handles the case of
        # multiple devices by prompting for an id.
        m = MobileMe(self.args.m_user, self.args.m_pass)
        kwargs = {
            'msg': ' '.join(self.args.message),
            'alarm': self.args.alarm,
            'device_id': self.args.device,
        }
        return self._with_device(m, m.msg_device, kwargs)
class LocateDeviceAction(MobileMeAction):
keyword = 'locate_device'
def setup(self):
self.parser.add_argument('-D', '--device', dest='device', help='Device ID', metavar='DEVICE')
def main(self):
m = MobileMe(self.args.m_user, self.args.m_pass)
kwargs = {'device_id': self.args.device}
print self._w |
ingadhoc/odoo-saas-manager | addons/saas_product_base/__openerp__.py | Python | agpl-3.0 | 1,644 | 0.002433 | # -*- coding: utf-8 -*-
##############################################################################
#
# Saas Manager
# Copyright (C) 2013 Sistemas ADHOC
# No email
#
# This program is fre | e software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY | WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# OpenERP/Odoo addon manifest: the server evaluates this file as a bare
# dict literal describing the module (dependencies, data files, metadata).
{ 'active': False,
    'author': u'Sistemas ADHOC',
    'category': u'base.module_category_knowledge_management',
    'demo_xml': [],
    'depends': ['base',],
    'description': u"""
Base Module for Saas Product Template Database
==============================================
""",
    'init_xml': [],
    'installable': True,
    'license': 'AGPL-3',
    'name': u'Saas Product Base Module',
    'test': [
        ],
    'update_xml': [
        'security/security.xml',
        'security/ir.model.access.csv',
        'views.xml',
        ],
    'version': u'1.1',
    'website': 'www.sistemasadhoc.com.ar'}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
nirmeshk/oh-mainline | vendor/packages/sphinx/sphinx/domains/javascript.py | Python | agpl-3.0 | 7,996 | 0.00025 | # -*- coding: utf-8 -*-
"""
sphinx.domains.javascript
~~~~~~~~~~~~~~~~~~~~~~~~~
The JavaScript domain.
:copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from sphinx import addnodes
from sphinx.domains import Domain, ObjType
from sphinx.locale import l_, _
from sphinx.directives import ObjectDescription
from sphinx.roles import XRefRole
from sphinx.domains.python import _pseudo_parse_arglist
from sphinx.util.nodes import make_refnode
from sphinx.util.docfields import Field, GroupedField, TypedField
class JSObject(ObjectDescription):
    """
    Description of a JavaScript object.
    """
    #: If set to ``True`` this object is callable and a `desc_parameterlist` is
    #: added
    has_arguments = False
    #: what is displayed right before the documentation entry
    display_prefix = None

    def handle_signature(self, sig, signode):
        # Parse a signature like "obj.name(arg1, arg2)" into prefix/arglist
        # and build the docutils signature nodes.  Returns (fullname, prefix)
        # which add_target_and_index() receives as name_obj.
        sig = sig.strip()
        if '(' in sig and sig[-1:] == ')':
            prefix, arglist = sig.split('(', 1)
            prefix = prefix.strip()
            arglist = arglist[:-1].strip()
        else:
            prefix = sig
            arglist = None
        # Split a dotted prefix into the containing object path and the name.
        if '.' in prefix:
            nameprefix, name = prefix.rsplit('.', 1)
        else:
            nameprefix = None
            name = prefix
        # Current object set by an enclosing directive (e.g. js:class).
        objectname = self.env.temp_data.get('js:object')
        if nameprefix:
            if objectname:
                # someone documenting the method of an attribute of the current
                # object? shouldn't happen but who knows...
                nameprefix = objectname + '.' + nameprefix
            fullname = nameprefix + '.' + name
        elif objectname:
            fullname = objectname + '.' + name
        else:
            # just a function or constructor
            objectname = ''
            fullname = name
        signode['object'] = objectname
        signode['fullname'] = fullname
        if self.display_prefix:
            signode += addnodes.desc_annotation(self.display_prefix,
                                                self.display_prefix)
        if nameprefix:
            signode += addnodes.desc_addname(nameprefix + '.', nameprefix + '.')
        signode += addnodes.desc_name(name, name)
        if self.has_arguments:
            if not arglist:
                signode += addnodes.desc_parameterlist()
            else:
                _pseudo_parse_arglist(signode, arglist)
        return fullname, nameprefix

    def add_target_and_index(self, name_obj, sig, signode):
        # Register the object as a cross-reference target and add an index
        # entry.  '$' is not valid in HTML ids, hence the '_S_' substitution.
        objectname = self.options.get(
            'object', self.env.temp_data.get('js:object'))
        fullname = name_obj[0]
        if fullname not in self.state.document.ids:
            signode['names'].append(fullname)
            signode['ids'].append(fullname.replace('$', '_S_'))
            signode['first'] = not self.names
            self.state.document.note_explicit_target(signode)
            objects = self.env.domaindata['js']['objects']
            if fullname in objects:
                # Same object documented twice: warn, pointing at the first.
                self.state_machine.reporter.warning(
                    'duplicate object description of %s, ' % fullname +
                    'other instance in ' +
                    self.env.doc2path(objects[fullname][0]),
                    line=self.lineno)
            objects[fullname] = self.env.docname, self.objtype
        indextext = self.get_index_text(objectname, name_obj)
        if indextext:
            self.indexnode['entries'].append(('single', indextext,
                                              fullname.replace('$', '_S_'),
                                              ''))

    def get_index_text(self, objectname, name_obj):
        # Human-readable index entry for this object, keyed on the directive
        # type; returns '' for unknown types (no index entry).
        name, obj = name_obj
        if self.objtype == 'function':
            if not obj:
                return _('%s() (built-in function)') % name
            return _('%s() (%s method)') % (name, obj)
        elif self.objtype == 'class':
            return _('%s() (class)') % name
        elif self.objtype == 'data':
            return _('%s (global variable or constant)') % name
        elif self.objtype == 'attribute':
            return _('%s (%s attribute)') % (name, obj)
        return ''
class JSCallable(JSObject):
    """Description of a JavaScript function, method or constructor."""
    # Callables render a parameter list in their signature.
    has_arguments = True
    # Field lists recognized in the directive body (:param:, :throws:, ...).
    doc_field_types = [
        TypedField('arguments', label=l_('Arguments'),
                   names=('argument', 'arg', 'parameter', 'param'),
                   typerolename='func', typenames=('paramtype', 'type')),
        GroupedField('errors', label=l_('Throws'), rolename='err',
                     names=('throws', ),
                     can_collapse=True),
        Field('returnvalue', label=l_('Returns'), has_arg=False,
              names=('returns', 'return')),
        Field('returntype', label=l_('Return type'), has_arg=False,
              names=('rtype',)),
    ]
class JSConstructor(JSCallable):
    """Like a callable but with a different prefix."""
    # Renders "class " before the signature (used by the js:class directive).
    display_prefix = 'class '
class JSXRefRole(XRefRole):
    # Cross-reference role for :js:func:/:js:class:/etc., handling the
    # leading-dot ("search outward") and tilde ("shorten title") prefixes.
    def process_link(self, env, refnode, has_explicit_title, title, target):
        # basically what sphinx.domains.python.PyXRefRole does
        refnode['js:object'] = env.temp_data.get('js:object')
        if not has_explicit_title:
            title = title.lstrip('.')
            target = target.lstrip('~')
            # "~obj.name" displays only the last path component as the title.
            if title[0:1] == '~':
                title = title[1:]
                dot = title.rfind('.')
                if dot != -1:
                    title = title[dot+1:]
        # A leading dot marks the reference as relative to the current object.
        if target[0:1] == '.':
            target = target[1:]
            refnode['refspecific'] = True
        return title, target
class JavaScriptDomain(Domain):
    """JavaScript language domain."""
    name = 'js'
    label = 'JavaScript'
    # if you add a new object type make sure to edit JSObject.get_index_string
    object_types = {
        'function': ObjType(l_('function'), 'func'),
        'class': ObjType(l_('class'), 'class'),
        'data': ObjType(l_('data'), 'data'),
        'attribute': ObjType(l_('attribute'), 'attr'),
    }
    directives = {
        'function': JSCallable,
        'class': JSConstructor,
        'data': JSObject,
        'attribute': JSObject,
    }
    roles = {
        'func': JSXRefRole(fix_parens=True),
        'class': JSXRefRole(fix_parens=True),
        'data': JSXRefRole(),
        'attr': JSXRefRole(),
    }
    initial_data = {
        'objects': {}, # fullname -> docname, objtype
    }

    def clear_doc(self, docname):
        # Drop every object registered by `docname` (called on re-read).
        # NOTE: deleting while iterating is safe here only because Python 2's
        # .items() returns a list; this module predates Python 3 support.
        for fullname, (fn, _) in self.data['objects'].items():
            if fn == docname:
                del self.data['objects'][fullname]

    def find_obj(self, env, obj, name, typ, searchorder=0):
        # Resolve `name` (optionally relative to object path `obj`) to a
        # registered object.  searchorder=1 prefers the object-relative match
        # (set for refs that started with a leading dot).
        if name[-2:] == '()':
            name = name[:-2]
        objects = self.data['objects']
        newname = None
        if searchorder == 1:
            if obj and obj + '.' + name in objects:
                newname = obj + '.' + name
            else:
                newname = name
        else:
            if name in objects:
                newname = name
            elif obj and obj + '.' + name in objects:
                newname = obj + '.' + name
        return newname, objects.get(newname)

    def resolve_xref(self, env, fromdocname, builder, typ, target, node,
                     contnode):
        # Turn a pending xref into a reference node, or None if unresolved.
        objectname = node.get('js:object')
        searchorder = node.hasattr('refspecific') and 1 or 0
        name, obj = self.find_obj(env, objectname, target, typ, searchorder)
        if not obj:
            return None
        return make_refnode(builder, fromdocname, obj[0],
                            name.replace('$', '_S_'), contnode, name)

    def get_objects(self):
        # Yield (name, dispname, type, docname, anchor, priority) for search.
        for refname, (docname, type) in self.data['objects'].iteritems():
            yield refname, refname, type, docname, \
                refname.replace('$', '_S_'), 1
|
hehongliang/tensorflow | tensorflow/contrib/distribute/python/parameter_server_strategy_test.py | Python | apache-2.0 | 29,905 | 0.006688 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for ParameterServerStrategy."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
import threading
from absl.testing import parameterized
from tensorflow.contrib.distribute.python import combinations
from tensorflow.contrib.distribute.python import multi_worker_test_base
from tensorflow.contrib.distribute.python import parameter_server_strategy
from tensorflow.contrib.distribute.python import strategy_test_lib
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.distribute import device_util
from tensorflow.python.distribute import distribution_strategy_context as ds_context
from tensorflow.python.distribute import multi_worker_util
from tensorflow.python.distribute import reduce_util
from tensorflow.python.distribute import values
from tensorflow.python.eager import backprop
from tensorflow.python.eager import context
from tensorflow.python.estimator import run_config
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_util
from tensorflow.python.layers import core
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import gradients
from tensorflow.python.ops import partitioned_variables
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.training import training_util
CHIEF = run_config.TaskType.CHIEF
WORKER = run_config.TaskType.WORKER
PS = run_config.TaskType.PS
def _get_replica_id_integer():
replica_id = ds_context.get_replica_context().replica_id_in_sync_group
if isinstance(replica_id, ops.Tensor):
replica_id = tensor_util.constant_value(replica_id)
return replica_id
class ParameterServerStrategyTestBase(
multi_worker_test_base.MultiWorkerTestBase):
def setUp(self):
self._result = 0
self._lock = threading.Lock()
self._init_condition = threading.Condition()
self._init_reached = 0
self._finish_condition = threading.Condition()
self._finish_reached = 0
self._sess_config = config_pb2.ConfigProto(allow_soft_placement=True)
super(ParameterServerStrategyTestBase, self).setUp()
def _get_test_objects(self, task_type, task_id, num_gpus):
distribution = parameter_server_strategy.ParameterServerStrategy(
num_gpus_per_worker=num_gpus)
if not task_type:
return distribution, '', self._sess_config
sess_config = copy.deepcopy(self._sess_config)
distribution.configure(
session_config=sess_config,
cluster_spec=self._cluster_spec,
task_type=task_type,
task_id=task_id)
return (distribu | tion, 'grpc://' + self._cluster_spec[WORKER][task_id],
sess_config)
def _test_device_assignment_distributed(self, task_type, task_id, num_gpus):
worker_device = '/job:%s/replica:0/task:%d' % (task_type, task_id)
d, _, sess_config = self._get_test_objects(task_type, task_id, num_gpus)
with ops.Graph().as_default(), \
self.cached_session(target=self._default_target,
config=sess_config) as sess, \
d.scope():
# Define a variable ou | tside the call_for_each_replica scope.
n = variable_scope.get_variable('n', initializer=10.0)
self.assertEqual(n.device, '/job:ps/task:0')
def model_fn():
if num_gpus == 0:
last_part_device = 'device:CPU:0'
else:
replica_id = _get_replica_id_integer()
last_part_device = ('device:GPU:%d' % replica_id)
a = constant_op.constant(1.0)
b = constant_op.constant(2.0)
c = a + b
self.assertEqual(a.device, worker_device + '/' + last_part_device)
self.assertEqual(b.device, worker_device + '/' + last_part_device)
self.assertEqual(c.device, worker_device + '/' + last_part_device)
# The device scope is ignored for variables but not for normal ops.
with ops.device('/job:worker/task:0'):
x = variable_scope.get_variable(
'x', initializer=10.0,
aggregation=variable_scope.VariableAggregation.SUM)
x_add = x.assign_add(c)
e = a + c
# The variable x is on the task 1 since the device_function has been
# called once before the model_fn.
self.assertEqual(x.device, '/job:ps/task:1')
self.assertEqual(x_add.device, x.device)
self.assertEqual(e.device,
'/job:worker/replica:0/task:0/%s' % last_part_device)
# The colocate_vars_with can override the distribution's device.
with d.colocate_vars_with(x):
y = variable_scope.get_variable(
'y', initializer=20.0,
aggregation=variable_scope.VariableAggregation.SUM)
# We add an identity here to avoid complaints about summing
# non-distributed values.
y_add = y.assign_add(array_ops.identity(x_add))
self.assertEqual(y.device, '/job:ps/task:1')
self.assertEqual(y_add.device, y.device)
self.assertEqual(y.device, x.device)
z = variable_scope.get_variable(
'z', initializer=10.0,
aggregation=variable_scope.VariableAggregation.SUM)
self.assertEqual(z.device, '/job:ps/task:0')
self.assertNotEqual(z.device, x.device)
with ops.control_dependencies([y_add]):
# We add an identity here to avoid complaints about summing
# non-distributed values.
z_add = z.assign_add(array_ops.identity(y))
with ops.control_dependencies([z_add]):
f = z + c
self.assertEqual(f.device, worker_device + '/' + last_part_device)
# The device scope would merge with the default worker device.
with ops.device('/CPU:1'):
g = e + 1.0
self.assertEqual(g.device, worker_device + '/device:CPU:1')
# Ths ops.colocate_with will be ignored when defining a variale but not
# for a normal tensor.
with ops.colocate_with(x):
u = variable_scope.get_variable('u', initializer=30.0)
v = variable_scope.get_variable('v', initializer=30.0)
h = f + 1.0
self.assertIn('/job:ps/', u.device)
self.assertIn('/job:ps/', v.device)
# u and v are on different parameter servers.
self.assertTrue(u.device != x.device or v.device != x.device)
self.assertTrue(u.device == x.device or v.device == x.device)
# Here h is not on one worker. Note h.device is canonical while x.device
# is not but.
self.assertIn('/job:ps/', h.device)
return y_add, z_add, f
y, z, f = d.call_for_each_replica(model_fn)
self.assertNotEqual(y, None)
self.assertNotEqual(z, None)
self.assertNotEqual(f, None)
if context.num_gpus() >= 1 and num_gpus <= 1:
variables.global_variables_initializer().run()
y_val, z_val, f_val = sess.run([y, z, f])
self.assertEqual(y_val, 33.0)
self.assertEqual(z_val, 43.0)
self.assertEqual(f_val, 46.0)
def _test_device_assignment_distributed_enable_partitioner(
self, task_type, task_id, num_gpus):
d, _, sess_config = self._get_test_objects(task_type, task_id, num_gpus)
nu |
patricklodder/dogecoin | qa/rpc-tests/prioritise_transaction.py | Python | mit | 6,684 | 0.003291 | #!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test PrioritiseTransaction code
#
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.mininode import COIN, MAX_BLOCK_BASE_SIZE
class PrioritiseTransactionTest(BitcoinTestFramework):
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 2
self.txouts = gen_return_txouts()
def setup_network(self):
self.nodes = []
self.is_network_split = False
self.nodes.append(start_node(0, self.options.tmpdir, ["-debug", "-printpriority=1"]))
self.nodes.append(start_node(1, self.options.tmpdir, ["-debug", "-printpriority=1"]))
connect_nodes(self.nodes[0], 1)
self.relayfee = self.nodes[0].getnetworkinfo()['relayfee']
def run_test(self):
utxo_count = 90
utxos = create_confirmed_utxos(self.relayfee, self.nodes[0], utxo_count)
# Note Dogecoin Core 1.14.5 wallet fee is 10x relay fee
base_fee = self.relayfee*100 * 10# our transactions are smaller than 100kb
txids = []
# Create 3 batches of transactions at 3 different fee rate levels
range_size = utxo_count // 3
for i in range(3):
txids.append([])
start_range = i * range_size
end_range = start_range + range_size
txids[i] = create_lots_of_big_transactions(self.nodes[0], self.txouts, utxos[start_range:end_range], end_range - start_range, (i+1)*base_fee)
# Make sure that the size of each group of transactions exceeds
# MAX_BLOCK_BASE_SIZE -- otherwise the test needs to be revised to create
# more transactions.
memp | ool = self.nodes[0].getrawmempool(True)
sizes = [0, 0, 0]
for i | in range(3):
for j in txids[i]:
assert(j in mempool)
sizes[i] += mempool[j]['size']
assert(sizes[i] > MAX_BLOCK_BASE_SIZE) # Fail => raise utxo_count
# add a fee delta to something in the cheapest bucket and make sure it gets mined
# also check that a different entry in the cheapest bucket is NOT mined (lower
# the priority to ensure its not mined due to priority)
self.nodes[0].prioritisetransaction(txids[0][0], 0, int(3*base_fee*COIN))
self.nodes[0].prioritisetransaction(txids[0][1], -1e15, 0)
self.nodes[0].generate(1)
mempool = self.nodes[0].getrawmempool()
print("Assert that prioritised transaction was mined")
assert(txids[0][0] not in mempool)
assert(txids[0][1] in mempool)
high_fee_tx = None
for x in txids[2]:
if x not in mempool:
high_fee_tx = x
# Something high-fee should have been mined!
assert(high_fee_tx != None)
# Add a prioritisation before a tx is in the mempool (de-prioritising a
# high-fee transaction so that it's now low fee).
self.nodes[0].prioritisetransaction(high_fee_tx, -1e15, -int(2*base_fee*COIN))
# Add everything back to mempool
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
# Check to make sure our high fee rate tx is back in the mempool
mempool = self.nodes[0].getrawmempool()
assert(high_fee_tx in mempool)
# Now verify the modified-high feerate transaction isn't mined before
# the other high fee transactions. Keep mining until our mempool has
# decreased by all the high fee size that we calculated above.
while (self.nodes[0].getmempoolinfo()['bytes'] > sizes[0] + sizes[1]):
self.nodes[0].generate(1)
# High fee transaction should not have been mined, but other high fee rate
# transactions should have been.
mempool = self.nodes[0].getrawmempool()
print("Assert that de-prioritised transaction is still in mempool")
assert(high_fee_tx in mempool)
for x in txids[2]:
if (x != high_fee_tx):
assert(x not in mempool)
# Create a free, low priority transaction. Should be rejected.
utxo_list = self.nodes[0].listunspent()
assert(len(utxo_list) > 0)
utxo = utxo_list[0]
inputs = []
outputs = {}
inputs.append({"txid" : utxo["txid"], "vout" : utxo["vout"]})
outputs[self.nodes[0].getnewaddress()] = utxo["amount"] - self.relayfee
raw_tx = self.nodes[0].createrawtransaction(inputs, outputs)
tx_hex = self.nodes[0].signrawtransaction(raw_tx)["hex"]
txid = self.nodes[0].sendrawtransaction(tx_hex)
# A tx that spends an in-mempool tx has 0 priority, so we can use it to
# test the effect of using prioritise transaction for mempool acceptance
inputs = []
inputs.append({"txid": txid, "vout": 0})
outputs = {}
outputs[self.nodes[0].getnewaddress()] = utxo["amount"] - self.relayfee
raw_tx2 = self.nodes[0].createrawtransaction(inputs, outputs)
tx2_hex = self.nodes[0].signrawtransaction(raw_tx2)["hex"]
tx2_id = self.nodes[0].decoderawtransaction(tx2_hex)["txid"]
try:
self.nodes[0].sendrawtransaction(tx2_hex)
except JSONRPCException as exp:
assert_equal(exp.error['code'], -26) # insufficient fee
assert(tx2_id not in self.nodes[0].getrawmempool())
else:
assert(False)
# This is a less than 1000-byte transaction, so just set the fee
# to be the minimum for a 1000 byte transaction and check that it is
# accepted.
self.nodes[0].prioritisetransaction(tx2_id, 0, int(self.relayfee*COIN))
print("Assert that prioritised free transaction is accepted to mempool")
assert_equal(self.nodes[0].sendrawtransaction(tx2_hex), tx2_id)
assert(tx2_id in self.nodes[0].getrawmempool())
# Test that calling prioritisetransaction is sufficient to trigger
# getblocktemplate to (eventually) return a new block.
mock_time = int(time.time())
self.nodes[0].setmocktime(mock_time)
template = self.nodes[0].getblocktemplate()
self.nodes[0].prioritisetransaction(txid, 0, -int(self.relayfee*COIN))
self.nodes[0].setmocktime(mock_time+10)
new_template = self.nodes[0].getblocktemplate()
assert(template != new_template)
if __name__ == '__main__':
PrioritiseTransactionTest().main()
|
TaiSakuma/AlphaTwirl | alphatwirl/selection/factories/LambdaStrFactory.py | Python | bsd-3-clause | 330 | 0.024242 | # Tai Sakuma <tai.sakuma@cern.ch>
##__________________________________________________________________||
def LambdaStrFactory(lambda_str, LambdaStrClass, name = N | one, **kargs):
return LambdaStrClass(lambda_str = lambda_str.format(**kargs), name = name)
##_________________ | _________________________________________________||
|
infobloxopen/infoblox-netmri | infoblox_netmri/api/remote/models/access_provisioning_alerts_rules_grid_remote.py | Python | apache-2.0 | 1,565 | 0.004473 | from ..remote import RemoteModel
class AccessProvisioningAlertsRulesGridRemote(RemoteModel):
"""
list of alerts
| ``id:`` The internal NetMRI identifier for this alert view.
| ``attribute type:`` number
| ``issue_meta_type_id:`` The name of the meta type of this alert. One of 'WhiteList', 'BlackList'.
| ``attribute type:`` string
| ``name:`` The name for this search alert.
| ``attribute type:`` string
| ``sources:`` The list of source network objects used as criteria for this search alert.
| ``attribute type:`` string
| ``destinations:`` The list of destination network objects used as criteria for this search alert.
| ``attribute type:`` string
| ``source_ports:`` The list of service objects used as criteria for this search alert.
| ``attribute type:`` string
| ``services:`` The list of source ports used as criteria for this search alert.
| ``attribute type:`` string
| ``access:`` The allowance of rule used as criteria for this search alert.
| ``attribute type:`` string
| ``workbook_id:`` The internal NetMRI id | entifier of the workbook of rule lists on which the searches for this alert should applied.
| ``attribute type:`` number
"""
properties = ("id",
"issue_meta_type_id",
"name",
"sources",
"destinations",
"source_ports",
"services",
| "access",
"workbook_id",
)
|
intelligent-agent/redeem | tests/gcode/test_G33.py | Python | gpl-3.0 | 2,397 | 0.007092 | from __future__ import absolute_import
import mock
import numpy as np
from .MockPrinter import MockPrinter
from redeem.Gcode import Gcode
class G33_Tests(MockPrinter):
def setUp(self):
pass
def test_G33_properties(self):
self.assertGcodeProperties("G33", is_buffered=True, is_async=True)
@mock.patch("redeem.gcodes.G33.Gcode")
def test_G33_abort_on_bad_factor_count(self, mock_Gcode):
bad_factor_nums = [-1, 0, 1, 2, 5, 7, 10]
for f in bad_factor_nums:
self.execute_gcode("G33 N" + str(f))
mock_Gcode.assert_not_called()
@mock.patch("redeem.gcodes.G33.Gcode")
def test_gcodes_G33_runs_G29_macro(self, mock_Gcode):
self.printer.processor.execute = mock.Mock() # prevent macro command execution
macro_gcodes = self.printer.config.get("Macros", "G29").split("\n")
self.execute_gcode("G33 N3")
""" compare macro from config, to macro commands created using Gcode() inside G33.execute """
for i, v in enumerate(macro_gcodes):
self.assertEqual(v, mock_Gcode.call_args_list[i][0][0]["message"])
def test_gcodes_G33_correct_args_to_autocalibrate_delta_printer(self):
""" Set probe offset to 0. Since processor.execute has been mocked the G33 code will not actually execute G30 commands """
offset_z = self.printer.config.set('Probe', 'offset_z', str(0.000))
self.printer.probe_points = [ # roughly 120 degree equalater | al triangle, 30mm off the deck
{0.080, 0.0, 0.030}, {-0.040, 0.070, 0.030}, {-0.040, -0.070, 0.030}
]
self.printer.probe_heights = [0.031, 0.032, 0.033] # imoderate (arbitrary) build plate angle
self.printer.processor.execute = mock.Mock() # prevent macro command execution
self.printer.path_planner.autocalibrate_delta_printer = mock.Mock()
self.execute_gcode("G33 N3")
""" retrieve args passed to autocalibrate_delta_printer and | compare to expected """
autocal_call_args = self.printer.path_planner.autocalibrate_delta_printer.call_args[0]
self.assertEqual(autocal_call_args[0], 3)
self.assertEqual(autocal_call_args[1], False)
np.testing.assert_array_equal(
autocal_call_args[2],
np.array([set([0.0, 0.03, 0.08]),
set([-0.04, 0.070, 0.03]),
set([-0.04, -0.070, 0.03])]))
np.testing.assert_array_equal(
np.round(autocal_call_args[3], 3), np.array([0.031, 0.032, 0.033]))
|
hzhao/gensim | gensim/models/wrappers/ldamallet.py | Python | gpl-3.0 | 10,772 | 0.00557 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2014 Radim Rehurek <radimrehurek@seznam.cz>
# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html
"""
Python wrapper for Latent Dirichlet Allocation (LDA) from MALLET, the Java topic modelling
toolkit [1]_.
This module allows both LDA model estimation from a training corpus and inference of topic
distribution on new, unseen documents, using an (optimized version of) collapsed
gibbs sampling from MALLET.
MALLET's LDA training requires O(#corpus_words) of memory, keeping the entire corpus in RAM.
If you find yourself running out of memory, either decrease the `workers` constructor
parameter, or use `LdaModel` which needs only O(1) memory.
The wrapped model can NOT be updated with new documents for online training -- use gensim's `LdaModel` for that.
Example:
>>> model = gensim.models.wrappers.LdaMallet('/Users/kofola/mallet-2.0.7/bin/mallet', corpus=my_corpus, num_topics=20, id2word=dictionary)
>>> print model[my_vector] # print LDA topics of a document
.. [1] http://mallet.cs.umass.edu/
"""
import logging
import random
import tempfile
import os
from subprocess import call
import numpy
from gensim import utils, matutils
logger = logging.getLogger(__name__)
def read_doctopics(fname, eps=1e-6):
"""
Yield document topic vectors from MALLET's "doc-topics" format, as sparse gensim vectors.
"""
with utils.smart_open(fname) as fin:
next(fin) # skip the header line
for lineno, line in enumerate(fin):
parts = line.split()[2:] # skip "doc" and "source" columns
if len(parts) % 2 != 0:
raise RuntimeError("invalid doc topics format at line %i in %s" % (lineno + 1, fname))
doc = [(int(id), float(weight)) for id, weight in zip(parts[::2], parts[1::2]) if abs(float(weight)) > eps]
# explicitly normalize probs to sum up to 1.0, just to be sure...
weights = float(sum([weight for _, weight in doc]))
yield [] if weights == 0 else sorted((id, 1.0 * weight / weights) for id, weight in doc)
class LdaMallet(utils.SaveLoad):
"""
Class for LDA training using MALLET. Communication between MALLET and Python
takes place by passing around data files on disk and calling Java with subprocess.call().
"""
def __init__(self, mallet_path, corpus=None, num_topics=100, id2word=None, workers=4, prefix=None,
optimize_interval=0, iterations=1000):
"""
`mallet_path` is path to the mallet executable, e.g. `/home/kofola/mallet-2.0.7/bin/mallet`.
`corpus` is a gensim corpus, aka a stream of sparse document vectors.
`id2word` is a mapping between tokens ids and token.
`workers` is the number of threads, for parallel training.
`prefix` is the string prefix under which all data files will be stored; default: system temp + random filename prefix.
`optimize_interval` optimize hyperparameters every N iterations (sometimes leads to Java exception; 0 to switch off hyperparameter optimization).
`iterations` is the number of sampling iterations.
"""
self.mallet_path = mallet_path
self.id2word = id2word
if self.id2word is None:
logger.warning("no word id mapping provided; initializing from corpus, assuming identity")
self.id2word = utils.dict_from_corpus(corpus)
self.num_terms = len(self.id2word)
else:
self.num_terms = 0 if not self.id2word else 1 + max(self.id2word.keys())
if self.num_terms == 0:
raise ValueError("cannot compute LDA over an empty collection (no terms)")
self.num_topics = num_topics
if prefix is None:
rand_prefix = hex(random.randint(0, 0xffffff))[2:] + '_'
prefix = os.path.join(tempfile.gettempdir(), rand_prefix)
self.prefix = prefix
self.workers = workers
self.optimize_interval = optimize_interval
self.iterations = iterations
if corpus is not None:
self.train(corpus)
def finferencer(self):
return self.prefix + 'inferencer.mallet'
def ftopickeys(self):
return self.prefix + 'topickeys.txt'
def fstate(self):
return self.prefix + 'state.mallet.gz'
def fdoctopics(self):
return self.prefix + 'doctopics.txt'
def fcorpustxt(self):
return self.prefix + 'corpus.txt'
def fcorpusmallet(self):
return self.prefix + 'corpus.mallet'
def fwordweights(self):
return self.prefix + 'wordweights.txt'
def convert_input(self, corpus, infer=False):
"""
Serialize documents (lists of unicode tokens) to a temporary text file,
then convert that text file to MALLET format `outfile`.
"""
logger.info("serializing temporary corpus to %s" % self.fcorpustxt())
# write out the corpus in a file format that MALLET understands: one document per line:
# document id[SPACE]label (not used)[SPACE]whitespace delimited utf8-encoded tokens
with utils.smart_open(self.fcorpustxt(), 'wb') as fout:
for docno, doc in enumerate(corpus):
if self.id2word:
tokens = sum(([self.id2word[tokenid]] * int(cnt) for tokenid, cnt in doc), [])
else:
tokens = sum(([str(tokenid)] * int(cnt) for tokenid, cnt in doc), [])
fout.write(utils.to_utf8("%s 0 %s\n" % (docno, ' '.join(tokens))))
# convert the text file above into MALLET's internal format
cmd = self.mallet_path + " import-file --preserve-case --keep-sequence --remove-stopwords --token-regex '\S+' --input %s --output %s"
if infer:
cmd += ' --use-pipe-from ' + self.fcorpusmallet()
cmd = cmd % (self.fcorpustxt(), self.fcorpusmallet() + '.infer')
else:
cmd = cmd % (self.fcorpustxt(), self.fcorpusmallet())
logger.info("converting temporary corpus to MALLET format with %s" % cmd)
call(cmd, shell=True)
def train(self, corpus):
self.convert_input(corpus, infer=False)
cmd = self.mallet_path + " train-topics --input %s --num-topics %s --optimize-interval %s "\
"--num-threads %s --output-state %s --output-doc-topics %s --output-topic-keys %s "\
"--num-iterations %s --inferencer-filename %s"
cmd = cmd % (self.fcorpusmallet(), self.num_topics, self.optimize_interval, self.workers,
self.fstate(), self.fdoctopics(), self.ftopickeys(), self.iterations, self.finferencer())
# NOTE "--keep-sequence-bigrams" / "--use-ngrams true" poorer results + runs out of memory
logger.info("training MALLET LDA with %s" % cmd)
call(cmd, shell=True)
self.word_topics = self.load_word_topics()
def __getitem__(self, bow, iterations=100):
is_corpus, corpus = utils.is_corpus(bow)
if not is_corpus:
# query is a single document => make a corpus out of it
bow = [bow]
self.convert_input(bow, infer=True)
cmd = self.mallet_path + " infer-topics --input %s --inferencer %s --output-doc-topics %s --num-iterations %s"
cmd = cmd % (self.fcorpusmallet() + '.infer', self.finferencer(), self.fdoctopics() + '.infer', iterations)
logger.info("inferring topics with MALLET LDA '%s'" % cmd)
retval = call(cmd, shell=True)
| if retval != 0:
raise RuntimeError("MALLET failed with error %s on return" % retval)
result = list(read_doctopics(self.fdoctopics() + '.infer'))
return result if is_corpus else result[0]
def load_word_topics(self):
logger.info("loading assigned topics from %s" % self.fstate())
wordtopics = numpy.zeros((self.num_topics, self.num_terms), dtype=numpy.float32) |
with utils.smart_open(self.fstate()) as fin:
_ = next(fin) # header
self.alpha = numpy.array([float(val) for val in next(fin).split()[2:]])
assert len(self.alpha) == self.num_topics, "mismatch between MALLET vs. reques |
google-research/mint | mint/ctl/single_task_trainer.py | Python | apache-2.0 | 8,299 | 0.003735 | # Copyright 2021, Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A trainer object that can train models with a single output."""
from absl import logging
from third_party.tf_models import orbit
import tensorflow as tf
class IdentityMetric(tf.keras.metrics.Metric):
"""Keras metric to report value at any instant."""
def __init__(self, name, aggregation):
"""Constructor.
Args:
name: Name of the metric.
aggregation: A tf.VariableAggregation method that indicates how to
aggregate values across replicas.
"""
super(IdentityMetric, self).__init__(name=name)
self.value = self.add_weight(
name='/'.join([name, 'value']),
initializer='zeros',
aggregation=aggregation)
def update_state(self, current_value):
"""Update metrics.
Args:
current_value: A scalar value for the metric.
"""
self.value.assign(current_value)
def result(self):
return self.value
class SingleTaskTrainer(orbit.StandardTrainer):
"""Trains a single-output model on a given dataset.
This trainer will handle running a model with one output on a single
dataset. It will apply the provided loss function to the model's output
to calculate gradients and will apply them via the provided optimizer. It will
also supply the output of that model to one or more `tf.keras.metrics.Metric`
objects.
"""
def __init__(self,
train_dataset,
label_key,
model,
loss_fn,
optimizer,
metrics=None,
trainer_options=None,
summary_fn=None,
grad_clip_norm=0.):
"""Initializes a `SingleTaskTrainer` instance.
If the `SingleTaskTrainer` should run its model under a distribution
strategy, it should be created within that strategy's scope.
This trainer will also calculate metrics during training. The loss metric
is calculated by default, but other metrics can be passed to the `metrics`
arg.
Arguments:
train_dataset: A `tf.data.Dataset` or `DistributedDataset` that contains a
string-keyed dict of `Tensor`s.
label_key: The key corresponding to the label value in feature
dictionaries dequeued from `train_dataset`. This key will be removed
from the dictionary before it is passed to the model.
model: A `tf.Module` or Keras `Model` object to evaluate. It must accept a
`training` kwarg.
loss_fn: A per-element loss function of the form (target, output). The
output of this loss function will be reduced via `tf.reduce_mean` to
create the final loss. We recommend using the functions in the
`tf.keras.losses` package or `tf.keras.losses.Loss` objects with
`reduction=tf.keras.losses.reduction.NONE`.
optimizer: A `tf.keras.optimizers.Optimizer` instance.
metrics: A single `tf.keras.me | trics.Metric | ` object, or a list of
`tf.keras.metrics.Metric` objects.
trainer_options: An optional `orbit.utils.StandardTrainerOptions` object.
summary_fn: A function that adds tf.summary on model input and output
tensors.
grad_clip_norm: A float to clip the gradients by global norm.
"""
self.label_key = label_key
self.model = model
self.loss_fn = loss_fn
self.optimizer = optimizer
self.summary_fn = summary_fn
self.grad_clip_norm = grad_clip_norm
# Capture the strategy from the containing scope.
self.strategy = tf.distribute.get_strategy()
self.train_loss = IdentityMetric('training_loss',
tf.VariableAggregation.SUM)
self.task_loss = IdentityMetric('task_loss', tf.VariableAggregation.SUM)
self.regularization_loss = IdentityMetric('regularization_loss',
tf.VariableAggregation.SUM)
self.learning_rate = IdentityMetric(
'learning_rate', tf.VariableAggregation.ONLY_FIRST_REPLICA)
# We need self.metrics to be an iterable later, so we handle that here.
if metrics is None:
self.metrics = []
elif isinstance(metrics, list):
self.metrics = metrics
else:
self.metrics = [metrics]
super(SingleTaskTrainer, self).__init__(
train_dataset=train_dataset, options=trainer_options)
def train_loop_begin(self):
"""Actions to take once, at the beginning of each train loop."""
self.train_loss.reset_states()
self.task_loss.reset_states()
self.regularization_loss.reset_states()
self.learning_rate.reset_states()
for metric in self.metrics:
metric.reset_states()
def train_step(self, iterator):
"""A train step. Called multiple times per train loop by the superclass."""
def train_fn(inputs):
with tf.GradientTape() as tape:
# Extract the target value and delete it from the input dict, so that
# the model never sees it.
target = inputs.pop(self.label_key)
# Get the outputs of the model.
logging.info('*** Features ***')
for name in sorted(inputs.keys()):
logging.info(' name = %s', name)
output = self.model(inputs, training=True)
# Get the average per-batch loss and scale it down by the number of
# replicas. This ensures that we don't end up multiplying our loss by
# the number of workers - gradients are summed, not averaged, across
# replicas during the apply_gradients call.
loss = tf.reduce_mean(self.loss_fn(target, output))
loss = loss / self.strategy.num_replicas_in_sync
# Since we don't use compile/fit api for training, the only losses added
# to the model are regularization losses.
regularization_loss = 0
if self.model.losses:
regularization_loss = tf.add_n(self.model.losses)
regularization_loss = (
regularization_loss / self.strategy.num_replicas_in_sync)
total_loss = loss + regularization_loss
loss_dict = {
'total_loss': total_loss,
'loss:': loss,
'reg_loss': regularization_loss,
}
if self.summary_fn:
self.summary_fn(loss_dict, self.optimizer.iterations)
# Get the gradients by applying the loss to the model's trainable
# variables.
gradients = tape.gradient(total_loss, self.model.trainable_variables)
if self.grad_clip_norm > 0.:
logging.info('Clipping gradient by norm: {:.3f}'.format(
self.grad_clip_norm))
gradients, _ = tf.clip_by_global_norm(gradients, self.grad_clip_norm)
# Apply the gradients via the optimizer.
self.optimizer.apply_gradients(
list(zip(gradients, self.model.trainable_variables)))
# Update metrics.
self.train_loss.update_state(total_loss)
self.task_loss.update_state(loss)
self.regularization_loss.update_state(regularization_loss)
self.learning_rate.update_state(
self.optimizer.learning_rate(self.optimizer.iterations))
for metric in self.metrics:
metric.update_state(target, output)
# This is needed to handle distributed computation.
self.strategy.run(train_fn, args=(next(iterator),))
def train_loop_end(self):
"""Actions to take once after a training loop."""
with self.strategy.scope():
# Export the metrics.
metrics = {metric.name: metric.result() for metric in self.metrics}
metrics[self.train_loss.name] = self.train_loss.result()
metrics[self.task_loss.name] = self.task_loss.result()
metrics[self.regularization_loss.name] = self |
Astroua/plndist | pln_distrib.py | Python | gpl-2.0 | 1,319 | 0.001516 |
'''
Implementation in scipy form of the Double Pareto-Lognormal Distribution
'''
import numpy as np
from scipy.stats import rv_continuous, norm
def _pln_pdf(x, alpha, nu, tau2):
A1 = np.exp(alpha * nu + alpha ** 2 * tau2 / 2)
fofx = alpha * A1 * x ** (-alpha - 1) *\
norm.cdf((np.log(x) - nu - alpha * tau2) / np.sqrt(tau2))
return fofx
def _pln_cdf(x, alpha, nu, tau2):
A1 = np.exp(alpha * nu + alpha ** 2 * tau2 / 2)
term1 = norm.cdf((np.log(x) - nu) / np.sqrt(tau2))
term2 = x ** (-alpha) * A1 * \
norm.cdf((np.log(x) - nu - alpha * tau2) / np.sqrt(tau2))
return term1 - term2
def _pln_logpdf(x, alpha, nu, tau2):
return np.log(alpha) + alpha * nu + alpha * tau2 / 2 - \
(alpha + 1) * np.log(x) + \
norm.logcdf((np.lo | g(x) - nu - alpha * tau2) / np.sqrt(tau2))
def _pln_rawmoments(r, alpha, nu, tau2):
if alpha > r:
return alpha / (alpha - r) * np.exp(r*nu + r**2.*tau2/2)
else:
return np.NaN
class pln_gen(rv_continuous):
def _pdf(self, x, alpha, nu, tau2):
| return _pln_pdf(x, alpha, nu, tau2)
def _logpdf(self, x, alpha, nu, tau2):
return _pln_logpdf(x, alpha, nu, tau2)
def _cdf(self, x, alpha, nu, tau2):
return _pln_cdf(x, alpha, nu, tau2)
pln = pln_gen(name="pln", a=0.0)
|
thomas-hori/Repuge-NG | TeddyMapB.py | Python | mpl-2.0 | 864 | 0.016204 | from TeddyMapBase import TeddyMapBase
from TfGun import TfGun
class TeddyMapB(TeddyMapBase):
#Raw string (r""") because backslashes
# Lab "Other room"
coded_grid=r"""
/------------T-T----------------\
|............>-<................|
|............|%|................|
|............|.|................|
|............|.|................|
|..............|................|
|..............|................| |
|...............................|
|...............................|
|...............................|
`--------------^----------------'
"""
starting_pt=(14,6)
title_window="The Verres' Basement"
def handle_staircase(self,playerobj):
self.game.level_advance(playerobj)
def initialise | (self):
#Place TF-gun
self.gun=TfGun(self.game)
x,y=self.get_new_point()
self.gun.place(x,y,self)
|
woolfson-group/isambard | isambard/ampal/specifications/assembly_specs/coiledcoil.py | Python | mit | 8,988 | 0.000779 | """Contains code for modelling coiled coils and collagens."""
import numpy
from ampal.specifications.polymer_specs.helix import HelicalHelix, _helix_parameters
from ampal.protein import Assembly
basis_set_parameters = {
2: {'name': 'CC-Di', 'pitch': 225.8, 'radius': 5.07, 'interface_angle': 283.56,
'sequence': 'EIAALKQEIAALKKENAALKWEIAALKQ'},
3: {'name': 'CC-Tri', 'pitch': 194.0, 'radius': 6.34, 'interface_angle': 277.12,
'sequence': 'EIAAIKQEIAAIKKEIAAIKWEIAAIKQ'},
4: {'name': 'CC-Tet', 'pitch': 213.2, 'radius': 6.81, 'interface_angle': 279.20,
'sequence': 'ELAAIKQELAAIKKELAAIKWELAAIKQ'},
5: {'name': 'CC-Pent', 'pitch': 182.8, 'radius': 8.62, 'interface_angle': 271.58,
'sequence': 'KIEQILQKIEKILQKIEWILQKIEQILQ'},
6: {'name': 'CC-Hex', 'pitch': 228.4, 'radius': 9.13, 'interface_angle': 273.28,
'sequence': 'ELKAIAQELKAIAKELKAIAWELKAIAQ'},
7: {'name': 'CC-Hept', 'pitch': 328.6, 'radius': 9.80, 'interface_angle': 272.24,
'sequence': 'EIAQALKEIAKALKEIAWALKEIAQALK'},
}
class CoiledCoil(Assembly):
"""Models a coiled-coil protein.
Notes
-----
Instantiating this class using just an oligomeric state is used
to create simple reference models. To build more complex models
use the `from_parameters` classmethod.
Parameters
----------
n : int
The oligomeric state of the model to be built.
auto_build : bool, optional
If `True`, the model will be built as part of instantiation.
Attributes
----------
aas : [int]
Number of amino acids in each minor helix.
basis_set_sequences : [str]
Reference sequences for the oligomeric state that has been
selected, taken from the basis set of coiled coils.
major_radii : [float]
Radii of the | minor helices relative to the super-helical
axis.
major_pitches : [float]
Pitch values of the minor helices relative to the super-helical
axis.
phi_c_alphas :
Relative rotation values of the minor helices relative to
the super-helical axis.
minor_helix_types : [str]
Helix types of the minor helices. Can be: 'alpha', 'pi', '3-10',
'PPI', 'PP2', 'collagen'.
major_handedne | ss : str
Handedness of the super helix.
orientations :
Orientation of helices relative to the super-helical axis. 1
is parallel, -1 is anti-parallel.
minor_repeats : [float]
Hydrophobic repeats of the minor helices.
rotational_offsets :
Rotation of the minor helices relative to the super-helical
axis.
z_shifts : [float]
Translation of the minor helices along the super-helical axis.
oligomeric_state : int
Oligomeric state of the coiled coil.
"""
def __init__(self, n, auto_build=True):
super(CoiledCoil, self).__init__()
# parameters for each polypeptide
# basis set parameters if known, otherwise educated guesses.
if n in basis_set_parameters.keys():
parameters = basis_set_parameters[n]
radius = parameters['radius']
else:
# calculate radius based on extrapolated straight-line fit
# of n Vs radius for basis_set_parameters
radius = (n * 0.966) + 3.279
# other default values just copied from largest oligomer
# in basis_set_parameters.
parameters = basis_set_parameters[max(basis_set_parameters.keys())]
self.major_radii = [radius] * n
self.major_pitches = [parameters['pitch']] * n
self.basis_set_sequences = [parameters['sequence']] * n
self.aas = [len(parameters['sequence'])] * n
self.phi_c_alphas = [parameters['interface_angle']] * n
# alpha-helical barrel with heptad repeat as default.
self.major_handedness = ['l'] * n
self.minor_helix_types = ['alpha'] * n
self.orientations = [1] * n
self.minor_repeats = [3.5] * n
# parameters for the arrangement of each polypeptide
# (evenly distributed, no z-displacement).
self.rotational_offsets = [((i * 360.0) / n) for i in range(n)]
self.z_shifts = [0.0] * n
# parameters for the whole assembly
self.oligomeric_state = n
if auto_build:
self.build()
@classmethod
def from_polymers(cls, polymers):
"""Creates a `CoiledCoil` from a list of `HelicalHelices`.
Parameters
----------
polymers : [HelicalHelix]
List of `HelicalHelices`.
"""
n = len(polymers)
instance = cls(n=n, auto_build=False)
instance.major_radii = [x.major_radius for x in polymers]
instance.major_pitches = [x.major_pitch for x in polymers]
instance.major_handedness = [x.major_handedness for x in polymers]
instance.aas = [x.num_monomers for x in polymers]
instance.minor_helix_types = [x.minor_helix_type for x in polymers]
instance.orientations = [x.orientation for x in polymers]
instance.phi_c_alphas = [x.phi_c_alpha for x in polymers]
instance.minor_repeats = [x.minor_repeat for x in polymers]
instance.build()
return instance
@classmethod
def from_parameters(cls, n, aa=28, major_radius=None, major_pitch=None,
phi_c_alpha=26.42, minor_helix_type='alpha',
auto_build=True):
"""Creates a `CoiledCoil` from defined super-helical parameters.
Parameters
----------
n : int
Oligomeric state
aa : int, optional
Number of amino acids per minor helix.
major_radius : float, optional
Radius of super helix.
major_pitch : float, optional
Pitch of super helix.
phi_c_alpha : float, optional
Rotation of minor helices relative to the super-helical
axis.
minor_helix_type : float, optional
Helix type of minor helices. Can be: 'alpha', 'pi', '3-10',
'PPI', 'PP2', 'collagen'.
auto_build : bool, optional
If `True`, the model will be built as part of instantiation.
"""
instance = cls(n=n, auto_build=False)
instance.aas = [aa] * n
instance.phi_c_alphas = [phi_c_alpha] * n
instance.minor_helix_types = [minor_helix_type] * n
if major_pitch is not None:
instance.major_pitches = [major_pitch] * n
if major_radius is not None:
instance.major_radii = [major_radius] * n
if auto_build:
instance.build()
return instance
@classmethod
def tropocollagen(
cls, aa=28, major_radius=5.0, major_pitch=85.0, auto_build=True):
"""Creates a model of a collagen triple helix.
Parameters
----------
aa : int, optional
Number of amino acids per minor helix.
major_radius : float, optional
Radius of super helix.
major_pitch : float, optional
Pitch of super helix.
auto_build : bool, optional
If `True`, the model will be built as part of instantiation.
"""
instance = cls.from_parameters(
n=3, aa=aa, major_radius=major_radius, major_pitch=major_pitch,
phi_c_alpha=0.0, minor_helix_type='collagen', auto_build=False)
instance.major_handedness = ['r'] * 3
# default z-shifts taken from rise_per_residue of collagen helix
rpr_collagen = _helix_parameters['collagen'][1]
instance.z_shifts = [-rpr_collagen * 2, -rpr_collagen, 0.0]
instance.minor_repeats = [None] * 3
if auto_build:
instance.build()
return instance
def build(self):
"""Builds a model of a coiled coil protein using input parameters."""
monomers = [HelicalHelix(major_pitch=self.major_pitches[i],
major_radius=self.major_radii[i],
major_handedness=self.major_handedness[i],
aa=self.aas[i],
|
nipunagarwala/cs224s_final_project | run.py | Python | mit | 26,610 | 0.007967 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
import os
import math
import random
import time
import argparse
import pickle
import shutil
import autocorrect
import scipy
import warnings
from collections import defaultdict
import numpy as np
import tensorflow as tf
from time import gmtime, strftime
from code.config import Config
from code.models import SimpleEmgNN, MultiSharedEmgNN
from code.utils.preprocess import extract_all_features, prep_data, get_separate_mode_features
from code.utils.utils import make_batches, compute_wer, compute_cer
from code.utils.spell import correction
# os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
os.environ['CUDA_VISIBLE_DEVICES'] = '-1'
# Usage:
#
# Train a new model on the train directory in Config:
# python run.py [--phase train]
#
# Restore and keep training a new model on train directory in Config:
# python run.py --restore true [--phase train]
#
# Restore and test against the test dataset from Config
# python run.py --phase test [--restore true]
#
# See config.py for additional configuration parameters.
# A copy of config.py as well as labels.pkl is included in
# the Config.checkpoint_dir for posterity
def generate_all_str(sparse_matrix, label_encoder):
    """
    Given a sparse matrix in Tensorflow's format representing a decoded
    batch from beam search, return the string representation of all
    decodings in the batch.
    """
    idx, vals, dense_shape = sparse_matrix
    decoded_chars = label_encoder.inverse_transform(vals)
    per_example = [[] for _ in range(dense_shape[0])]
    # Relies on the entries being in row-major order.
    for (row, _timestep), character in zip(idx, decoded_chars):
        per_example[row].append(character)
    return ["".join(chars) for chars in per_example]
def generate_str_example(sparse_matrix, example_to_print, label_encoder):
    """
    Given a sparse matrix in Tensorflow's format, and an integer
    indicating the example (row) of interest, return the string
    representation of that example's decoded characters.
    """
    idx, vals, _shape = sparse_matrix
    pieces = []
    for (row, _timestep), value in zip(idx, vals):
        if row > example_to_print:
            # Entries are in row-major order, so nothing after this
            # point can belong to the requested example.
            break
        if row == example_to_print:
            pieces.append(label_encoder.inverse_transform(value))
    return "".join(pieces)
def print_details_on_example(example_to_print, split,
                             samples, lens, transcripts,
                             beam_decoded, beam_probs,
                             label_encoder,
                             show_autocorrect=False,
                             limit_beam_to=None):
    """
    Prints details of `example_to_print`: its input shape, active
    timesteps, target text, and the beam results with their
    probabilities (optionally paired with auto-corrected versions).

    Inputs:
        example_to_print: integer index of the example in the batch
            to drill down on
        split: string naming the data split the batch came from
            (e.g., "train", "dev", "test")
        samples: np.ndarray of shape (batch_size, max_timesteps,
            num_features)
        lens: np.ndarray of shape (batch_size,); each element is the
            number of active timesteps of the corresponding sample
        transcripts: sparse 3-tuple (indices, values, shape); in dense
            form the matrix is batch_size-by-max-length-of-any-truth-
            text-in-batch (see utils.sparse_tuple_from for format)
        beam_decoded: first output of tf.nn.ctc_beam_search_decoder
        beam_probs: second output of tf.nn.ctc_beam_search_decoder
        label_encoder: sklearn.preprocessing.LabelEncoder instance
        show_autocorrect: boolean; when True, performance can be very
            slow because each word is searched against a dictionary
        limit_beam_to: integer or None; None prints the entire beam
    """
    # TODO: include information about the mode of the sample (silent/audible/etc.)
    print("\nSample %d from a %s batch:" % (example_to_print, split))
    print(" Input shape (max_timesteps, n_features): ", end="")
    print(samples[example_to_print].shape)
    print(" Input active timesteps: %d" % lens[example_to_print])
    ex_truth = generate_str_example(transcripts, example_to_print, label_encoder)
    print(" Target: %s" % ex_truth)
    print(" Decoded (top %s, %s autocorrect): " %
        ("all" if limit_beam_to is None else str(limit_beam_to),
        "paired with" if show_autocorrect else "without" ))
    # One line per beam hypothesis, best-first, with its log-probability.
    for path_id, beam_result in enumerate(beam_decoded):
        if limit_beam_to and path_id >= limit_beam_to:
            break
        ex_prob = beam_probs[example_to_print][path_id]
        ex_str = generate_str_example(beam_result, example_to_print, label_encoder)
        print(" (%4.1f) %s" % (ex_prob, ex_str))
        if show_autocorrect:
            # Word-by-word spelling correction of the hypothesis.
            ex_str_corr = " ".join([autocorrect.spell(word) for word in ex_str.split()])
            print(" %s" % (ex_str_corr))
    print()
def create_model(args, session, restore, num_features, alphabet_size):
    """
    Returns a model, which has been initialized in `session`.

    Re-opens the latest checkpoint from Config.checkpoint_dir when
    `restore` is truthy; otherwise initializes fresh parameters.

    Inputs:
        args: parsed CLI arguments; `args.model` selects the
            architecture ('simple_emg' or 'shared_emg')
        session: active tf.Session to initialize/restore variables in
        restore: whether to restore from the latest checkpoint
        num_features: input feature dimensionality
        alphabet_size: number of output symbols

    Raises:
        ValueError: if `args.model` names an unknown architecture.
        RuntimeError: if `restore` is requested but no checkpoint
            exists, or if variable initialization fails.
    """
    print("Creating model")
    if args.model == 'simple_emg':
        model = SimpleEmgNN(Config, num_features, alphabet_size)
    elif args.model == 'shared_emg':
        model = MultiSharedEmgNN(Config, Config, Config, Config,
                                 num_features, alphabet_size)
    else:
        # Fail fast with a clear message instead of crashing later with
        # UnboundLocalError on `model`.
        raise ValueError("Unknown model type: %s" % args.model)
    if restore:
        ckpt = tf.train.latest_checkpoint(Config.checkpoint_dir)
        if ckpt:
            model.saver.restore(session, ckpt)
            print("Model restored.")
        else:
            # BUG FIX: tf.train.latest_checkpoint returns None when no
            # checkpoint exists, so the original message expression
            # (`ckpt.model_checkpoint_path`) raised AttributeError on
            # None and masked the real problem.
            raise RuntimeError(
                "Cannot restore from nonexistent checkpoint in %s"
                % Config.checkpoint_dir)
    else:
        session.run(tf.global_variables_initializer())
        try:
            session.run(tf.assert_variables_initialized())
        except tf.errors.FailedPreconditionError:
            raise RuntimeError("Not all variables initialized!")
    return model
def run_epoch(args, session, model, samples_tr, sample_lens_tr, transcripts_tr, samples_de,
sample_lens_de, transcripts_de, train_writer,
dev_writer, label_encoder, cur_dev_iter, cur_epoch, mode=None):
epoch_start = time.time()
epoch_losses = []
epoch_weres = []
dev_iter = cur_dev_iter
batched_samples_dev = []
batched_samples, batched_transcripts, \
batched_sample_lens = make_batches(samples_tr,
sample_lens_tr,
transcripts_tr,
Config.batch_size)
for iter, cur_batch_iter in enumerate(range(len(batched_samples))):
# Do training step
batch_start = time.time()
batch_cost, batch_wer, train_summary, beam_decoded, beam_probs = model.train_one_batch(
session,
batched_samples[cur_batch_iter],
batched_transcripts[cur_batch_iter],
batched_sample_lens[cur_batch_iter], mode=mode)
global_step = model.get_global_step(mode).eval()
should_train_report = (global_step % Config.steps_per_train_report == 0)
should_dev_report = (global_step % Config.steps_per_dev_report == 0)
should_checkpoint = ( |
Orav/kbengine | kbe/src/lib/python/Lib/ctypes/test/test_pep3118.py | Python | lgpl-3.0 | 8,041 | 0.006964 | import unittest
from ctypes import *
import re, struct, sys
if sys.byteorder == "little":
THIS_ENDIAN = "<"
OTHER_ENDIAN = ">"
else:
THIS_ENDIAN = ">"
OTHER_ENDIAN = "<"
def normalize(format):
    """Canonicalise a buffer format string for comparison: map the
    non-native endian marker onto the native one and strip all
    whitespace.  None normalises to the empty string.

    (The parameter name shadows the builtin but is kept for
    compatibility with existing callers.)
    """
    if format is None:
        return ""
    native = format.replace(OTHER_ENDIAN, THIS_ENDIAN)
    # Splitting on whitespace and re-joining removes every whitespace
    # character, equivalent to re.sub(r"\s", "", native).
    return "".join(native.split())
class Test(unittest.TestCase):
    """Check that ctypes objects expose PEP 3118 buffers whose format,
    shape and itemsize match the expectations tabulated in
    `native_types` and `endian_types` below."""
    def test_native_types(self):
        for tp, fmt, shape, itemtp in native_types:
            ob = tp()
            v = memoryview(ob)
            try:
                self.assertEqual(normalize(v.format), normalize(fmt))
                if shape:
                    self.assertEqual(len(v), shape[0])
                else:
                    self.assertEqual(len(v) * sizeof(itemtp), sizeof(ob))
                self.assertEqual(v.itemsize, sizeof(itemtp))
                self.assertEqual(v.shape, shape)
                # XXX Issue #12851: PyCData_NewGetBuffer() must provide strides
                # if requested. memoryview currently reconstructs missing
                # stride information, so this assert will fail.
                # self.assertEqual(v.strides, ())
                # they are always read/write
                self.assertFalse(v.readonly)
                if v.shape:
                    # Total element count across all dimensions must
                    # match the byte length of the buffer.
                    n = 1
                    for dim in v.shape:
                        n = n * dim
                    self.assertEqual(n * v.itemsize, len(v.tobytes()))
            # Bare except is deliberate here: print the offending ctype
            # for diagnosis, then re-raise so the test still fails.
            except:
                # so that we can see the failing type
                print(tp)
                raise
    def test_endian_types(self):
        for tp, fmt, shape, itemtp in endian_types:
            ob = tp()
            v = memoryview(ob)
            try:
                # Endian-annotated formats are compared verbatim (no
                # normalize()): the explicit byte order must survive.
                self.assertEqual(v.format, fmt)
                if shape:
                    self.assertEqual(len(v), shape[0])
                else:
                    self.assertEqual(len(v) * sizeof(itemtp), sizeof(ob))
                self.assertEqual(v.itemsize, sizeof(itemtp))
                self.assertEqual(v.shape, shape)
                # XXX Issue #12851
                # self.assertEqual(v.strides, ())
                # they are always read/write
                self.assertFalse(v.readonly)
                if v.shape:
                    n = 1
                    for dim in v.shape:
                        n = n * dim
                    self.assertEqual(n, len(v))
            except:
                # so that we can see the failing type
                print(tp)
                raise
# define some structure classes
class Point(Structure):
    _fields_ = [("x", c_long), ("y", c_long)]
class PackedPoint(Structure):
    # _pack_ = 2 forces 2-byte alignment; packed structs do not
    # implement the PEP 3118 format (see table below).
    _pack_ = 2
    _fields_ = [("x", c_long), ("y", c_long)]
class Point2(Structure):
    pass
# NOTE(review): the next line is seam-garbled in this dump; judging by
# Point above it should read `Point2._fields_ = [("x", c_long), ("y", c_long)]`.
Point2._fields_ = [(" | x", c_long), ("y", c_long)]
class EmptyStruct(Structure):
    _fields_ = []
class aUnion(Union):
    _fields_ = [("a", c_int)]
class StructWithArrays(Structure):
    _fields_ = [("x", c_long * 3 * 2), ("y", Point * 4)]
class Incomplete(Structure):
    pass
class Complete(Structure):
    pass
# Pointer type created while Complete is still opaque; the fields are
# filled in afterwards (forward-declaration pattern).
PComplete = POINTER(Complete)
Complete._fields_ = [("a", c_long)]
################################################################
#
# This table contains format strings as they look on little endian
# machines. The test replaces '<' with '>' on big endian machines.
#
native_types = [
# type format shape calc itemsize
## simple types
(c_char, "<c", (), c_char),
(c_byte, "<b", (), c_byte),
(c_ubyte, "<B", (), c_ubyte),
(c_short, "<h", (), c_short),
(c_ushort, "<H", (), c_ushort),
# c_int and c_uint may be aliases to c_long
#(c_int, "<i", (), c_int),
#(c_uint, "<I", (), c_uint),
(c_long, "<l", (), c_long),
(c_ulong, "<L", (), c_ulong),
# c_longlong and c_ulonglong are aliases on 64-bit platforms
#(c_longlong, "<q", None, c_longlong),
#(c_ulonglong, "<Q", None, c_ulonglong),
(c_float, "<f", (), c_float),
(c_double, "<d", (), c_double),
# c_longdouble may be an alias to c_double
(c_bool, "<?", (), c_bool),
(py_object, "<O", (), py_object),
## pointers
(POINTER(c_byte), "&<b", (), POINTER(c_byte)),
(POINTER(POINTER(c_long)), "&&<l", (), POINTER(POINTER(c_long))),
## arrays and pointers
(c_double * 4, "<d", (4,), c_double),
(c_float * 4 * 3 * 2, "<f", (2,3,4), c_float),
(POINTER(c_short) * 2, "&<h", (2,), POINTER(c_short)),
(POINTER(c_short) * 2 * 3, "&<h", (3,2,), POINTER(c_short)),
(POINTER(c_short * 2), "&(2)<h", (), POINTER(c_short)),
## structures and unions
(Point, "T{<l:x:<l:y:}", (), Point),
# packed structures do not implement the pep
(PackedPoint, "B", (), PackedPoint),
(Point2, "T{<l:x:<l:y:}", (), Point2),
(EmptyStruct, "T{}", (), EmptyStruct),
# the pep does't support unions
(aUnion, "B", (), aUnion),
# structure with sub-arrays
(StructWithArrays, "T{(2,3)<l:x:(4)T{<l:x:<l:y:}:y:}", (), StructWithArrays),
(StructWithArrays * 3, "T{(2,3)<l:x:(4)T{<l:x:<l:y:}:y:}", (3,), StructWithArrays),
## pointer to incomplete structure
(Incomplete, "B", (), Incomplete),
(POINTER(Incomplete), "&B", (), POINTER(Incomplete)),
# 'Complete' is a structure that starts incomplete, but is completed after the
# pointer type to it has been created.
(Complete, "T{<l:a:}", (), Complete),
# Unfortunately the pointer format string is not fixed...
(POINTER(Complete), "&B", (), POINTER(Complete)),
## other
# function signatures are not implemented
(CFUNCTYPE(None), "X{}", (), CFUNCTYPE(None)),
]
class BEPoint(BigEndianStructure):
_fields_ = [("x", c_long), ("y", c_long)]
class LEPoint(LittleEndianStructure):
_fields_ = [("x", c_long), ("y", c_long)]
################################################################
#
# This table contains format strings as they really look, on both big
# and little endian machines.
#
endian_types = [
(BEPoint, "T{>l:x:>l:y:}", (), BEPoint),
(LEPoint, "T{<l:x:<l:y:}", (), LEPoint),
(POINTER(BEPoint), "&T{>l:x:>l:y:}", (), POINTER(BEPoint)),
(POINTER(LEPoint), "&T{<l:x:<l:y:}", (), POINTER(LEPoint)),
]
if __name__ == "__main__":
unittest.main()
|
mihaip/NewsBlur | apps/profile/urls.py | Python | mit | 2,216 | 0.006769 | from django.conf.urls import *
from apps.profile import views
# URL routes for the profile app: preference getters/setters,
# payment/premium management, and account-maintenance endpoints.
urlpatterns = patterns('',
    url(r'^get_preferences?/?', views.get_preference),
    url(r'^set_preference/?', views.set_preference),
    url(r'^set_account_settings/?', views.set_account_settings),
    url(r'^get_view_setting/?', views.get_view_setting),
    url(r'^set_view_setting/?', views.set_view_setting),
    url(r'^clear_view_setting/?', views.clear_view_setting),
    url(r'^set_collapsed_folders/?', views.set_collapsed_folders),
    url(r'^paypal_form/?', views.paypal_form),
    url(r'^paypal_return/?', views.paypal_return, name='paypal-return'),
    url(r'^is_premium/?', views.profile_is_premium, name='profile-is-premium'),
    # NOTE(review): the next two routes include the same
    # paypal.standard.ipn.urls module under different prefixes --
    # confirm the webhooks route is intentionally an alias of IPN.
    url(r'^paypal_ipn/?', include('paypal.standard.ipn.urls'), name='paypal-ipn'),
    url(r'^paypal_webhooks/?', include('paypal.standard.ipn.urls'), name='paypal-webhooks'),
    url(r'^stripe_form/?', views.stripe_form, name='stripe-form'),
    url(r'^activities/?', views.load_activities, name='profile-activities'),
    url(r'^payment_history/?', views.payment_history, name='profile-payment-history'),
    url(r'^cancel_premium/?', views.cancel_premium, name='profile-cancel-premium'),
    url(r'^refund_premium/?', views.refund_premium, name='profile-refund-premium'),
    url(r'^never_expire_premium/?', views.never_expire_premium, name='profile-never-expire-premium'),
    url(r'^upgrade_premium/?', views.upgrade_premium, name='profile-upgrade-premium'),
    url(r'^save_ios_receipt/?', views.save_ios_receipt, name='save-ios-receipt'),
    # NOTE(review): two of the lines below contain " | " seam garbage
    # from the dump; preserved verbatim.
    url(r'^update_payment_history/?', views.update_payment_history, name='profil | e-update-payment-history'),
    url(r'^delete_account/?', views.delete_account, name='profile-delete-account'),
    url(r'^forgot_password_ret | urn/?', views.forgot_password_return, name='profile-forgot-password-return'),
    url(r'^forgot_password/?', views.forgot_password, name='profile-forgot-password'),
    url(r'^delete_starred_stories/?', views.delete_starred_stories, name='profile-delete-starred-stories'),
    url(r'^delete_all_sites/?', views.delete_all_sites, name='profile-delete-all-sites'),
    url(r'^email_optout/?', views.email_optout, name='profile-email-optout'),
)
|
emddudley/advent-of-code-solutions | 2015/day-6/advent-day-6-2.py | Python | unlicense | 914 | 0.015317 | # Advent of Code Solutions: Day 6, part 2
# https://github.com/emddudley/advent-of-code-solutions
import re
def twinkle_lights(instruction, lights):
    """Apply one brightness instruction to the `lights` grid in place.

    `instruction` looks like "turn on 0,0 through 2,2"; the four
    numeric tokens are the inclusive corner coordinates.  Unrecognised
    operations leave the grid unchanged.
    """
    # re.split with a capturing group keeps the digit runs in the
    # token list: [op, x1, ',', y1, ' through ', x2, ',', y2, ...].
    tokens = re.split(r'(\d+)', instruction)
    operation = tokens[0].strip()
    x1, y1, x2, y2 = (int(tokens[i]) for i in (1, 3, 5, 7))
    dispatch = {
        'turn on': lambda level: level + 1,
        'turn off': lambda level: max(level - 1, 0),
        'toggle': lambda level: level + 2,
    }
    adjust = dispatch.get(operation, lambda level: level)
    for col in range(x1, x2 + 1):
        for row in range(y1, y2 + 1):
            lights[col][row] = adjust(lights[col][row])
lights = [ [ 0 | ] * 1000 for | n in range(1000) ]
with open('input', 'r') as input:
for instruction in input:
twinkle_lights(instruction, lights)
print(sum(map(sum, lights)))
|
pressel/mpi4py | test/test_attributes.py | Python | bsd-2-clause | 6,481 | 0.002314 | from mpi4py import MPI
import mpiunittest as unittest
class BaseTestCommAttr(object):
    """Shared attribute-caching tests for MPI communicators; concrete
    subclasses supply `self.comm` in their setUp()."""
    keyval = MPI.KEYVAL_INVALID
    def tearDown(self):
        self.comm.Free()
        # Release the keyval if a test created one.
        if self.keyval != MPI.KEYVAL_INVALID:
            self.keyval = MPI.Comm.Free_keyval(self.keyval)
            self.assertEqual(self.keyval, MPI.KEYVAL_INVALID)
    def testAttr(self, copy_fn=None, delete_fn=None):
        self.keyval = MPI.Comm.Create_keyval(copy_fn, delete_fn)
        self.assertNotEqual(self.keyval, MPI.KEYVAL_INVALID)
        attrval = [1,2,3]
        self.comm.Set_attr(self.keyval, attrval)
        o = self.comm.Get_attr(self.keyval)
        self.assertTrue(o is attrval)
        dupcomm = self.comm.Clone()
        o = dupcomm.Get_attr(self.keyval)
        # copy_fn=True propagates the same attribute reference on
        # Clone; a falsy copy_fn means the attribute is not copied.
        if copy_fn is True:
            self.assertTrue(o is attrval)
        elif not copy_fn:
            self.assertTrue(o is None)
        dupcomm.Free()
        self.comm.Delete_attr(self.keyval)
        o = self.comm.Get_attr(self.keyval)
        self.assertTrue(o is None)
    def testAttrCopyFalse(self):
        self.testAttr(False)
    def testAttrCopyTrue(self):
        self.testAttr(True)
    def testAttrCopyDelete(self):
        # copy_fn clones the stored communicator, delete_fn frees it:
        # the attribute's lifetime is managed by the keyval callbacks.
        self.keyval = MPI.Comm.Create_keyval(
            copy_fn=lambda o, k, a: MPI.Comm.Clone(a),
            delete_fn=lambda o, k, a: MPI.Comm.Free(a))
        self.assertNotEqual(self.keyval, MPI.KEYVAL_INVALID)
        comm1 = self.comm
        dupcomm1 = comm1.Clone()
        comm1.Set_attr(self.keyval, dupcomm1)
        self.assertTrue(dupcomm1 != MPI.COMM_NULL)
        comm2 = comm1.Clone()
        dupcomm2 = comm2.Get_attr(self.keyval)
        self.assertTrue(dupcomm1 != dupcomm2)
        comm2.Free()
        self.assertTrue(dupcomm2 == MPI.COMM_NULL)
        self.comm.Delete_attr(self.keyval)
        self.assertTrue(dupcomm1 == MPI.COMM_NULL)
# Concrete fixtures: run the shared communicator-attribute tests
# against duplicates of COMM_WORLD and COMM_SELF.
class TestCommAttrWorld(BaseTestCommAttr, unittest.TestCase):
    def setUp(self):
        self.comm = MPI.COMM_WORLD.Dup()
class TestCommAttrSelf(BaseTestCommAttr, unittest.TestCase):
    def setUp(self):
        self.comm = MPI.COMM_SELF.Dup()
class BaseTestDatatypeAttr(object):
keyval = MPI.KEYVAL_INVALID
def tearDown(self):
self.datatype.Free()
if self.keyval != MPI.KEYVAL_INVALID:
self.keyval = MPI.Datatype.Free_keyval(self.keyval)
self.assertEqual(self.keyval, MPI.KEYVAL_INVALID)
def testAttr(self, copy_fn=None, delete_fn=None):
self.keyval = MPI.Datatype.Create_keyval(copy_fn, delete_fn)
self.assertNotEqual(self.keyval, MPI.KEYVAL_INVALID)
attrval = [1,2,3]
self.datatype.Set_attr(self.keyval, attrval)
o = self.datatype.Get_attr(self.keyval)
self.assertTrue(o is attrval)
dupdatatype = self.datatype.Dup()
o = dupdatatype.Get_attr(self.keyval)
if copy_fn is True:
self.assertTrue(o is attrval)
elif not copy_fn:
self.assertTrue(o is None)
dupdatatype.Free()
self.datatype.Delete_attr(self.keyval)
o = self.datatype.Get_attr(self.keyval)
self.assertTrue(o is None)
def testAttrCopyFalse(self):
self.testAttr(False)
def testAttrCopyTrue(self):
self.testAttr(True)
def testAttrCopyDelete(self):
self.keyval = MPI.Datatype.Create_keyval(
copy_fn=lambda o, k, a: MPI.Datatype.Dup(a),
delete_fn=lambda o, k, a: MPI.Datatype.Free(a))
self.assertNotEqual(self.keyval, MPI.KEYVAL_INVALID)
datatype1 = self.datatype
dupdatatype1 = datatype1.Dup()
datatype1.Set_attr(self.keyval, dupdatatype1)
self.assertTrue(dupdatatype1 != MPI.DATATYPE_NULL)
datatype2 = datatype1.Dup()
dupdatatype2 = datatype2.Get_attr(self.keyval)
self.assertTrue(dupdatatype1 != dupdatatype2)
datatype2.Free()
self.assertTrue(dupdatatype2 == MPI.DATATYPE_NULL)
self.datatype.Delete_attr(self.keyval)
self.assertTrue(dupdatatype1 == MPI.DATATYPE_NULL)
# Concrete fixtures: run the shared datatype-attribute tests against
# duplicates of a few representative predefined datatypes.
class TestDatatypeAttrBYTE(BaseTestDatatypeAttr, unittest.TestCase):
    def setUp(self):
        self.datatype = MPI.BYTE.Dup()
class TestDatatypeAttrINT(BaseTestDatatypeAttr, unittest.TestCase):
    def setUp(self):
        self.datatype = MPI.INT.Dup()
class TestDatatypeAttrFLOAT(BaseTestDatatypeAttr, unittest.TestCase):
    def setUp(self):
        self.datatype = MPI.FLOAT.Dup()
class TestWinAttr(unittest.TestCase):
keyval = MPI.KEYVAL_INVALID
def setUp(self):
self.win = MPI.Win.Create(MPI.BOTTOM, 1,
MPI.INFO_NULL, MPI.COMM_SELF)
def tearDown(self):
self.win.Free()
if self.keyval != MPI.KEYVAL_INVALID:
self.keyval = MPI.Win.Free_keyval(self.keyval)
self.assertEqual(self.keyval, MPI.KEYVAL_INVALID)
def testAttr(self, copy_fn=None, delete_fn=None):
self.keyval = MPI.Win.Create_keyval(copy_fn, delete_fn)
self.assertNot | Equal(self.keyval, MPI.KEYVAL_INVALID)
attrval = [1,2,3]
self.win.Set_attr | (self.keyval, attrval)
o = self.win.Get_attr(self.keyval)
self.assertTrue(o is attrval)
self.win.Delete_attr(self.keyval)
o = self.win.Get_attr(self.keyval)
self.assertTrue(o is None)
def testAttrCopyDelete(self):
self.keyval = MPI.Win.Create_keyval(
delete_fn=lambda o, k, a: MPI.Win.Free(a))
self.assertNotEqual(self.keyval, MPI.KEYVAL_INVALID)
newwin = MPI.Win.Create(MPI.BOTTOM, 1,
MPI.INFO_NULL, MPI.COMM_SELF)
#
self.win.Set_attr(self.keyval, newwin)
self.assertTrue(newwin != MPI.WIN_NULL)
#
self.win.Delete_attr(self.keyval)
self.assertTrue(newwin == MPI.WIN_NULL)
try:
k = MPI.Datatype.Create_keyval()
k = MPI.Datatype.Free_keyval(k)
except NotImplementedError:
del TestDatatypeAttrBYTE
del TestDatatypeAttrINT
del TestDatatypeAttrFLOAT
try:
k = MPI.Win.Create_keyval()
k = MPI.Win.Free_keyval(k)
except NotImplementedError:
del TestWinAttr
name, version = MPI.get_vendor()
if name == 'Open MPI':
if version < (1,5,2):
if MPI.Query_thread() > MPI.THREAD_SINGLE:
del BaseTestCommAttr.testAttrCopyDelete
del TestWinAttr.testAttrCopyDelete
if name == 'Platform MPI':
del TestWinAttr.testAttrCopyDelete
if __name__ == '__main__':
unittest.main()
|
google/mirandum | alerts/ytsubs/migrations/0003_auto_20160418_0455.py | Python | apache-2.0 | 2,254 | 0.002662 | # -*- coding: utf-8 -*-
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2 | .0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permi | ssions and
# limitations under the License.
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Auto-generated Django migration: converts SubEvent/SubUpdate into
    # multi-table-inheritance children of main.UpdaterEvent/main.Updater.
    # Their own `id` and now-inherited bookkeeping columns are dropped
    # and replaced by OneToOne parent-link pointers.
    dependencies = [
        ('main', '0006_updater_updaterevent'),
        ('ytsubs', '0002_auto_20160414_0317'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='subevent',
            name='id',
        ),
        migrations.RemoveField(
            model_name='subevent',
            name='sub_id',
        ),
        migrations.RemoveField(
            model_name='subupdate',
            name='failure_count',
        ),
        migrations.RemoveField(
            model_name='subupdate',
            name='id',
        ),
        migrations.RemoveField(
            model_name='subupdate',
            name='last_failure',
        ),
        migrations.RemoveField(
            model_name='subupdate',
            name='last_failure_message',
        ),
        migrations.RemoveField(
            model_name='subupdate',
            name='last_update',
        ),
        migrations.AddField(
            model_name='subevent',
            name='updaterevent_ptr',
            # default=2 is a one-off backfill value for pre-existing
            # rows (dropped again via preserve_default=False) --
            # presumably the pk of a pre-created parent row; verify
            # against deployment notes before reusing this migration.
            field=models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, default=2, serialize=False, to='main.UpdaterEvent'),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='subupdate',
            name='updater_ptr',
            field=models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, default=2, serialize=False, to='main.Updater'),
            preserve_default=False,
        ),
    ]
|
unnikrishnankgs/va | venv/lib/python3.5/site-packages/PIL/XpmImagePlugin.py | Python | bsd-2-clause | 3,101 | 0.000322 | #
# The Python Imaging Library.
# $Id$
#
# XPM File handling
#
# History:
# 1996-12-29 fl Created
# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.7)
#
# Copyright (c) Secret Labs AB 1997-2001.
# Copyright (c) Fredrik Lundh 1996-2001.
#
# See the README file for information on usage and redistribution.
#
import re
from . import Image, ImageFile, ImagePalette
from ._binary import i8, o8
__version__ = "0.2"
# XPM header
xpm_head = re.compile(b"\"([0-9]*) ([0-9]*) ([0-9]*) ([0-9]*)")
def _accept(prefix):
return prefix[:9] == b"/* XPM */"
##
# Image plugin for X11 pixel maps.
class XpmImageFile(ImageFile.ImageFile):
format = "XPM"
format_description = "X11 Pixel Map"
def _open(self):
if not _accept(self.fp.read(9)):
raise SyntaxError("not an XPM file")
# skip forward to next string
while True:
s = self.fp.readline()
if not s:
raise SyntaxError("broken XPM file")
m = xpm_head.match(s)
if m:
break
self.size = int(m.group(1)), int(m.group(2))
pal = int(m.group(3))
bpp = int(m.group(4))
if pal > 256 or bpp != 1:
raise ValueError("cannot read this XPM file")
#
# load palette description
palette = [b"\0\0\0"] * 256
for i in range(pal):
s = self.fp.readline()
if s[-2:] == b'\r\n':
s = s[:-2]
elif s[-1:] in b'\r\n':
s = s[:-1]
c = i8(s[1])
s = s[2:-2].split()
for i in range(0, len(s), 2):
if s[i] == b"c":
# process colour key
rgb = s[i+1]
if rgb == b"None":
self.info["transparency"] = c
elif rgb[0:1] == b"#":
# FIXME: handle colour names (see ImagePalette.py)
r | gb = int(rgb[1:], 16)
palette[c] = (o8((rgb >> 16) & 255) +
o8((rgb >> 8) & 255) +
o8(rgb & 255))
else:
# unknown colour
raise ValueE | rror("cannot read this XPM file")
break
else:
# missing colour key
raise ValueError("cannot read this XPM file")
self.mode = "P"
self.palette = ImagePalette.raw("RGB", b"".join(palette))
self.tile = [("raw", (0, 0)+self.size, self.fp.tell(), ("P", 0, 1))]
def load_read(self, bytes):
#
# load all image data in one chunk
xsize, ysize = self.size
s = [None] * ysize
for i in range(ysize):
s[i] = self.fp.readline()[1:xsize+1].ljust(xsize)
return b"".join(s)
#
# Registry
Image.register_open(XpmImageFile.format, XpmImageFile, _accept)
Image.register_extension(XpmImageFile.format, ".xpm")
Image.register_mime(XpmImageFile.format, "image/xpm")
|
tomaaron/raiden | raiden/tasks.py | Python | mit | 50,038 | 0.000959 | # -*- coding: utf-8 -*-
# pylint: disable=too-many-lines
import logging
import random
import time
import gevent
from gevent.event import AsyncResult
from gevent.queue import Empty, Queue
from gevent.timeout import Timeout
from random import randint
from ethereum import slogging
from ethereum.utils import sha3
from raiden.messages import (
MediatedTransfer,
RefundTransfer,
RevealSecret,
Secret,
SecretRequest,
TransferTimeout,
)
from raiden.utils import lpex, pex
__all__ = (
'StartMediatedTransferTask',
'MediateTransferTask',
'EndMediatedTransferTask',
)
log = slogging.get_logger(__name__) # pylint: disable=invalid-name
REMOVE_CALLBACK = object()
DEFAULT_EVENTS_POLL_TIMEOUT = 0.5
DEFAULT_HEALTHCHECK_POLL_TIMEOUT = 1
ESTIMATED_BLOCK_TIME = 7
TIMEOUT = object()
class Task(gevent.Greenlet):
    """ Base class used to create tasks.
    Note:
        Always call super().__init__().
    """
    def __init__(self):
        super(Task, self).__init__()
        # Messages addressed to this task are funnelled through this
        # queue by on_response() and consumed by the task's greenlet.
        self.response_queue = Queue()
    def on_completion(self, success):
        # NOTE(review): relies on a `transfermanager` attribute being
        # set externally -- it is not assigned in this base class.
        self.transfermanager.on_task_completed(self, success)
        return success
    def on_response(self, response):
        """ Add a new response message to the task queue. """
        # Guard keeps the repr()/formatting work off the hot path when
        # debug logging is disabled.
        if log.isEnabledFor(logging.DEBUG):
            log.debug(
                'RESPONSE MESSAGE RECEIVED %s %s',
                repr(self),
                response,
            )
        self.response_queue.put(response)
class HealthcheckTask(Task):
    """ Task for checking if all of our open channels are healthy """

    def __init__(
            self,
            raiden,
            send_ping_time,
            max_unresponsive_time,
            sleep_time=DEFAULT_HEALTHCHECK_POLL_TIMEOUT):
        """ Initialize a HealthcheckTask that will monitor open channels for
        responsiveness.

        :param raiden RaidenService: The Raiden service which will give us
                                     access to the protocol object and to
                                     the asset manager
        :param int sleep_time: Time in seconds between each healthcheck task
        :param int send_ping_time: Time in seconds after not having received
                                   a message from an address at which to send
                                   a Ping.
        :param int max_unresponsive_time: Time in seconds after not having received
                                          a message from an address at which it
                                          should be deleted.
        """
        super(HealthcheckTask, self).__init__()

        self.protocol = raiden.protocol
        self.raiden = raiden

        self.stop_event = AsyncResult()
        self.sleep_time = sleep_time
        self.send_ping_time = send_ping_time
        self.max_unresponsive_time = max_unresponsive_time

    def _run(self):  # pylint: disable=method-hidden
        # Poll loop: runs until stop_event is set (see stop_and_wait /
        # stop_async below).
        stop = None

        while stop is None:
            # Keys are removed only after the iteration finishes, so the
            # dict is never mutated while being iterated.
            keys_to_remove = []
            for key, queue in self.protocol.address_queue.iteritems():
                # key is (receiver_address, asset_address).
                receiver_address = key[0]
                asset_address = key[1]
                if queue.empty():
                    elapsed_time = (
                        time.time() - self.protocol.last_received_time[receiver_address]
                    )

                    # Add a randomized delay in the loop to not clog the network
                    gevent.sleep(randint(0, int(0.2 * self.send_ping_time)))
                    if elapsed_time > self.max_unresponsive_time:
                        # remove the node from the graph
                        asset_manager = self.raiden.get_manager_by_asset_address(
                            asset_address
                        )
                        asset_manager.channelgraph.remove_path(
                            self.protocol.raiden.address,
                            receiver_address
                        )
                        # remove the node from the queue
                        keys_to_remove.append(key)

                    elif elapsed_time > self.send_ping_time:
                        # Not dead yet, just quiet: probe it with a Ping.
                        self.protocol.send_ping(receiver_address)

            for key in keys_to_remove:
                self.protocol.address_queue.pop(key)

            self.timeout = Timeout(self.sleep_time)  # wait() will call cancel()
            stop = self.stop_event.wait(self.timeout)

    def stop_and_wait(self):
        # Signal the poll loop to exit and block until the greenlet finishes.
        self.stop_event.set(True)
        gevent.wait(self)

    def stop_async(self):
        # Signal the poll loop to exit without waiting for it.
        self.stop_event.set(True)
class AlarmTask(Task):
    """ Polls the chain and notifies registered callbacks when a block is mined. """

    def __init__(self, chain):
        super(AlarmTask, self).__init__()
        self.chain = chain
        self.callbacks = []
        self.stop_event = AsyncResult()
        self.last_block_number = chain.block_number()

        # TODO: Start with a larger wait_time and decrease it as the
        # probability of a new block increases.
        self.wait_time = 0.5

    def register_callback(self, callback):
        """ Register a new callback.

        Note:
            This callback will be executed in the AlarmTask context and for
            this reason it should not block, otherwise we can miss block
            changes.
        """
        if not callable(callback):
            raise ValueError('callback is not a callable')
        self.callbacks.append(callback)

    def _run(self):  # pylint: disable=method-hidden
        stop = None
        last_loop = time.time()
        log.debug('starting block number', block_number=self.last_block_number)

        while stop is None:
            current_block = self.chain.block_number()

            # More than one block advanced since the last tick means the
            # poll loop fell behind.
            missed = current_block - self.last_block_number - 1
            if missed > 0:
                log.error(
                    'alarm missed %s blocks',
                    missed,
                )

            if current_block != self.last_block_number:
                self.last_block_number = current_block
                log.debug('new block', number=current_block, timestamp=last_loop)

                expired = []
                for callback in self.callbacks:
                    try:
                        outcome = callback(current_block)
                    except:  # pylint: disable=bare-except
                        log.exception('unexpected exception on alarm')
                    else:
                        # A callback unsubscribes by returning the sentinel.
                        if outcome is REMOVE_CALLBACK:
                            expired.append(callback)

                for callback in expired:
                    self.callbacks.remove(callback)

            # Keep the tick period close to `wait_time`: subtract the time
            # spent working from the next sleep.
            work_time = time.time() - last_loop
            if work_time > self.wait_time:
                log.warning(
                    'alarm loop is taking longer than the wait time',
                    work_time=work_time,
                    wait_time=self.wait_time,
                )
                sleep_time = 0.001
            else:
                sleep_time = self.wait_time - work_time

            stop = self.stop_event.wait(sleep_time)
            last_loop = time.time()

    def stop_and_wait(self):
        self.stop_event.set(True)
        gevent.wait(self)

    def stop_async(self):
        self.stop_event.set(True)
class BaseMediatedTransferTask(Task):
def _send_and_wait_time(self, raiden, recipient, transfer, timeout):
""" Utility to handle multiple messages for the same hashlock while
properly handling expiration timeouts.
"""
current_time = time.time()
limit_time = current_time + timeout
raiden.send_async(recipient, transfer)
while current_time <= limit_time:
# wait for a response message (not the Ack for the transfer)
try:
response = self.response_queue.get(
timeout=limit_time - current |
operasoftware/presto-testo | wpt/websockets/websock_handlers/after_handshake_raw_wsh.py | Python | bsd-3-clause | 264 | 0.015152 | #!/usr/bin/python
from mod_pywebsocket import msgutil
import urllib
def web_socket_do_extra_handshake(reque | st):
pass
def web_socket_transfer_data(request):
    """Echo the URL-decoded, escape-expanded query string back to the client."""
    # Everything after the first '?' in the WebSocket location.
    query = request.ws_location.split('?', 1)[1]
    payload = urllib.unquote(query).decode("string-escape")
    msgutil._write(request, payload)
|
miketheman/opencomparison | package/models.py | Python | mit | 11,990 | 0.002252 | from datetime import datetime, timedelta
import json
import re
from django.core.cache import cache
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from django.db import models
from django.utils.translation import ugettext_lazy as _
from distutils.version import LooseVersion as versioner
import requests
from core.utils import STATUS_CHOICES, status_choices_switch
from core.models import BaseModel
from package.repos import get_repo_for_repo_url
from package.signals import si | gnal_fetch_latest_metadata
from package.utils import get_version, get_pypi_version
repo_url_help_text = settings.PACKAGINATOR_HELP_TEXT['REPO_URL']
pypi_url_help_te | xt = settings.PACKAGINATOR_HELP_TEXT['PYPI_URL']
class NoPyPiVersionFound(Exception):
pass
class Category(BaseModel):
    """A user-facing grouping of packages (displayed as the "Installation" type)."""

    title = models.CharField(_("Title"), max_length="50")
    slug = models.SlugField(_("slug"))
    description = models.TextField(_("description"), blank=True)
    # NOTE(review): max_length is given as the string "50" here and on
    # `title`; Django documents it as an int -- confirm and normalise.
    title_plural = models.CharField(_("Title Plural"), max_length="50", blank=True)
    show_pypi = models.BooleanField(_("Show pypi stats & version"), default=True)

    class Meta:
        ordering = ['title']
        verbose_name_plural = 'Categories'

    def __unicode__(self):
        # Python 2 string representation: the category title.
        return self.title
class Package(BaseModel):
    """A tracked software package: repository metadata, PyPI stats and users.

    Combines data pulled from the repo backend (watchers, forks,
    description, commits) with data pulled from PyPI (downloads, versions).
    """

    title = models.CharField(_("Title"), max_length="100")
    slug = models.SlugField(_("Slug"), help_text="Enter a valid 'slug' consisting of letters, numbers, underscores or hyphens.<br />Values will be converted to lowercase.", unique=True)
    category = models.ForeignKey(Category, verbose_name="Installation")
    repo_description = models.TextField(_("Repo Description"), blank=True)
    repo_url = models.URLField(_("repo URL"), help_text=repo_url_help_text, blank=True, unique=True, verify_exists=True)
    repo_watchers = models.IntegerField(_("repo watchers"), default=0)
    repo_forks = models.IntegerField(_("repo forks"), default=0)
    pypi_url = models.URLField(_("PyPI slug"), help_text=pypi_url_help_text, blank=True, default='', verify_exists=True)
    pypi_downloads = models.IntegerField(_("Pypi downloads"), default=0)
    # NOTE(review): "collaborats" typo below is user-visible help text, but
    # it is a runtime string -- fixing it is a content change, flagging only.
    participants = models.TextField(_("Participants"),
        help_text="List of collaborats/participants on the project", blank=True)
    usage = models.ManyToManyField(User, blank=True)
    created_by = models.ForeignKey(User, blank=True, null=True, related_name="creator", on_delete=models.SET_NULL)
    last_modified_by = models.ForeignKey(User, blank=True, null=True, related_name="modifier", on_delete=models.SET_NULL)
    # Serialized list of weekly commit counts; see commits_over_52().
    commit_list = models.TextField(_("Commit List"), blank=True)
@property
def pypi_name(self):
""" return the pypi name of a package"""
if not self.pypi_url.strip():
return ""
name = self.pypi_url.replace("http://pypi.python.org/pypi/", "")
if "/" in name:
return name[:name.index("/")]
return name
@property
def last_updated(self):
try:
last_commit = self.commit_set.latest('commit_date')
if last_commit:
return last_commit.commit_date
except ObjectDoesNotExist:
pass
return None
    @property
    def repo(self):
        """Repo-backend handler object selected from ``repo_url``."""
        return get_repo_for_repo_url(self.repo_url)
    @property
    def active_examples(self):
        """Queryset of this package's examples flagged as active."""
        return self.packageexample_set.filter(active=True)
    @property
    def license_latest(self):
        """License of the latest known version, or "UNKNOWN" if none exist."""
        try:
            return self.version_set.latest().license
        except Version.DoesNotExist:
            return "UNKNOWN"
    def grids(self):
        """Lazily yield the grids this package appears on."""
        return (x.grid for x in self.gridpackage_set.all())
    def repo_name(self):
        """Repo path with the host prefix stripped via the backend's regex."""
        return re.sub(self.repo.url_regex, '', self.repo_url)
    def repo_info(self):
        """Split the repo path into owner and repository name.

        NOTE(review): assumes repo_name() yields "<username>/<repo>";
        confirm for backends with deeper paths.
        """
        return dict(
            username=self.repo_name().split('/')[0],
            repo_name=self.repo_name().split('/')[1],
        )
    def participant_list(self):
        """Comma-separated ``participants`` field split into a list."""
        return self.participants.split(',')
    def get_usage_count(self):
        """Number of users who marked that they use this package."""
        return self.usage.count()
def commits_over_52(self):
if self.commit_list:
result = self.commit_list
else:
result = str([0 for x in range(52)])
return result.replace(" ", "").replace("[", "").replace("]", "")
    def fetch_pypi_data(self, *args, **kwargs):
        """Pull release metadata for this package from the PyPI JSON API.

        Creates/updates a Version row for the latest release (license,
        downloads, upload time, hidden flag, dev status, Python 3 support).
        Returns True on success, False when there is no usable PyPI URL or
        PyPI answers 404.
        """
        # Get the releases from pypi
        if self.pypi_url.strip() and self.pypi_url != "http://pypi.python.org/pypi/":

            total_downloads = 0

            url = "https://pypi.python.org/pypi/{0}/json".format(self.pypi_name)
            response = requests.get(url)
            if settings.DEBUG:
                if response.status_code not in (200, 404):
                    print("BOOM!")
                    print(self, response.status_code)
            if response.status_code == 404:
                if settings.DEBUG:
                    print("BOOM!")
                    print(self, response.status_code)
                return False
            release = json.loads(response.content)
            info = release['info']

            version, created = Version.objects.get_or_create(
                package=self,
                number=info['version']
            )

            # add to versions
            license = info['license']
            if not info['license'] or not license.strip() or 'UNKNOWN' == license.upper():
                for classifier in info['classifiers']:
                    if classifier.strip().startswith('License'):
                        # Do it this way to cover people not quite following the spec
                        # at http://docs.python.org/distutils/setupscript.html#additional-meta-data
                        license = classifier.strip().replace('License ::', '')
                        license = license.replace('OSI Approved :: ', '')
                        break

            if license and len(license) > 100:
                license = "Other (see http://pypi.python.org/pypi/%s)" % self.pypi_name

            version.license = license

            #version stuff
            try:
                url_data = release['urls'][0]
                version.downloads = url_data['downloads']
                version.upload_time = url_data['upload_time']
            except IndexError:
                # Not a real release so we just guess the upload_time.
                version.upload_time = version.created

            version.hidden = info['_pypi_hidden']
            for classifier in info['classifiers']:
                if classifier.startswith('Development Status'):
                    version.development_status = status_choices_switch(classifier)
                    break
            for classifier in info['classifiers']:
                if classifier.startswith('Programming Language :: Python :: 3'):
                    version.supports_python3 = True
                    break
            version.save()

            self.pypi_downloads = total_downloads
            # Calculate total downloads
            # NOTE(review): total_downloads is initialised to 0 and never
            # incremented, so pypi_downloads always ends up 0 here -- confirm
            # whether per-release download counts should be summed.

            return True
        return False
    def fetch_metadata(self, fetch_pypi=True):
        """Refresh PyPI (optional) and repo metadata, notify listeners, save."""
        if fetch_pypi:
            self.fetch_pypi_data()
        self.repo.fetch_metadata(self)
        signal_fetch_latest_metadata.send(sender=self)
        self.save()
def save(self, *args, **kwargs):
if not self.repo_description:
self.repo_description = ""
super(Package, self).save(*args, **kwargs)
    def fetch_commits(self):
        """Delegate commit fetching to the repo backend for this package."""
        self.repo.fetch_commits(self)
def pypi_version(self):
cache_name = self.cache_namer(self.pypi_version)
version = cache.get(cache_name)
if version is not None:
return version
version = get_pypi_version(self)
cache.set(cache_name, version)
return version
def last_released(self):
cache_name = self.cache_namer(self.last_released)
version = cache.get(cache_name)
if version is not None:
return version
version = get_version(self)
cache.set(cache_name, version)
return version
@pro |
relayr/python-sdk | demos/relayr_inventory.py | Python | mit | 4,257 | 0.003054 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Scan all relayr resources of a given user via the | relayr API.
This should be useful for finding out IDs for all devices of your
account, see which devices are connected to which apps, or which
channels are open on a d | evice, etc.
All that is needed is a relayr access token from the relayr developer
dashboard to be provided on the command-line:
https://developer.relayr.io/dashboard/apps/myApps
Output can be in JSON or YAML (the latter wasn't much tested).
To-do:
- remove some redundancy w.r.t. device models
Sample usage:
relayr_inventory.py -h
relayr_inventory.py --format json > mystuff.json
relayr_inventory.py --format yaml > mystuff.yaml
Sample JSON output (much shortened, using fake IDs):
{
"app": {
"token": {
"expiry-date": "2016-03-24T10:25:31.082Z",
"value": "_imkFZ3QGDRCnQECAvL4YosX31WMDdYV"
},
"id": "47571d73-ec49-11e5-a867-6c400890724a",
"name": "My best app"
},
"user": {
"email": "joe@foo.com",
"id": "507ec061-ec49-11e5-9888-6c400890724a",
"name": "joefoo"
},
"publishers": [ ... ],
"device-groups": [ ... ],
"device": [ ... ],
"bookmarked-devices": []
}
"""
import sys
import json
import yaml
import argparse
from relayr import Client
import relayr.exceptions
def make_inventory(token):
    """
    Scan relayr resources owned by user with given token via the API.

    :param token: A relayr app access token (UUID).
    :type token: str
    :rtype token: dict
    """
    client = Client(token=token)
    try:
        client.get_user()
    except relayr.exceptions.RelayrApiException:
        print('Could not connect. Maybe your token is wrong or has expired?')
        print('Please check here: https://developer.relayr.io/dashboard/apps/myApps')
        sys.exit(1)

    inventory = {}

    # user (owning the given app token)
    user = client.get_user()
    inventory['user'] = {'id': user.id, 'name': user.name, 'email': user.email}

    # app (of the given app token)
    app = client.get_app()
    app_entry = {'id': app.id, 'name': app.name}
    tok = app.client.api.get_oauth2_appdev_token(app.id)
    app_entry['token'] = {'value': tok['token'], 'expiry-date': tok['expiryDate']}
    inventory['app'] = app_entry

    # publishers
    inventory['publishers'] = [
        {'id': pub.id, 'name': pub.name, 'owner': pub.owner}
        for pub in user.get_publishers()
    ]

    # device groups
    inventory['device-groups'] = [
        grp for grp in client.api.get_user_device_groups(user.id)
    ]

    # devices (with their open channels attached)
    devices = []
    for dev in client.api.get_user_devices(user.id):
        dev['channels'] = client.api.get_device_channels(dev['id'])['channels']
        devices.append(dev)
    inventory['devices'] = devices

    # bookmarked devices
    bookmarked = []
    for dev in user.get_bookmarked_devices():
        dev.get_info()
        bookmarked.append({'id': dev.id, 'name': dev.name, 'public': dev.public})
    inventory['bookmarked-devices'] = bookmarked

    # transmitters
    transmitters = []
    for trans in user.get_transmitters():
        trans.get_info()
        connected = [d.id for d in trans.get_connected_devices()]
        transmitters.append({
            'id': trans.id, 'name': trans.name, 'owner': trans.owner,
            'secret': trans.secret, 'credentials': trans.credentials,
            'type': trans.integrationType, 'connected-devices': connected,
        })
    inventory['transmitters'] = transmitters

    return inventory
# Command-line entry point: parse arguments, scan the account, print result.
if __name__ == '__main__':
    desc = 'Scan all relayr resources of a given user via the relayr API.'
    p = argparse.ArgumentParser(description=desc)
    p.add_argument('token', help="The relayr app access token.")
    p.add_argument('--format', metavar='NAME',
        help='Output format to use ("json" or "yaml", default: "json").')
    args = p.parse_args()
    # make_inventory(args.token)
    result = make_inventory(args.token)
    # Serialise in the requested format; JSON (pretty-printed) is default.
    if args.format and args.format.lower() == 'yaml':
        out = yaml.safe_dump(result)
    else:
        out = json.dumps(result, indent=4, encoding='utf-8')
    # Python 2 print statement -- this script is Python 2 only (the
    # `encoding=` kwarg above is also Python 2 specific).
    print out
|
papedaniel/oioioi | oioioi/disqualification/views.py | Python | gpl-3.0 | 755 | 0 | from oioioi.contests.models import Submission
from oioioi.dashboard.registry import dashboard_registry
from oioioi.disqualification.controllers import \
DisqualificationContestControllerMixin
@dashboard_registry.register_decorator(order=10)
def disqualification_fragment(request):
    """Render the dashboard fragment listing the user's disqualifications."""
    if not request.user.is_authenticated():
        return None
    controller = request.contest.controller
    # Only contests whose controller supports disqualifications render this.
    if not isinstance(controller, DisqualificationContestControllerMixin):
        return None
    submissions = (
        Submission.objects
        .filter(problem_instance__contest=request.contest)
        .order_by('-date')
        .select_related()
    )
    visible = controller.filter_my_visible_submissions(request, submissions)
    return controller.render_disqualifications(request, visible)
|
marcusmueller/gnuradio | gr-utils/python/modtool/core/disable.py | Python | gpl-3.0 | 7,940 | 0.003023 | #
# Copyright 2013, 2018, 2019 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public | License
# along with GNU Radio; see the file COPYING. If not, | write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
""" Disable blocks module """
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
import os
import re
import sys
import logging
from ..tools import CMakeFileEditor
from .base import ModTool, ModToolException
logger = logging.getLogger(__name__)
class ModToolDisable(ModTool):
    """ Disable block (comments out CMake entries for files) """
    name = 'disable'
    description = 'Disable selected block in module.'

    def __init__(self, blockname=None, **kwargs):
        ModTool.__init__(self, blockname, **kwargs)
        # The block name doubles as the filename-match pattern.
        self.info['pattern'] = blockname

    def validate(self):
        """ Validates the arguments """
        ModTool._validate(self)
        if not self.info['pattern'] or self.info['pattern'].isspace():
            raise ModToolException("Invalid pattern!")

    def run(self):
        """ Go, go, go! """
        # Each handler below is called for files matching a special rule.
        # A handler returns True when it fully disabled the file itself, so
        # the generic cmake.disable_file() fallback must be skipped.
        def _handle_py_qa(cmake, fname):
            """ Do stuff for py qa """
            cmake.comment_out_lines('GR_ADD_TEST.*'+fname)
            self.scm.mark_file_updated(cmake.filename)
            return True
        def _handle_py_mod(cmake, fname):
            """ Do stuff for py extra files """
            try:
                with open(self._file['pyinit']) as f:
                    initfile = f.read()
            except IOError:
                logger.warning("Could not edit __init__.py, that might be a problem.")
                return False
            pymodname = os.path.splitext(fname)[0]
            # Comment out any `from <mod>`/`import <mod>` line in __init__.py.
            initfile = re.sub(r'((from|import)\s+\b'+pymodname+r'\b)', r'#\1', initfile)
            with open(self._file['pyinit'], 'w') as f:
                f.write(initfile)
            self.scm.mark_file_updated(self._file['pyinit'])
            return False
        def _handle_cc_qa(cmake, fname):
            """ Do stuff for cc qa """
            # Each supported module layout version stores its C++ QA hooks
            # in a different place.
            if self.info['version'] == '37':
                cmake.comment_out_lines(r'\$\{CMAKE_CURRENT_SOURCE_DIR\}/'+fname)
                fname_base = os.path.splitext(fname)[0]
                ed = CMakeFileEditor(self._file['qalib']) # Abusing the CMakeFileEditor...
                ed.comment_out_lines(r'#include\s+"{}.h"'.format(fname_base), comment_str='//')
                ed.comment_out_lines(r'{}::suite\(\)'.format(fname_base), comment_str='//')
                ed.write()
                self.scm.mark_file_updated(self._file['qalib'])
            elif self.info['version'] == '38':
                # NOTE(review): uses self.info['blockname'] rather than the
                # matched fname -- confirm this is intended for 3.8 layouts.
                fname_qa_cc = 'qa_{}.cc'.format(self.info['blockname'])
                cmake.comment_out_lines(fname_qa_cc)
            elif self.info['version'] == '36':
                cmake.comment_out_lines('add_executable.*'+fname)
                cmake.comment_out_lines('target_link_libraries.*'+os.path.splitext(fname)[0])
                cmake.comment_out_lines('GR_ADD_TEST.*'+os.path.splitext(fname)[0])
            self.scm.mark_file_updated(cmake.filename)
            return True
        def _handle_h_swig(cmake, fname):
            """ Comment out include files from the SWIG file,
            as well as the block magic """
            with open(self._file['swig']) as f:
                swigfile = f.read()
            (swigfile, nsubs) = re.subn(r'(.include\s+"({}/)?{}")'.format(
                self.info['modname'], fname),
                r'//\1', swigfile)
            if nsubs > 0:
                logger.info("Changing {}...".format(self._file['swig']))
            if nsubs > 1: # Need to find a single BLOCK_MAGIC
                blockname = os.path.splitext(fname[len(self.info['modname'])+1:])[0]
                if self.info['version'] in ('37', '38'):
                    blockname = os.path.splitext(fname)[0]
                (swigfile, nsubs) = re.subn('(GR_SWIG_BLOCK_MAGIC2?.+{}.+;)'.format(blockname), r'//\1', swigfile)
                if nsubs > 1:
                    logger.warning("Hm, changed more then expected while editing {}.".format(self._file['swig']))
            with open(self._file['swig'], 'w') as f:
                f.write(swigfile)
            self.scm.mark_file_updated(self._file['swig'])
            return False
        def _handle_i_swig(cmake, fname):
            """ Comment out include files from the SWIG file,
            as well as the block magic """
            with open(self._file['swig']) as f:
                swigfile = f.read()
            blockname = os.path.splitext(fname[len(self.info['modname'])+1:])[0]
            if self.info['version'] in ('37', '38'):
                blockname = os.path.splitext(fname)[0]
            swigfile = re.sub(r'(%include\s+"'+fname+'")', r'//\1', swigfile)
            logger.info("Changing {}...".format(self._file['swig']))
            swigfile = re.sub('(GR_SWIG_BLOCK_MAGIC2?.+'+blockname+'.+;)', r'//\1', swigfile)
            with open(self._file['swig'], 'w') as f:
                f.write(swigfile)
            self.scm.mark_file_updated(self._file['swig'])
            return False

        # This portion will be covered by the CLI
        if not self.cli:
            self.validate()
        else:
            from ..cli import cli_input
        # List of special rules: 0: subdir, 1: filename re match, 2: callback
        special_treatments = (
            ('python', r'qa.+py$', _handle_py_qa),
            ('python', r'^(?!qa).+py$', _handle_py_mod),
            ('lib', r'qa.+\.cc$', _handle_cc_qa),
            ('include/{}'.format(self.info['modname']), r'.+\.h$', _handle_h_swig),
            ('include', r'.+\.h$', _handle_h_swig),
            ('swig', r'.+\.i$', _handle_i_swig)
        )
        for subdir in self._subdirs:
            if self.skip_subdirs[subdir]:
                continue
            if self.info['version'] in ('37', '38') and subdir == 'include':
                subdir = 'include/{}'.format(self.info['modname'])
            try:
                cmake = CMakeFileEditor(os.path.join(subdir, 'CMakeLists.txt'))
            except IOError:
                continue
            logger.info("Traversing {}...".format(subdir))
            filenames = cmake.find_filenames_match(self.info['pattern'])
            yes = self.info['yes']
            for fname in filenames:
                file_disabled = False
                if not yes:
                    # Interactive confirmation: 'a' = yes to all, 'q' = quit.
                    ans = cli_input("Really disable {}? [Y/n/a/q]: ".format(fname)).lower().strip()
                    if ans == 'a':
                        yes = True
                    if ans == 'q':
                        sys.exit(0)
                    if ans == 'n':
                        continue
                for special_treatment in special_treatments:
                    if special_treatment[0] == subdir and re.match(special_treatment[1], fname):
                        file_disabled = special_treatment[2](cmake, fname)
                if not file_disabled:
                    cmake.disable_file(fname)
            cmake.write()
            self.scm.mark_files_updated((os.path.join(subdir, 'CMakeLists.txt'),))
        logger.warning("Careful: 'gr_modtool disable' does not resolve dependencies.")
|
Kuniwak/vint | vint/utils/array.py | Python | mit | 349 | 0.008596 | from typing import Ty | peVar, List, Callable # noqa: F401
from functools import reduce
from operator import add
T = TypeVar('T')
S = TypeVar('S')
def flatten(l):
    # type: (List[List[T]]) -> List[T]
    """Concatenate a list of lists into a single flat list.

    Implemented as one comprehension pass: the previous
    ``reduce(add, l, [])`` copied the growing accumulator on every
    concatenation, which is quadratic in the total number of elements.
    """
    return [item for sublist in l for item in sublist]
def flat_map(f, l):
    # type: (Callable[[S], List[T]], List[S]) -> List[T]
    """Apply ``f`` to each element of ``l`` and concatenate the results."""
    return [item for x in l for item in f(x)]
tensorflow/moonlight | moonlight/training/generation/generation.py | Python | apache-2.0 | 11,447 | 0.005154 | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""VexFlow labeled data generation.
Wraps the node.js generator, which generates a random measure of music as SVG,
and the ground truth glyphs present in the image as a `Page` message.
Each invocation generates a batch of images. There is a tradeoff between the
startup time of node.js for each invocation, and keeping the output size small
enough to pipe into Python.
The final outputs are positive and negative example patches. Positive examples
are centered on an outputted glyph, and have that glyph's type. Negative
examples are at least a few pixels away from any glyph, and have type NONE.
Since negative examples could be a few pixels away from a glyph, we get negative
examples that overlap with partial glyph(s), but are centered too far away from
a glyph to be considered a positive example. Currently, every single glyph
results in a single positive example, and negative examples are randomly
sampled.
All glyphs are emitted to RecordIO, where they are outputted in a single
collection for training. We currently do not store the entire generated image
anywhere. This could be added later in order to try other classification
approaches.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import os.path
import random
import subprocess
import sys
import apache_beam as beam
from apache_beam.metrics import Metrics
import numpy as np
import tensorflow as tf
from google.protobuf import text_format
from moonlight import engine
from moonlight.protobuf import musicscore_pb2
from moonlight.staves import staffline_distance
from moonlight.staves import staffline_extractor
# Every image is expected to contain at least 3 glyphs.
POSITIVE_EXAMPLES_PER_IMAGE = 3
def _normalize_path(filename):
"""Normalizes a relative path to a command to spawn.
Args:
filename: String; relative or absolute path.
Returns:
The normalized path. This is necessary because in our use case,
vexflow_generator_pipeline will live in a different directory from
vexflow_generator, and there are symlinks to both directories in the same
parent directory. Without normalization, `..` would reference the parent of
the actual directory that was symlinked. With normalization, it references
the directory that contains the symlink to the working directory.
"""
if filename.startswith('/'):
return filename
else:
return os.path.normpath(
os.path.join(os.path.dirname(sys.argv[0]), filename))
class PageGenerationDoFn(beam.DoFn):
  """Generates the PNG images and ground truth for each batch.

  Takes in a batch number, and outputs a tuple of PNG contents (bytes) and the
  labeled staff (Staff message).
  """

  def __init__(self, num_pages_per_batch, vexflow_generator_command,
               svg_to_png_command):
    self.num_pages_per_batch = num_pages_per_batch
    self.vexflow_generator_command = vexflow_generator_command
    self.svg_to_png_command = svg_to_png_command

  def process(self, batch_num):
    for page in self.get_pages_for_batch(batch_num, self.num_pages_per_batch):
      ground_truth = musicscore_pb2.Staff()
      text_format.Parse(page['page'], ground_truth)
      # TODO(ringw): Fix the internal proto pickling issue so that we don't
      # have to serialize the staff here.
      yield self._svg_to_png(page['svg']), ground_truth.SerializeToString()

  def get_pages_for_batch(self, batch_num, num_pages_per_batch):
    """Generates the music score pages in a single batch.

    The generator takes in a seed for the RNG for each page, and outputs all
    pages at once. The seeds for all batches are consecutive for determinism,
    starting from 0, but each seed to the Mersenne Twister RNG should result in
    completely different output.

    Args:
      batch_num: The index of the batch to output.
      num_pages_per_batch: The number of pages to generate in each batch.

    Returns:
      A list of dicts holding `svg` (XML text) and `page` (text-format
      `tensorflow.moonlight.Staff` proto).
    """
    first_seed = batch_num * num_pages_per_batch
    return self.get_pages(range(first_seed, first_seed + num_pages_per_batch))

  def get_pages(self, seeds):
    command = list(self.vexflow_generator_command)
    # If vexflow_generator_command is relative, it is relative to the pipeline
    # binary.
    command[0] = _normalize_path(command[0])

    seed_list = ','.join(str(seed) for seed in seeds)
    return json.loads(
        subprocess.check_output(command + ['--random_seeds=' + seed_list]))

  def _svg_to_png(self, svg):
    command = list(self.svg_to_png_command)
    command[0] = _normalize_path(command[0])
    converter = subprocess.Popen(
        command,
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE)
    stdout, stderr = converter.communicate(input=svg)
    if converter.returncode != 0:
      raise ValueError('convert failed with status %d\nstderr:\n%s' %
                       (converter.returncode, stderr))
    return stdout
class PatchExampleDoFn(beam.DoFn):
"""Extracts labeled patches from generated VexFlow music scores."""
def __init__(self,
negative_example_distance,
patch_width,
negative_to_positive_example_ratio,
noise_fn=lambda x: x):
self.negative_example_distance = negative_example_distance
self.patch_width = patch_width
self.negative_to_positive_example_ratio = negative_to_positive_example_ratio
self.noise_fn = noise_fn
self.patch_counter = Metrics.counter(self.__class__, 'num_patches')
def start_bundle(self):
# TODO(ringw): Expose a cleaner way to set this value.
# The image is too small for the default min staffline distance score.
# pylint: disable=protected-access
staffline_distance._MIN_STAFFLINE_DISTANCE_SCORE = 100
self.omr = engine.OMREngine()
def process(self, item):
png_contents, staff_message = item
staff_message = musicscore_pb2.Staff.FromString(staff_message)
with tf.Session(graph=self.omr.graph) as sess:
# Load the image, then feed it in to apply noise.
# Randomly rotate the image and apply noise, then dump it back out as a
# PNG.
# TODO(ringw): Expose a way to pass in the image contents to the main
# OMR TF graph.
img = tf.to_float(tf.image.decode_png(png_contents))
# Collapse the RGB channels, if any. No-op for a monochrome PNG.
img = tf.reduce_mean(img[:, :, :3], axis=2)[:, :, None]
# Fix the stafflines being #999.
img = tf.clip_by_value(img * 2. - 255., 0., 255.)
img = self.noise_fn(img)
# Get a 2D uint8 image array for OMR.
noisy_image = sess.run(
tf.cast(tf.clip_by_value(img, 0, 255)[:, :, 0], tf.uint8))
# Run OMR staffline extraction and staffline distance estimation. The
# stafflines are used to get patches from the generated image.
stafflines, image_staffline_distance = sess.run(
[
self.omr.glyph_classifier.staffline_extractor.extract_staves(),
self.omr.structure.staff_detector.staffline_distance[0]
],
feed_dict={self.omr.image: noisy_image})
if stafflines.shape[0] != 1:
raise ValueError('Image should have one detected staff, got shape: ' +
str(stafflines.shape))
positive_example_count = 0
negative_example_whitelist = np.ones(
(stafflines.shape[staffline_ |
City-of-Helsinki/linkedevents | helevents/migrations/0004_auto_20180109_1727.py | Python | mit | 639 | 0 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2018-01-09 15:27
from __future__ import unicode_literals |
from django.db import migrations, models
class Migration(migrations.Migration):
    """Orders users by id and links them to helusers AD groups."""

    dependencies = [
        ('helusers', '0001_add_ad_groups'),
        ('helevents', '0003_auto_20170915_1529'),
    ]

    operations = [
        # Default queryset ordering for the custom user model.
        migrations.AlterModelOptions(
            name='user',
            options={'ordering': ('id',)},
        ),
        # M2M relation to Active Directory groups provided by helusers.
        migrations.AddField(
            model_name='user',
            name='ad_groups',
            field=models.ManyToManyField(blank=True, to='helusers.ADGroup'),
        ),
    ]
ColumbiaCMB/kid_readout | kid_readout/measurement/acquire.py | Python | bsd-2-clause | 9,283 | 0.003878 | """
Basic framework for acquiring a roach measurement that includes both sweep(s) and stream(s).
Acquire
-Initialize equipment.
-Initialize roach: preload frequencies, if necessary.
-Create state dictionary containing state from all equipment, including temperatures, if possible.
-Run a coarse sweep, if necessary: create a SweepArray and extract resonance frequencies.
-Run fine sweeps to map out resonance frequencies carefully.
If desired, we can combine the data from coarse and fine sweeps into a single SweepArray.
All streams in these sweeps are created with the same roach state, which should not change during the sweeps.
The sweep(s) are created with the experiment state, which should also not change.
Acquire streams:
-Initialize equipment for stream(s).
-Initialize roach for stream(s).
-Create experiment state dictionary.
-Acquire a StreamArray.
-Repeat the stream acquisition as needed
-Instantiate the final measurement with all data, and save it to disk.
-Clean up equipment.
If instead we want to save data as it is collected, we can do that by writing a blank final measurement to disk, then
writing the sub-measurements as they are acquired.
"""
from __future__ import division
import os
import sys
import time
import inspect
import subprocess
import logging
import numpy as np
from kid_readout import settings
from kid_readout.utils import log
from kid_readout.measurement import core, basic
from kid_readout.measurement.io import nc, npy
logger = logging.getLogger(__name__)
# Frequency sweep
def load_baseband_sweep_tones(ri, tone_banks, num_tone_samples):
    """Stack all tone banks into one 2-D array and load them onto the roach via ri.set_tone_freqs.

    NOTE(review): identical to load_heterodyne_sweep_tones below — the split may be
    intentional API symmetry; confirm before merging them.
    """
    return ri.set_tone_freqs(freqs=np.vstack(tone_banks), nsamp=num_tone_samples)
def load_heterodyne_sweep_tones(ri, tone_banks, num_tone_samples):
    """Stack all tone banks into one 2-D array and load them onto the roach via ri.set_tone_freqs.

    NOTE(review): identical to load_baseband_sweep_tones above — the split may be
    intentional API symmetry; confirm before merging them.
    """
    return ri.set_tone_freqs(freqs=np.vstack(tone_banks), nsamp=num_tone_samples)
def run_sweep(ri, tone_banks, num_tone_samples, length_seconds=0, state=None, description='', verbose=False,
              wait_for_sync=0.1, **kwargs):
    """
    Return a SweepArray acquired using the given tone banks.

    Parameters
    ----------
    ri : RoachInterface
        An instance of a subclass.
    tone_banks : iterable of ndarray (float)
        An iterable of arrays (or a 2-D array) of frequencies to use for the sweep.
    num_tone_samples : int
        The number of samples in the playback buffer; must be a power of two.
    length_seconds : float
        The duration of each data stream; the default of 0 means the minimum unit of data that can be read out in the
        current configuration.
    state : dict
        The non-roach state to pass to the SweepArray.
    description : str
        A human-readable description of the measurement.
    verbose : bool
        If true, print progress messages.
    wait_for_sync : float
        Sleep for this time in seconds to let the ROACH sync finish.
    kwargs
        Keyword arguments passed to ri.get_measurement().

    Returns
    -------
    SweepArray
    """
    stream_arrays = core.MeasurementList()
    if verbose:
        print("Measuring bank")
    for n, tone_bank in enumerate(tone_banks):
        if verbose:
            # Write the bank index without a newline; avoids the Python-2-only
            # `print n,` statement so the module stays print-function compatible.
            sys.stdout.write('{} '.format(n))
            sys.stdout.flush()
        ri.set_tone_freqs(tone_bank, nsamp=num_tone_samples)
        ri.select_fft_bins(np.arange(tone_bank.size))
        # we wait a bit here to let the roach2 sync catch up. figuring this out still.
        time.sleep(wait_for_sync)
        stream_arrays.append(ri.get_measurement(num_seconds=length_seconds, **kwargs))
    return basic.SweepArray(stream_arrays, state=state, description=description)
def run_loaded_sweep(ri, length_seconds=0, state=None, description='', tone_bank_indices=None, bin_indices=None,
                     verbose=False, **kwargs):
    """
    Return a SweepArray acquired using previously-loaded tones.

    Parameters
    ----------
    ri : RoachInterface
        An instance of a subclass.
    length_seconds : float
        The duration of each data stream; the default of 0 means the minimum unit of data that can be read out in the
        current configuration.
    state : dict
        The non-roach state to pass to the SweepArray.
    description : str
        A human-readable description of the measurement.
    tone_bank_indices : numpy.ndarray[int]
        The indices of the tone banks to use in the sweep; the default is to use all existing.
    bin_indices : numpy.ndarray[int]
        The indices of the filterbank bins to read out; the default is to read out all bins.
    verbose : bool
        If true, print progress messages.
    kwargs
        Keyword arguments passed to ri.get_measurement().

    Returns
    -------
    SweepArray
    """
    if tone_bank_indices is None:
        tone_bank_indices = np.arange(ri.tone_bins.shape[0])
    if bin_indices is None:
        bin_indices = np.arange(ri.tone_bins.shape[1])
    stream_arrays = core.MeasurementList()
    if verbose:
        # Use write() instead of the Python-2-only `print ...,` statement so this
        # module stays consistent with the print() calls used elsewhere in the file.
        sys.stdout.write("Measuring bank: ")
    for tone_bank_index in tone_bank_indices:
        if verbose:
            sys.stdout.write('{} '.format(tone_bank_index))
            sys.stdout.flush()
        ri.select_bank(tone_bank_index)
        ri.select_fft_bins(bin_indices)
        stream_arrays.append(ri.get_measurement(num_seconds=length_seconds, **kwargs))
    return basic.SweepArray(stream_arrays, state=state, description=description)
def run_multipart_sweep(ri, length_seconds=0, state=None, description='', num_tones_read_at_once=32, verbose=False,
                        **kwargs):
    """Sweep all loaded tones by reading them out in interleaved groups of at most
    num_tones_read_at_once, then merge the partial sweeps into one SweepArray."""
    num_tones = ri.tone_bins.shape[1]
    # At least one step, even when there are fewer tones than the group size.
    num_steps = max(num_tones // num_tones_read_at_once, 1)
    all_indices = range(num_tones)
    stream_arrays = core.MeasurementList()
    for step in range(num_steps):
        if verbose:
            print("running sweep step {} of {}.".format(step, num_steps))
        part = run_loaded_sweep(ri, length_seconds=length_seconds, state=state, description=description,
                                bin_indices=all_indices[step::num_steps], **kwargs)
        stream_arrays.extend(list(part.stream_arrays))
    return basic.SweepArray(stream_arrays, state=state, description=description)
# Metadata
def script_code():
    """
    Return the source code of the module running as '__main__'. Acquisition scripts can use this to save their code.

    If loading the source raises any exception, its string representation is returned instead.

    Returns
    -------
    str
        The code, with lines separated by newline characters.
    """
    try:
        source = inspect.getsource(sys.modules['__main__'])
    except Exception as error:
        source = str(error)
    return source
def git_log():
    """Return the latest git log entry for the kid_readout checkout, or the error string on failure."""
    import kid_readout
    package_dir = os.path.dirname(os.path.abspath(kid_readout.__file__))
    command = "cd {}; git log -1".format(package_dir)
    try:
        return subprocess.check_output(command, shell=True)
    except Exception as error:
        return str(error)
def git_status():
    """Return the porcelain git status of the kid_readout checkout, or the error string on failure."""
    import kid_readout
    package_dir = os.path.dirname(os.path.abspath(kid_readout.__file__))
    command = "cd {}; git status --porcelain".format(package_dir)
    try:
        return subprocess.check_output(command, shell=True)
    except Exception as error:
        return str(error)
def all_metadata():
    """Collect the standard acquisition metadata: script source, git state, and cryostat settings."""
    return {'script_code': script_code(),
            'git_log': git_log(),
            'git_status': git_status(),
            'cryostat': settings.CRYOSTAT,
            'cooldown': settings.COOLDOWN}
# IO object creation
def new_nc_file(suffix='', directory=settings.BASE_DATA_DIR, metadata=None):
    """Create and return a new NCFile named with the current timestamp plus the given suffix.

    The suffix is prefixed with an underscore if it lacks one; metadata defaults to all_metadata().
    """
    if suffix and not suffix.startswith('_'):
        suffix = '_' + suffix
    if metadata is None:
        metadata = all_metadata()
    filename = time.strftime('%Y-%m-%d_%H%M%S') + suffix + nc.NCFile.EXTENSION
    root_path = os.path.join(directory, filename)
    logger.debug("Creating new NCFile with path %s", root_path)
    return nc.NCFile(root_path, metadata=metadata)
def new_npy_directory(suffix='', directory=settings.BASE_DATA_DIR, metadata=None):
if suffix and not suffix.startswith('_'):
suffix = '_' + suffix
if metadata is None:
metada |
neeraj9/gprsmonitor | src/sniff.py | Python | apache-2.0 | 1,409 | 0.023421 | #! /usr/bin/python
__author__="kebo"
__date__ ="$2009-11-5 11:15:55$"
import pcap
import sys
import string
import time
import socket
import struct
import getopt
# Map IP protocol numbers to short human-readable names.
protocols={socket.IPPROTO_TCP:'tcp',
           socket.IPPROTO_UDP:'udp',
           socket.IPPROTO_ICMP:'icmp'}
# Module-level globals; only `mb` is referenced (by print_packet) in this file.
# NOTE(review): `node` and `decoder` appear unused here — presumably set by other code.
node = None
mb = None
decoder = None
def send(payload):
    """Write `payload` to stdout framed with a 2-byte big-endian length prefix."""
    sz = len(payload)
    # NOTE(review): "!h" is a *signed* short, so payloads longer than 32767 bytes
    # raise struct.error — confirm whether unsigned "!H" was intended.
    header= struct.pack("!h", sz)
    return sys.stdout.write( header + payload )
def print_packet(pktlen, data, timestamp):
    """pcap dispatch callback: print a wall-clock timestamp for each captured packet.

    The packet bytes themselves are not printed; the send()/print calls are left
    commented out below.
    """
    global mb
    if not data:
        return
    #send(data)
    #print data
    #print timestamp
    # timestamp % 60 appends the (fractional) seconds to the HH:MM prefix.
    print '\n%s.%f' % (time.strftime('%H:%M',time.localtime(timestamp)),timestamp % 60)
if __name__=='__main__':
p = pcap.pcapObject()
#dev = pcap.lookupdev( | )
dev = "eth0"
net, mask = pcap.lookupnet(dev)
# note: to_ms does nothing on linux
p.open_live(dev, 1600, 0, 100)
#p.dump_open('dumpfile')
p.setfilter(string.join(["tcp","port 22"],' '), 0, 0)
# try-except block to catch keyboard interrupt. Failure to shut
# down cleanly can result in the interface not being taken out of promisc.
# mode
#p.setnonblock(1)
try:
while 1:
p.dispatch(1, print_packet)
except K | eyboardInterrupt:
print '%s' % sys.exc_type
print 'shutting down'
print '%d packets received, %d packets dropped, %d packets dropped by interface' % p.stats()
|
novafloss/django-formidable | formidable/exceptions.py | Python | mit | 99 | 0 |
class UnknownAccess(Exception):
    """
    Access doesn't exist for this user.
    """
    # The stray " | " split markers in the original class line and docstring
    # were copy/paste artifacts; the class name is UnknownAccess.
    pass
|
Arcanemagus/SickRage | tests/torrent_tests.py | Python | gpl-3.0 | 4,493 | 0.003784 | # coding=UTF-8
# Author: Dennis Lutter <lad1337@gmail.com>
# URL: https://sick-rage.github.io
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
"""
Test torrents
"""
# pylint: disable=line-too-long
import os.path
import sys
import unittest
sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../lib')))
sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
from bs4 import BeautifulSoup
from sickbeard.helpers import getURL, make_session
from sickbeard.providers.bitcannon import BitCannonProvider
from sickbeard.tv import TVEpisode, TVShow
import tests.test_lib as test
from six.moves import urllib
class TorrentBasicTests(test.SickbeardTestDBCase):
    """
    Test torrents
    """
    @classmethod
    def setUpClass(cls):
        """Build a single fake show/episode fixture shared by all tests."""
        cls.shows = []

        show = TVShow(1, 121361)
        # "show.name | =" in the source was a copy/paste artifact.
        show.name = "Italian Works"
        show.episodes = []
        episode = TVEpisode(show, 5, 10)
        episode.name = "Pines of Rome"
        episode.scene_season = 5
        episode.scene_episode = 10
        show.episodes.append(episode)
        cls.shows.append(show)

    def test_bitcannon(self):
        """
        Test bitcannon
        """
        bitcannon = BitCannonProvider()
        bitcannon.custom_url = ""  # true testing requires a valid URL here (e.g., "http://localhost:3000/")
        bitcannon.api_key = ""

        # Only exercises the provider when a custom URL is configured above.
        if bitcannon.custom_url:
            # pylint: disable=protected-access
            search_strings_list = bitcannon._get_episode_search_strings(self.shows[0].episodes[0])  # [{'Episode': ['Italian Works S05E10']}]
            for search_strings in search_strings_list:
                bitcannon.search(search_strings)  # {'Episode': ['Italian Works S05E10']}  # pylint: disable=protected-access

        return True

    @staticmethod
    @unittest.skip('KickAssTorrents is down, needs a replacement')  # TODO
    def test_search():  # pylint: disable=too-many-locals
        """
        Test searching
        """
        url = 'http://kickass.to/'
        search_url = 'http://kickass.to/usearch/American%20Dad%21%20S08%20-S08E%20category%3Atv/?field=seeders&sorder=desc'

        html = getURL(search_url, session=make_session(), returns='text')
        if not html:
            return

        soup = BeautifulSoup(html, 'html5lib')

        torrent_table = soup.find('table', attrs={'class': 'data'})
        torrent_rows = torrent_table('tr') if torrent_table else []

        # cleanup memory
        soup.clear(True)

        # Continue only if one Release is found
        if len(torrent_rows) < 2:
            print("The data returned does not contain any torrents")
            return

        for row in torrent_rows[1:]:
            try:
                link = urllib.parse.urljoin(url, (row.find('div', {'class': 'torrentname'})('a')[1])['href'])
                _id = row.get('id')[-7:]
                title = (row.find('div', {'class': 'torrentname'})('a')[1]).text \
                    or (row.find('div', {'class': 'torrentname'})('a')[2]).text
                url = row.find('a', 'imagnet')['href']
                verified = True if row.find('a', 'iverify') else False
                trusted = True if row.find('img', {'alt': 'verified'}) else False
                seeders = int(row('td')[-2].text)
                leechers = int(row('td')[-1].text)
                _ = link, _id, verified, trusted, seeders, leechers
            except (AttributeError, TypeError):
                continue

            print(title)
if __name__ == "__main__":
print("==================")
print("STARTING - Torrent Basic TESTS")
print("==================")
print("######################################################################")
SUITE = unittest.TestLoader().loadTestsFromTestCase(TorrentBasicTests)
unittest.TextTestRunner(verbosity=2).run(SUITE)
|
ttfseiko/openerp-trunk | openerp/addons/hr_attendance/wizard/hr_attendance_bymonth.py | Python | agpl-3.0 | 2,066 | 0.000968 | # -*- coding: utf-8 -*-
###############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import osv, fields
class hr_attendance_bymonth(osv.osv_memory):
    """Wizard that prints the monthly attendance report for the selected employees."""
    _name = 'hr.attendance.month'
    _description = 'Print Monthly Attendance Report'
    _columns = {
        'month': fields.selection([(1, 'January'), (2, 'February'), (3, 'March'), (4, 'April'), (5, 'May'), (6, 'June'), (7, 'July'), (8, 'August'), (9, 'September'), (10, 'October'), (11, 'November'), (12, 'December')], 'Month', required=True),
        # "fie | lds" in the source was a copy/paste artifact; the call is fields.integer.
        'year': fields.integer('Year', required=True)
    }
    _defaults = {
        # Default to the current (UTC) month and year.
        'month': lambda *a: time.gmtime()[1],
        'year': lambda *a: time.gmtime()[0],
    }

    def print_report(self, cr, uid, ids, context=None):
        """Return the ir.actions.report.xml action that renders hr.attendance.bymonth
        for the employees in context['active_ids']."""
        if context is None:
            # Standard OpenERP guard; a missing 'active_ids' key will still raise.
            context = {}
        datas = {
            'ids': [],
            'active_ids': context['active_ids'],
            'model': 'hr.employee',
            'form': self.read(cr, uid, ids)[0]
        }
        return {
            'type': 'ir.actions.report.xml',
            'report_name': 'hr.attendance.bymonth',
            'datas': datas,
        }
|
rajul/tvb-framework | tvb/core/services/texture_to_json.py | Python | gpl-2.0 | 2,352 | 0.005102 | # -*- coding: utf-8 -*-
#
#
# TheVirtualBrain-Framework Package. This package holds all Data Management, and
# Web-UI helpful to run brain-simulations. To use it, you also need do download
# TheVirtualBrain-Scientific Package (for simulators). See content of the
# documentation-folder for more details. See also http://www.thevirtualbrain.org
#
# (c) 2012-2013, Baycrest Centre for Geriatric Care ("Baycrest")
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 2 as published by the Free
# Software Foundation. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details. You should have received a copy of the GNU General
# Public License along with this program; if not, you can download it here
# http://www.gnu.org/licenses/old-licenses/gpl-2.0
#
#
# CITATION:
# When using The Virtual Brain for scientific publications, please cite it as follows:
#
# Paula Sanz Leon, Stuart A. Knock, M. Marmaduke Woodman, Lia Domide,
# Jochen Mersmann, Anthony R. McIntosh, Viktor Jirsa (2013)
# The Virtual Brain: a simulator of primate brain network dynamics.
# Frontiers in Neuroinformatics (7:10. doi: 10.3389/fninf.2013.00010)
#
#
"""
Converts a color scheme texture image to json arrays
.. moduleauthor:: Mihai Andrei <mihai.andrei@codemart.ro>
"""
import numpy
import Image
# See TVB-985
if not hasattr(Image, 'open'):
from Image import Image
def color_texture_to_list(img_pth, img_width, band_height):
    """
    :param img_pth: Path to the texture
    :param img_width: Texture width
    :param band_height: Height of a color scheme band
    :return: A list of img_width/band_height color schemes. A scheme is a list of img_width colors
    """
    im = Image.open(img_pth)
    ima = numpy.asarray(im)
    if ima.shape != (img_width, img_width, 4):
        raise ValueError("unexpected image shape " + str(ima.shape))
    # Sample the vertical coordinate at the top of each band, offset by half a pixel.
    # Integer floor division keeps this correct under both Python 2 and 3.
    num_bands = img_width // band_height
    tex_vs = [(i * band_height + 0.5) / img_width for i in range(num_bands)]
    color_schemes = []
    for v in tex_vs:
        idx = int(v * img_width)
        # Drop the alpha channel; keep RGB only.
        color_schemes.append(ima[idx, :, :3].tolist())
    return color_schemes
ibabushkin/BeastBot | src/inc/modules/tinyurl.py | Python | gpl-3.0 | 1,158 | 0.007772 | from inc import *
#from __future__ import with_statement
import re
import contextlib
from urllib import urlencode
from urllib2 import urlopen
import sys
# Register the chat commands handled by this module; 'tinygfm', 'tinylmgtfy'
# and 'lmgtfy' are all aliases for the LMGTFY shortener.
modFunc.addCommand('tiny', 'tinyurl', 'getTiny')
modFunc.addCommand('tinygfm', 'tinyurl', 'tinyLMGTFY')
modFunc.addCommand('tinylmgtfy', 'tinyurl', 'tinyLMGTFY')
modFunc.addCommand('lmgtfy', 'tinyurl', 'tinyLMGTFY')
def getTiny(line, irc):
    """IRC command handler: shorten the URL given after the command via TinyURL
    and say the result back to the channel/user."""
    message, whole, username, msgto = ircFunc.ircMessage(line.strip(), whl=True)
    url = ' '.join(message[1:])
    if url != "":
        request_url = ('http://tinyurl.com/api-create.php?%s' % urlencode({'url': url}))
        # "urlopen(r | equest_url)" in the source was a copy/paste artifact.
        with contextlib.closing(urlopen(request_url)) as response:
            ircFunc.ircSay(msgto, response.read().decode('utf-8'), irc)
def tinyLMGTFY(line, irc):
    """IRC command handler: wrap the given query in an LMGTFY link, shorten it
    via TinyURL, and say the result back to the channel/user."""
    message, whole, username, msgto = ircFunc.ircMessage(line.strip(), whl=True)
    url = ' '.join(message[1:])
    # NOTE(review): the query is concatenated without urlencoding, so spaces and
    # special characters pass through raw — confirm whether that is intended.
    url = 'http://lmgtfy.com/?q=' + url
    request_url = ('http://tinyurl.com/api-create.php?%s' % urlencode({'url': url}))
    with contextlib.closing(urlopen(request_url)) as response:
        ircFunc.ircSay(msgto, response.read().decode('utf-8'), irc)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.