repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
christianmemije/kolibri | kolibri/core/webpack/test/test_webpack_tags.py | Python | mit | 880 | 0.002273 | from __future__ import absolute_import, print_function, unicode_literals
from django.template import Template, Context
from django.test.testcases import TestCase
from .base import TestHook
class KolibriTagNavigationTestCase(TestCase):
def setUp(self):
super(KolibriTagNavigationTestCase, self).setUp | ()
| self.test_hook = TestHook()
def test_frontend_tag(self):
self.assertIn(
"non_default_frontend",
self.test_hook.render_to_page_load_sync_html()
)
def test_frontend_tag_in_template(self):
t = Template(
"""{% load webpack_tags %}\n{% webpack_asset 'non_default_frontend' %}""")
c = Context({})
self.test_hook._stats_file
self.assertIn(
self.test_hook.TEST_STATS_FILE_DATA['chunks'][TestHook.unique_slug][0]['name'],
t.render(c)
)
|
snogaraleal/adjax | adjax/tests/test_templatetags.py | Python | mit | 1,451 | 0 | from django.test import TestCase
try:
from django.contrib.staticfiles.templatetags.staticfiles import static
except ImportError:
from django.templatetags.static import static
try:
from django.core.urlresolvers import reverse
except ImportError:
from django.urls import reverse
from ..templatetags.adjax import (get_script_tag, get_script_tags,
| adjax_scripts)
class TemplateTagsTestCase(TestCase):
def test_get_script_tag(self):
""" Test script tag HTML.
"""
source = 'https://cdn.com/source.js'
expected_html = | ('<script type="text/javascript" '
'src="{}"></script>').format(source)
self.assertEqual(get_script_tag(source), expected_html)
def test_get_script_tags(self):
""" Test script tag HTML joining.
"""
a_html = get_script_tag('a')
b_html = get_script_tag('b')
c_html = get_script_tag('c')
self.assertEqual(get_script_tags('a', 'b', 'c'),
a_html + b_html + c_html)
def test_adjax_scripts(self):
""" Test adjax scripts template tag.
"""
adjax_js_html = adjax_scripts()
base_js_html = get_script_tag(static('adjax/base.js'))
interface_js_html = get_script_tag(reverse('adjax_interface'))
self.assertIn(base_js_html, adjax_js_html)
self.assertIn(interface_js_html, adjax_js_html)
|
Zlash65/erpnext | erpnext/accounts/general_ledger.py | Python | gpl-3.0 | 9,852 | 0.025376 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe, erpnext
from frappe.utils import flt, cstr, cint
from frappe import _
from frappe.model.meta import get_field_precision
from erpnext.accounts.doctype.budget.budget import validate_expense_against_budget
from erpnext.accounts.doctype.accounting_dimension.accounting_dimension import get_accounting_dimensions
class ClosedAccountingPeriod(frappe.ValidationError): pass
class StockAccountInvalidTransaction(frappe.ValidationError): pass
def make_gl_entries(gl_map, cancel=False, adv_adj=False, merge_entries=True, update_outstanding='Yes', from_repost=False):
if gl_map:
if not cancel:
validate_accounting_period(gl_map)
gl_map = process_gl_map(gl_map, merge_entries)
if gl_map and len(gl_map) > 1:
save_entries(gl_map, adv_adj, update_outstanding, from_repost)
else:
frappe.throw(_("Incorrect number of General Ledger Entries found. You might have selected a wrong Account in the transaction."))
else:
delete_gl_entries(gl_map, adv_adj=adv_adj, update_outstanding=update_outstanding)
def validate_accounting_period(gl_map):
accounting_periods = frappe.db.sql(""" SELECT
ap.name as name
FROM
`tabAccounting Period` ap, `tabClosed Document` cd
WHERE
ap.name = cd.parent
AND ap.company = %(company)s
AND cd.closed = 1
AND cd.document_type = %(voucher_type)s
AND %(date)s between ap.start_date and ap.end_date
""", {
'date': gl_map[0].posting_date,
'company': gl_map[0].company,
'voucher_type': gl_map[0].voucher_type
}, as_dict=1)
if accounting_periods:
frappe.throw(_("You can't create accounting entries in the closed accounting period {0}")
.format(accounting_periods[0].name), ClosedAccountingPeriod)
def process_gl_map(gl_map, merge_entries=True):
if merge_entries:
gl_map = merge_similar_entries(gl_map)
for entry in gl_map:
# toggle debit, credit if negative entry
if flt(entry.debit) < 0:
entry.credit = flt(entry.credit) - flt(entry.debit)
entry.debit = 0.0
if flt(entry.debit_in_account_currency) < 0:
entry.credit_in_account_currency = \
flt(entry.credit_in_account_currency) - flt(entry.debit_in_account_currency)
entry.debit_in_account_currency = 0.0
if flt(entry.credit) < 0:
entry.debit = flt(entry.debit) - flt(entry.credit)
entry.credit = 0.0
if flt(entry.credit_in_account_currency) < 0:
entry.debit_in_account_currency = \
flt(entry.debit_in_account_currency) - flt(entry.credit_in_account_currency)
entry.credit_in_account_currency = 0.0
return gl_map
def merge_similar_entries(gl_map):
merged_gl_map = []
accounting_dimensions = get_accounting_dimensions()
for entry in gl_map:
# if there is already an entry in this account then just add it
# to that entry
same_head = check_if_in_list(entry, merged_gl_map, accounting_dimensions)
if same_head:
same_head.debit = flt(same_head.debit) + flt(entry.debit)
same_head.debit_in_account_currency = \
flt(same_head.debit_in_account_currency) + flt(entry.debit_in_account_currency)
same_head.credit = flt(same_head.credit) + flt(entry.credit)
same_head.credit_in_account_currency = \
flt(same_head.credit_in_account_currency) + flt(entry.credit_in_account_currency)
else:
merged_gl_map.append(entry)
# filter zero debit and credit entries
merged_gl_map = filter(lambda x: flt(x.debit, 9)!=0 or flt(x.credit, 9)!=0, merged_gl_map)
merged_gl_map = list(merged_gl_map)
return merged_gl_map
def check_if_in_list(gle, gl_map, dimensions=None):
account_head_fieldnames = ['party_type', 'party', 'against_voucher', 'against_voucher_type',
'cost_center', 'project']
if dimensions:
account_head_fieldnames = account_head_fieldnames + dimensions
for e in gl_map:
same_head = True
if e.account != gle.account:
same_head = False
for fieldname in account_head_fieldnames:
if cstr(e.get(fieldname)) != cstr(gle.get(fieldname)):
same_head = False
if same_head:
return e
def save_entries(gl_map, adv_adj, update_outstanding, from_repost=False):
if not from_repost:
validate_account_for_perpetual_inventory(gl_map)
validate_cwip_accounts(gl_map)
round_off_debit_credit(gl_map)
for entry in gl_map:
make_entry(entry, adv_adj, update_outstanding, from_repost)
# check against budget
if not from_repost:
validate_expense_against_budget(entry)
def make_entry(args, adv_adj, update_outstanding, from_repost=False):
args.update({"doctype": "GL Entry"})
gle = frappe.get_doc(args)
gle.flags.ignore_permissions = 1
gle.flags.from_repost = from_repost
gle.insert()
gle.run_method("on_update_with_args", adv_adj, update_outstanding, from_repost)
gle.submit()
def validate_account_for_perpetual_inventory(gl_map):
if cint(erpnext.is_perpetual_inventory_enabled(gl_map[0].company)) \
and gl_m | ap[0].voucher_type=="Journal Entry":
aii_accounts = [d[0] for d in frappe.db.sql("""select name from | tabAccount
where account_type = 'Stock' and is_group=0""")]
for entry in gl_map:
if entry.account in aii_accounts:
frappe.throw(_("Account: {0} can only be updated via Stock Transactions")
.format(entry.account), StockAccountInvalidTransaction)
def validate_cwip_accounts(gl_map):
if not cint(frappe.db.get_value("Asset Settings", None, "disable_cwip_accounting")) \
and gl_map[0].voucher_type == "Journal Entry":
cwip_accounts = [d[0] for d in frappe.db.sql("""select name from tabAccount
where account_type = 'Capital Work in Progress' and is_group=0""")]
for entry in gl_map:
if entry.account in cwip_accounts:
frappe.throw(_("Account: <b>{0}</b> is capital Work in progress and can not be updated by Journal Entry").format(entry.account))
def round_off_debit_credit(gl_map):
precision = get_field_precision(frappe.get_meta("GL Entry").get_field("debit"),
currency=frappe.get_cached_value('Company', gl_map[0].company, "default_currency"))
debit_credit_diff = 0.0
for entry in gl_map:
entry.debit = flt(entry.debit, precision)
entry.credit = flt(entry.credit, precision)
debit_credit_diff += entry.debit - entry.credit
debit_credit_diff = flt(debit_credit_diff, precision)
if gl_map[0]["voucher_type"] in ("Journal Entry", "Payment Entry"):
allowance = 5.0 / (10**precision)
else:
allowance = .5
if abs(debit_credit_diff) >= allowance:
frappe.throw(_("Debit and Credit not equal for {0} #{1}. Difference is {2}.")
.format(gl_map[0].voucher_type, gl_map[0].voucher_no, debit_credit_diff))
elif abs(debit_credit_diff) >= (1.0 / (10**precision)):
make_round_off_gle(gl_map, debit_credit_diff, precision)
def make_round_off_gle(gl_map, debit_credit_diff, precision):
round_off_account, round_off_cost_center = get_round_off_account_and_cost_center(gl_map[0].company)
round_off_account_exists = False
round_off_gle = frappe._dict()
for d in gl_map:
if d.account == round_off_account:
round_off_gle = d
if d.debit_in_account_currency:
debit_credit_diff -= flt(d.debit_in_account_currency)
else:
debit_credit_diff += flt(d.credit_in_account_currency)
round_off_account_exists = True
if round_off_account_exists and abs(debit_credit_diff) <= (1.0 / (10**precision)):
gl_map.remove(round_off_gle)
return
if not round_off_gle:
for k in ["voucher_type", "voucher_no", "company",
"posting_date", "remarks", "is_opening"]:
round_off_gle[k] = gl_map[0][k]
round_off_gle.update({
"account": round_off_account,
"debit_in_account_currency": abs(debit_credit_diff) if debit_credit_diff < 0 else 0,
"credit_in_account_currency": debit_credit_diff if debit_credit_diff > 0 else 0,
"debit": abs(debit_credit_diff) if debit_credit_diff < 0 else 0,
"credit": debit_credit_diff if debit_credit_diff > 0 else 0,
"cost_center": round_off_cost_center,
"party_type": None,
"party": None,
"against_voucher_type": None,
"against_voucher": None
})
if not round_off_account_exists:
gl_map.append(round_off_gle)
def get_round_off_account_and_cost_center(company):
round_off_account, round_off_cost_center |
abuibrahim/nixops | nixops/resources/azure_reserved_ip_address.py | Python | lgpl-3.0 | 6,605 | 0.007116 | # -*- coding: utf-8 -*-
# Automatic provisioning of Azure reserved IP addresses.
import os
import azure
import time
from nixops.util import attr_property
from nixops.azure_common import ResourceDefinition, ResourceState, normalize_location
from azure.mgmt.network import *
class AzureReservedIPAddressDefinition(ResourceDefinition):
"""Definition of an Azure Reserved IP Address"""
@classmethod
def get_type(cls):
return "azure-reserved-ip-address"
@classmethod
def get_resource_type(cls):
return "azureReservedIPAddresses"
def __init__(self, xml):
ResourceDefinition.__init__(self, xml)
self.reserved_ip_address_name = self.get_option_value(xml, 'name', str)
self.copy_option(xml, 'resourceGroup', 'resource')
self.copy_location(xml)
self.copy_tags(xml)
self.copy_option(xml, 'idleTimeout', int)
self.copy_option(xml, 'domainNameLabel', str, optional = True)
self.copy_option(xml, 'reverseFqdn', str, optional = True)
self.allocation_method = 'Static'
def show_type(self):
return "{0} [{1}]".format(self.get_type(), self.location)
class AzureReservedIPAddressState(ResourceState):
"""State of an Azure Reserved IP Address"""
reserved_ip_address_name = attr_property("azure.name", None)
| resource_group = attr_property("azure.resourceGroup", None)
location = attr_property("azure.location", None)
tags = attr_property("azure.tags", {}, 'json')
idle_timeout = attr_property("azure.idleTimeout", None, int)
domain_name_label = attr_property("azure.domainNameLabel", None)
allocation_method = attr_property("azure.allocationMethod", N | one)
fqdn = attr_property("azure.fqdn", None)
reverse_fqdn = attr_property("azure.reverseFqdn", None)
ip_address = attr_property("azure.ipAddress", None)
@classmethod
def get_type(cls):
return "azure-reserved-ip-address"
def show_type(self):
s = super(AzureReservedIPAddressState, self).show_type()
if self.state == self.UP: s = "{0} [{1}]".format(s, self.location)
return s
@property
def resource_id(self):
return self.reserved_ip_address_name
@property
def full_name(self):
return "Azure reserved IP address '{0}'".format(self.reserved_ip_address_name)
@property
def public_ipv4(self):
return self.ip_address
def get_resource(self):
try:
return self.nrpc().public_ip_addresses.get(
self.resource_group, self.resource_id).public_ip_address
except azure.common.AzureMissingResourceHttpError:
return None
def destroy_resource(self):
self.nrpc().public_ip_addresses.delete(self.resource_group, self.resource_id)
defn_properties = [ 'location', 'tags', 'idle_timeout', 'allocation_method',
'domain_name_label', 'reverse_fqdn' ]
def create_or_update(self, defn):
dns_settings = PublicIpAddressDnsSettings(
domain_name_label = defn.domain_name_label,
reverse_fqdn = defn.reverse_fqdn
) if defn.domain_name_label or defn.reverse_fqdn else None
self.nrpc().public_ip_addresses.create_or_update(
defn.resource_group, defn.reserved_ip_address_name,
PublicIpAddress(
location = defn.location,
public_ip_allocation_method = defn.allocation_method,
idle_timeout_in_minutes = defn.idle_timeout,
tags = defn.tags,
dns_settings = dns_settings
))
self.state = self.UP
self.copy_properties(defn)
address = self.get_settled_resource()
self.ip_address = address.ip_address
self.fqdn = address.dns_settings and address.dns_settings.fqdn
self.log("reserved IP address: {0}".format(self.ip_address))
if self.fqdn:
self.log("got domain name: {0}".format(self.fqdn))
def create(self, defn, check, allow_reboot, allow_recreate):
self.no_subscription_id_change(defn)
self.no_location_change(defn)
self.no_property_change(defn, 'resource_group')
self.copy_mgmt_credentials(defn)
self.reserved_ip_address_name = defn.reserved_ip_address_name
self.resource_group = defn.resource_group
if check:
address = self.get_settled_resource()
if not address:
self.warn_missing_resource()
elif self.state == self.UP:
self.warn_if_failed(address)
self.handle_changed_property('location', normalize_location(address.location),
can_fix = False)
self.handle_changed_property('tags', address.tags)
self.handle_changed_property('ip_address', address.ip_address, property_name = '')
self.handle_changed_property('idle_timeout', address.idle_timeout_in_minutes)
self.handle_changed_property('allocation_method', address.public_ip_allocation_method)
_dns = address.dns_settings
self.handle_changed_property('domain_name_label',
_dns and _dns.domain_name_label)
self.handle_changed_property('reverse_fqdn',
_dns and _dns.reverse_fqdn)
self.handle_changed_property('fqdn', _dns and _dns.fqdn)
else:
self.warn_not_supposed_to_exist(valuable_resource = True)
self.confirm_destroy()
if self.state != self.UP:
if self.get_settled_resource():
raise Exception("tried creating a reserved IP address that already exists; "
"please run 'deploy --check' to fix this")
self.log("creating {0} in {1}...".format(self.full_name, defn.location))
self.create_or_update(defn)
if self.properties_changed(defn):
self.log("updating properties of {0}...".format(self.full_name))
self.get_settled_resource_assert_exists()
self.create_or_update(defn)
def create_after(self, resources, defn):
from nixops.resources.azure_resource_group import AzureResourceGroupState
return {r for r in resources
if isinstance(r, AzureResourceGroupState)} |
amwelch/a10sdk-python | a10sdk/core/configure/configure.py | Python | apache-2.0 | 818 | 0.012225 | from a10sdk.common.A10BaseClass import A10BaseClass
class Configure(A10BaseClass):
"""Class Description::
Configure sync Commands.
Class configure supports CRUD Operations and inherits from `common/A10BaseClass`.
This class is the `"PARENT"` class for this module.`
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
URL for this object::
`https://<Hostname|Ip address>//axapi/v3/configure`.
"""
def __init__(self, **kwargs):
self | .ERROR_MSG = ""
self.required=[]
self.b_key = "configure"
self.a10_url="/axapi/v3/configure"
self.DeviceProxy = " | "
self.sync = {}
for keys, value in kwargs.items():
setattr(self,keys, value)
|
opendatagroup/hadrian | titus/titus/lib/array.py | Python | apache-2.0 | 49,343 | 0.008735 | #!/usr/bin/env python
# Copyright (C) 2014 Open Data ("Open Data" refers to
# one or more of the following companies: Open Data Partners LLC,
# Open Data Research LLC, or Open Data Capital LLC.)
#
# This file is part of Hadrian.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import itertools
import j | son
from titus.fcn import Fcn
from titus.fcn import LibFcn
from titus.signature import Sig
from titus.signature import Sigs
from | titus.datatype import *
from titus.errors import *
from titus.util import callfcn, negativeIndex, startEnd
from titus.lib.core import INT_MIN_VALUE
from titus.lib.core import INT_MAX_VALUE
from titus.lib.core import LONG_MIN_VALUE
from titus.lib.core import LONG_MAX_VALUE
import titus.P as P
provides = {}
def provide(fcn):
provides[fcn.name] = fcn
prefix = "a."
anyNumber = set([AvroInt(), AvroLong(), AvroFloat(), AvroDouble()])
def toCmp(state, scope, lessThan):
return lambda a, b: -1 if callfcn(state, scope, lessThan, [a, b]) else 1 if callfcn(state, scope, lessThan, [b, a]) else 0
def toLt(state, scope, lessThan):
return lambda a, b: callfcn(state, scope, lessThan, [a, b])
def checkRange(length, index, code, fcnName, pos):
if index < 0 or index >= length:
raise PFARuntimeException("index out of range", code, fcnName, pos)
#################################################################### basic access
class Len(LibFcn):
name = prefix + "len"
sig = Sig([{"a": P.Array(P.Wildcard("A"))}], P.Int())
errcodeBase = 15000
def genpy(self, paramTypes, args, pos):
return "len({0})".format(*args)
def __call__(self, state, scope, pos, paramTypes, a):
return len(a)
provide(Len())
class Subseq(LibFcn):
name = prefix + "subseq"
sig = Sig([{"a": P.Array(P.Wildcard("A"))}, {"start": P.Int()}, {"end": P.Int()}], P.Array(P.Wildcard("A")))
errcodeBase = 15010
def genpy(self, paramTypes, args, pos):
return "{0}[{1}:{2}]".format(*args)
def __call__(self, state, scope, pos, paramTypes, a, start, end):
return a[start:end]
provide(Subseq())
class Head(LibFcn):
name = prefix + "head"
sig = Sig([{"a": P.Array(P.Wildcard("A"))}], P.Wildcard("A"))
errcodeBase = 15020
def __call__(self, state, scope, pos, paramTypes, a):
if len(a) == 0:
raise PFARuntimeException("empty array", self.errcodeBase + 0, self.name, pos)
else:
return a[0]
provide(Head())
class Tail(LibFcn):
name = prefix + "tail"
sig = Sig([{"a": P.Array(P.Wildcard("A"))}], P.Array(P.Wildcard("A")))
errcodeBase = 15030
def __call__(self, state, scope, pos, paramTypes, a):
if len(a) == 0:
raise PFARuntimeException("empty array", self.errcodeBase + 0, self.name, pos)
else:
return a[1:]
provide(Tail())
class Last(LibFcn):
name = prefix + "last"
sig = Sig([{"a": P.Array(P.Wildcard("A"))}], P.Wildcard("A"))
errcodeBase = 15040
def __call__(self, state, scope, pos, paramTypes, a):
if len(a) == 0:
raise PFARuntimeException("empty array", self.errcodeBase + 0, self.name, pos)
else:
return a[-1]
provide(Last())
class Init(LibFcn):
name = prefix + "init"
sig = Sig([{"a": P.Array(P.Wildcard("A"))}], P.Array(P.Wildcard("A")))
errcodeBase = 15050
def __call__(self, state, scope, pos, paramTypes, a):
if len(a) == 0:
raise PFARuntimeException("empty array", self.errcodeBase + 0, self.name, pos)
else:
return a[:-1]
provide(Init())
class SubseqTo(LibFcn):
name = prefix + "subseqto"
sig = Sig([{"a": P.Array(P.Wildcard("A"))}, {"start": P.Int()}, {"end": P.Int()}, {"replacement": P.Array(P.Wildcard("A"))}], P.Array(P.Wildcard("A")))
errcodeBase = 15060
def __call__(self, state, scope, pos, paramTypes, a, start, end, replacement):
normStart, normEnd = startEnd(len(a), start, end)
before = a[:normStart]
after = a[normEnd:]
return before + replacement + after
provide(SubseqTo())
#################################################################### searching
class Contains(LibFcn):
name = prefix + "contains"
sig = Sigs([Sig([{"haystack": P.Array(P.Wildcard("A"))}, {"needle": P.Array(P.Wildcard("A"))}], P.Boolean()),
Sig([{"haystack": P.Array(P.Wildcard("A"))}, {"needle": P.Wildcard("A")}], P.Boolean()),
Sig([{"haystack": P.Array(P.Wildcard("A"))}, {"needle": P.Fcn([P.Wildcard("A")], P.Boolean())}], P.Boolean())])
errcodeBase = 15070
def __call__(self, state, scope, pos, paramTypes, haystack, needle):
if isinstance(needle, (list, tuple)):
for start in xrange(len(haystack) - len(needle) + 1):
if needle == haystack[start:(start + len(needle))]:
return True
return False
elif callable(needle):
for item in haystack:
if callfcn(state, scope, needle, [item]):
return True
return False
else:
try:
haystack.index(needle)
except ValueError:
return False
else:
return True
provide(Contains())
class Count(LibFcn):
name = prefix + "count"
sig = Sigs([Sig([{"haystack": P.Array(P.Wildcard("A"))}, {"needle": P.Array(P.Wildcard("A"))}], P.Int()),
Sig([{"haystack": P.Array(P.Wildcard("A"))}, {"needle": P.Wildcard("A")}], P.Int()),
Sig([{"a": P.Array(P.Wildcard("A"))}, {"predicate": P.Fcn([P.Wildcard("A")], P.Boolean())}], P.Int())])
errcodeBase = 15080
def __call__(self, state, scope, pos, paramTypes, haystack, needle):
if len(haystack) == 0:
return 0
else:
if isinstance(needle, (list, tuple)):
if len(needle) == 0:
return 0
else:
count = 0
for start in xrange(len(haystack) - len(needle) + 1):
if needle == haystack[start:(start + len(needle))]:
count += 1
return count
elif callable(needle):
count = 0
for item in haystack:
if callfcn(state, scope, needle, [item]):
count += 1
return count
else:
return haystack.count(needle)
provide(Count())
class Index(LibFcn):
name = prefix + "index"
sig = Sigs([Sig([{"haystack": P.Array(P.Wildcard("A"))}, {"needle": P.Array(P.Wildcard("A"))}], P.Int()),
Sig([{"haystack": P.Array(P.Wildcard("A"))}, {"needle": P.Wildcard("A")}], P.Int()),
Sig([{"haystack": P.Array(P.Wildcard("A"))}, {"needle": P.Fcn([P.Wildcard("A")], P.Boolean())}], P.Int())])
errcodeBase = 15090
def __call__(self, state, scope, pos, paramTypes, haystack, needle):
if isinstance(needle, (list, tuple)):
for start in xrange(len(haystack) - len(needle) + 1):
if needle == haystack[start:(start + len(needle))]:
return start
return -1
elif callable(needle):
for index, item in enumerate(haystack):
if callfcn(state, scope, needle, [item]):
return index
return -1
else:
try:
return haystack.index(needle)
except ValueError:
return -1
provide(Index())
class RIndex(LibFcn):
name = prefix + "rindex"
sig = Sigs([Sig([{"haystack": P.Array(P.Wildcard("A") |
jim-easterbrook/pyctools-demo | src/scripts/temporal_alias/stage_2.py | Python | gpl-3.0 | 2,808 | 0.005342 | #!/usr/bin/env python
# File written by pyctools-editor. Do not edit.
import argparse
import logging
from pyctools.core.compound import Compound
import pyctools.components.arithmetic
import pyctools.components.qt.qtdisplay
import pyctools.components.zone.zoneplategenerator
class Network(object):
components = \
{ 'clipper': { 'class': 'pyctools.components.arithmetic.Arithmetic',
'config': "{'func': '16+((data > 180)*219)'}",
'pos': (200.0, 200.0)},
'clipper2': { 'class': 'pyctools.components.arithmetic.Arithmetic',
'config': "{'func': '16+((data > 230)*219)'}",
'pos': (200.0, 330.0)},
'qd': { 'class': 'pyctools.components.qt.qtdisplay.QtDisplay',
'config': "{'framerate': 60}",
'pos': (460.0, 200.0)},
'stacker': { 'class': 'pyctools.components.arithmetic.Arithmetic2',
'config': "{'func': 'numpy.vstack((data1,data2))'}",
'pos': (330.0, 200.0)},
'zpg': { 'class': 'pyctools.components.zone.zoneplategenerator. | ZonePlateGenerator',
'config': "{'kx': 0 | .04, 'kt': -0.34, 'xlen': 600, 'ylen': "
"400, 'zlen': 1000, 'looping': 'repeat'}",
'pos': (70.0, 200.0)},
'zpg2': { 'class': 'pyctools.components.zone.zoneplategenerator.ZonePlateGenerator',
'config': "{'kx': 0.002, 'kt': -0.017, 'xlen': 600, 'ylen': "
"200, 'zlen': 1000, 'looping': 'repeat'}",
'pos': (70.0, 330.0)}}
linkages = \
{ ('clipper', 'output'): [('stacker', 'input1')],
('clipper2', 'output'): [('stacker', 'input2')],
('stacker', 'output'): [('qd', 'input')],
('zpg', 'output'): [('clipper', 'input')],
('zpg2', 'output'): [('clipper2', 'input')]}
def make(self):
comps = {}
for name, component in self.components.items():
comps[name] = eval(component['class'])(config=eval(component['config']))
return Compound(linkages=self.linkages, **comps)
if __name__ == '__main__':
from PyQt5 import QtCore, QtWidgets
QtWidgets.QApplication.setAttribute(QtCore.Qt.AA_X11InitThreads)
app = QtWidgets.QApplication([])
comp = Network().make()
cnf = comp.get_config()
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
cnf.parser_add(parser)
parser.add_argument('-v', '--verbose', action='count', default=0,
help='increase verbosity of log messages')
args = parser.parse_args()
logging.basicConfig(level=logging.ERROR - (args.verbose * 10))
del args.verbose
cnf.parser_set(args)
comp.set_config(cnf)
comp.start()
app.exec_()
comp.stop()
comp.join()
|
yuce/pyswip | tests/test_prolog.py | Python | mit | 4,048 | 0.001977 | # -*- coding: utf-8 -*-
# pyswip -- Python SWI-Prolog bridge
# Copyright (c) 2007-2012 Yüce Tekol
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""
Tests the Prolog class.
"""
import unittest
import doctest
import pyswip.prolog as pl # This implicitly tests library loading code
class TestProlog(unittest.TestCase):
"""
Unit tests for prolog module (contains only Prolog class).
"""
def test_nested_queries(self):
"""
SWI-Prolog cannot have nested queries called by the foreign function
interface, that is, if we open a query and are getting results from it,
we cannot open another query before closing that one.
Since this is a user error, we just ensure that a appropriate error
message is thrown.
"""
p = pl.Prolog()
# Add something to the base
p.assertz("father(john,mich)")
p.assertz("father(john,gina)")
p.assertz("mother(jane,mich)")
somequery = "father(john, Y)"
otherquery = "mother(jane, X)"
# This should not throw an excepti | on
for _ in p.query(somequery):
pass
for _ in p.query(otherquery):
pass
with self.assertRaises(pl.NestedQueryError):
for q in p.query(somequery):
for j in p.query(otherquery):
# This should throw an error, because I opened the second
# query
pass
def test_prolog_functor_in_list(self):
p = pl.Prolog()
p.assertz('f([g | (a,b),h(a,b,c)])')
self.assertEqual([{"L": [u"g(a, b)", u"h(a, b, c)"]}], list(p.query("f(L)")))
p.retract("f([g(a,b),h(a,b,c)])")
def test_prolog_functor_in_functor(self):
p = pl.Prolog()
p.assertz("f([g([h(a,1), h(b,1)])])")
self.assertEqual([{'G': [u"g(['h(a, 1)', 'h(b, 1)'])"]}], list(p.query('f(G)')))
p.assertz("a([b(c(x), d([y, z, w]))])")
self.assertEqual([{'B': [u"b(c(x), d(['y', 'z', 'w']))"]}], list(p.query('a(B)')))
p.retract("f([g([h(a,1), h(b,1)])])")
p.retract("a([b(c(x), d([y, z, w]))])")
def test_prolog_strings(self):
"""
See: https://github.com/yuce/pyswip/issues/9
"""
p = pl.Prolog()
p.assertz('some_string_fact("abc")')
self.assertEqual([{"S": b"abc"}], list(p.query("some_string_fact(S)")))
def test_quoted_strings(self):
"""
See: https://github.com/yuce/pyswip/issues/90
"""
p = pl.Prolog()
self.assertEqual([{"X": b"a"}], list(p.query('X = "a"')))
p.assertz('test_quoted_strings("hello","world")')
self.assertEqual([{"A": b"hello", "B": b"world"}], list(p.query('test_quoted_strings(A,B)')))
def test_prolog_read_file(self):
"""
See: https://github.com/yuce/pyswip/issues/10
"""
prolog = pl.Prolog()
prolog.consult("tests/test_read.pl")
list(prolog.query('read_file("tests/test_read.pl", S)'))
|
google-research/language | language/conpono/create_pretrain_data/books_preproc_pipeline.py | Python | apache-2.0 | 8,949 | 0.00894 | # coding=utf-8
# Copyright 2018 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Beam pipeline to convert BooksCorpus to shareded TFRecords."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import hashlib
import os
import random
from absl import app
from absl import flags
import apache_beam as beam
from bert import tokenization
from language.conpono.create_pretrain_data.preprocessing_utils import convert_instance_to_tf_example
from language.conpono.create_pretrain_data.preprocessing_utils import create_instances_from_document
from language.conpono.create_pretrain_data.preprocessing_utils import create_paragraph_order_from_document
import nltk
from nltk.tokenize import sent_tokenize
import tensorflow.compat.v1 as tf
FORMAT_BINARY = "binary"
FORMAT_PARAGRAPH = "paragraph"
flags.DEFINE_string("input_file", None, "Path to raw input files.")
flags.DEFINE_string("output_file", None, "Output TF example file.")
flags.DEFINE_string("vocab_file", None,
"The vocabulary file that the BERT model was trained on.")
flags.DEFINE_integer("max_seq_length", 512, "Maximum sequ | ence length.")
flags.DEFINE_float("test_size", 0.1,
"Size of test set by factor of total dataset.")
flags.DEFINE_float("dev_size", 0.1,
"Size of dev set by factor of total dataset.")
flags.DEFINE_int | eger("random_seed", 12345, "A random seed")
flags.DEFINE_bool(
"do_lower_case", True,
"Whether to lower case the input text. Should be True for uncased "
"models and False for cased models.")
flags.DEFINE_enum(
"format", FORMAT_PARAGRAPH, [FORMAT_BINARY, FORMAT_PARAGRAPH],
"Build a dataset of either binary order or paragraph reconstrucition")
FLAGS = flags.FLAGS
def read_file(filename):
"""Read the contents of filename (str) and split into documents by chapter."""
all_documents = []
document = []
with tf.gfile.GFile(filename, "r") as reader:
for line in reader:
line = line.strip()
if not line:
continue
if line.lower()[:7] == "chapter":
if document:
all_documents.append(document)
document = []
else:
document.append(line)
if document:
all_documents.append(document)
return all_documents
def split_line_by_sentences(line):
return sent_tokenize(line)
def preproc_doc(document):
"""Convert document to list of TF Examples for binary order classification.
Args:
document: a chapter from one book as a list of lines
Returns:
A list of tfexamples of binary orderings of pairs of sentences in the
document. The tfexamples are serialized to string to be written directly
to TFRecord.
"""
# Each document is a list of lines
tokenizer = tokenization.FullTokenizer(
vocab_file=FLAGS.vocab_file, do_lower_case=FLAGS.do_lower_case)
# set a random seed for reproducability
# since this function is run in parallel, if we hardcode a seed, all
# documents will have the same permutations. Instead we use the hash of the
# first sentence as the seed so it is different for each document and it
# is still reproducible.
hash_object = hashlib.md5(document[0])
rng = random.Random(int(hash_object.hexdigest(), 16) % (10**8))
# Each document is composed of a list of sentences. We create paragraphs
# by keeping together sentences on the same line and adding adjacent sentences
# if there are fewer than 5 to form the paragraph.
# The utility functions below expect the document to be split by paragraphs.
list_of_paragraphs = []
paragraph = []
for line in document:
line = tokenization.convert_to_unicode(line)
line = line.replace(u"\u2018", "'").replace(u"\u2019", "'")
sents = split_line_by_sentences(line)
for sent in sents:
tokens = tokenizer.tokenize(sent)
if tokens:
paragraph.append(tokens)
if len(paragraph) > 5:
list_of_paragraphs.append(paragraph)
paragraph = []
# In case of any empty paragraphs, remove them.
list_of_paragraphs = [x for x in list_of_paragraphs if x]
# Convert the list of paragraphs into TrainingInstance object
# See preprocessing_utils.py for definition
if FLAGS.format == FORMAT_BINARY:
instances = create_instances_from_document(list_of_paragraphs,
FLAGS.max_seq_length, rng)
elif FLAGS.format == FORMAT_PARAGRAPH:
instances = create_paragraph_order_from_document(list_of_paragraphs,
FLAGS.max_seq_length, rng)
# Convert token lists into ids and add any needed tokens and padding for BERT
tf_examples = [
convert_instance_to_tf_example(tokenizer, instance,
FLAGS.max_seq_length)[0]
for instance in instances
]
# Serialize TFExample for writing to file.
tf_examples = [example.SerializeToString() for example in tf_examples]
return tf_examples
def books_pipeline():
"""Read Books Corpus filenames and create Beam pipeline."""
# set a random seed for reproducability
rng = random.Random(FLAGS.random_seed)
# BooksCorpus is organized into directories of genre and files of books
# adventure-all.txt seems to contain all the adventure books in 1 file
# romance-all.txt is the same. None of the other directories have this,
# so we will skip it to not double count those books
file_name_set = set()
input_files_by_genre = collections.defaultdict(list)
for path, _, fnames in tf.gfile.Walk(FLAGS.input_file):
genre = path.split("/")[-1]
for fname in fnames:
if fname == "adventure-all.txt" or fname == "romance-all.txt":
continue
if fname in file_name_set:
continue
file_name_set.add(fname)
input_files_by_genre[genre].append(path + "/" + fname)
# Sort genres and iterate in order for reproducability
train_files, dev_files, test_files = [], [], []
for genre, file_list in sorted(input_files_by_genre.items()):
rng.shuffle(file_list)
genre_size = len(file_list)
test_size = int(FLAGS.test_size * genre_size)
dev_size = int(FLAGS.dev_size * genre_size)
test_files.extend(file_list[:test_size])
dev_files.extend(file_list[test_size:test_size + dev_size])
train_files.extend(file_list[test_size + dev_size:])
assert len(file_list[:test_size]) + \
len(file_list[test_size:test_size+dev_size]) + \
len(file_list[test_size+dev_size:]) == len(file_list)
# make sure there is no test train overlap
for filename in train_files:
assert filename not in test_files
assert filename not in dev_files
for filename in dev_files:
assert filename not in test_files
rng.shuffle(train_files)
rng.shuffle(dev_files)
rng.shuffle(test_files)
def pipeline(root):
"""Beam pipeline for converting Books Corpus files to TF Examples."""
_ = (
root | "Create test files" >> beam.Create(test_files)
| "Read test files" >> beam.FlatMap(read_file)
| "test Shuffle" >> beam.Reshuffle()
| "Preproc test docs" >> beam.FlatMap(preproc_doc)
| "record test Shuffle" >> beam.Reshuffle()
| "Write to test tfrecord" >> beam.io.WriteToTFRecord(
FLAGS.output_file + "." + FLAGS.format + ".test.tfrecord",
num_shards=100))
_ = (
root | "Create dev files" >> beam.Create(dev_files)
| "Read dev files" >> beam.FlatMap(read_file)
| "dev Shuffle" >> beam.Reshuffle()
| "Preproc dev docs" >> beam.FlatMap(preproc_doc)
| "record dev Shuffle" >> |
fhdk/pacman-mirrors | pacman_mirrors/translation/i18n.py | Python | gpl-3.0 | 1,832 | 0 | #!/usr/bin/env python
#
# This file is part of pacman-mirrors.
#
# pacman-mirrors is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pacman-mirrors is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pacman-mirrors. If not, see <http://www.gnu.org/licenses/>.
#
# from https://wiki.maemo.org/Internationalize_a_Python_application
"""Pacman-Mirrors Translation Module"""
import os
import sys
import locale
import gettext
# The translation files will be under
# @ | LOCALE_DIR@/@LANGUAGE@/LC_MESSAGES/@APP_NAME@.mo
APP_NAME = "pacman_mirrors"
APP_DIR = os.path.join(sys.prefix, "share")
LOCALE_DIR = os.path.join(APP_DIR, "locale")
CODESET = "utf-8"
# Now we need to choose the language. We will provide a list, and gettext
# will use the first translation available in the list
LANGUAGES = []
try:
user_locale = locale.getdefaultlocale()[0]
if | user_locale:
LANGUAGES += user_locale
except ValueError:
pass
LANGUAGES += os.environ.get("LANGUAGE", "").split(":")
LANGUAGES += ["en_US"]
# Lets tell those details to gettext
# (nothing to change here for you)
gettext.install(True)
gettext.bindtextdomain(APP_NAME, LOCALE_DIR)
gettext.bind_textdomain_codeset(APP_NAME, codeset=CODESET)
gettext.textdomain(APP_NAME)
language = gettext.translation(APP_NAME, LOCALE_DIR, LANGUAGES, fallback=True)
# Add this to every module:
#
# import i18n
# _ = i18n.language.gettext
|
megaserg/pants | tests/python/pants_test/backend/codegen/tasks/test_apache_thrift_gen.py | Python | apache-2.0 | 2,555 | 0.003914 | # coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from textwrap import dedent
from pants.backend.codegen.targets.java_thrift_library import JavaThriftLibrary
from pants.backend.codegen.tasks.apache_thrift_gen import ApacheThriftGen
from pants.backend.jvm.targets.java_library import JavaLibrary
from pants_test.tasks.task_test_base import TaskTestBase
class ApacheThriftGenTest(TaskTestBase):
@classmethod
def task_type(cls):
return A | pacheThriftGen
def setUp(self):
super(ApacheThriftGenTest, self).setUp()
def generat | e_single_thrift_target(self, java_thrift_library):
context = self.context(target_roots=[java_thrift_library])
apache_thrift_gen = self.create_task(context)
apache_thrift_gen.execute()
def is_synthetic_java_library(target):
return isinstance(target, JavaLibrary) and target.is_synthetic
synthetic_targets = context.targets(predicate=is_synthetic_java_library)
self.assertEqual(1, len(synthetic_targets))
return synthetic_targets[0]
def test_single_namespace(self):
self.create_file('src/thrift/com/foo/one.thrift', contents=dedent("""
namespace java com.foo
struct One {}
"""))
one = self.make_target(spec='src/thrift/com/foo:one',
target_type=JavaThriftLibrary,
sources=['one.thrift'],
compiler='thrift')
synthetic_target = self.generate_single_thrift_target(one)
self.assertEqual(['com/foo/One.java'], list(synthetic_target.sources_relative_to_source_root()))
def test_nested_namespaces(self):
self.create_file('src/thrift/com/foo/one.thrift', contents=dedent("""
namespace java com.foo
struct One {}
"""))
self.create_file('src/thrift/com/foo/bar/two.thrift', contents=dedent("""
namespace java com.foo.bar
struct Two {}
"""))
one = self.make_target(spec='src/thrift/com/foo:one',
target_type=JavaThriftLibrary,
sources=['one.thrift', 'bar/two.thrift'],
compiler='thrift')
synthetic_target = self.generate_single_thrift_target(one)
self.assertEqual(sorted(['com/foo/One.java', 'com/foo/bar/Two.java']),
sorted(synthetic_target.sources_relative_to_source_root()))
|
morelab/weblabdeusto | server/src/test/unit/weblab/lab/test_status_handler.py | Python | bsd-2-clause | 3,023 | 0.000992 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2005 onwards University of Deusto
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
# This software consists of contributions made by many individuals,
# listed below:
#
# Author: Jaime Irurzun <jaime.irurzun@gmail.com>
#
from __future__ import print_function, unicode_literals
import unittest
from weblab.lab.status_handler import WebcamIsUpAndRunningHandler, HostIsUpAndRunningHandler, AbstractLightweightIsUpAndRunningHandler
import weblab.lab.exc as LaboratoryErrors
import test.unit.weblab.lab.fake_urllib2 as FakeUrllib2
| import test.unit.weblab.lab.fake_socket as FakeSocket
class AbstractLightweightIsUpAndRunningHandlerTestCase(unittest.TestCase):
def test_not_implemented(self):
self.assertRaises(
TypeError,
AbstractLightweightIsUpAndRunningHandler
)
class WebcamIsUpAndRunningHandlerTestCase(unittest.TestCase):
def setUp(self):
self.handler = WebcamIsUpAndRunningHandler("https://...")
FakeUrllib2.rese | t()
self.handler._urllib2 = FakeUrllib2
def test_run_ok(self):
FakeUrllib2.expected_action = FakeUrllib2.HTTP_OK
self.handler.run()
def test_run_exception_bad_response(self):
FakeUrllib2.expected_action = FakeUrllib2.HTTP_URL_ERROR
self.assertRaises(
LaboratoryErrors.ImageURLDidNotRetrieveAResponseError,
self.handler.run
)
def test_run_exception_bad_content(self):
FakeUrllib2.expected_action = FakeUrllib2.HTTP_BAD_CONTENT
self.assertRaises(
LaboratoryErrors.InvalidContentTypeRetrievedFromImageURLError,
self.handler.run
)
def test_run_times(self):
messages = self.handler.run_times()
self.assertEquals([], messages)
FakeUrllib2.expected_action = FakeUrllib2.HTTP_BAD_CONTENT
messages = self.handler.run_times()
self.assertEquals(WebcamIsUpAndRunningHandler.DEFAULT_TIMES, len(messages))
class HostIsUpAndRunningHandlerTestCase(unittest.TestCase):
def setUp(self):
FakeSocket.reset()
self.handler = HostIsUpAndRunningHandler("hostname", 80)
self.handler._socket = FakeSocket
def test_run_ok(self):
FakeSocket.expected_action = FakeSocket.OK
self.handler.run()
def test_run_error(self):
FakeSocket.expected_action = FakeSocket.ERROR
self.assertRaises(
LaboratoryErrors.UnableToConnectHostnameInPortError,
self.handler.run
)
def suite():
return unittest.TestSuite(
(
unittest.makeSuite(AbstractLightweightIsUpAndRunningHandlerTestCase),
unittest.makeSuite(WebcamIsUpAndRunningHandlerTestCase),
unittest.makeSuite(HostIsUpAndRunningHandlerTestCase),
)
)
if __name__ == '__main__':
unittest.main()
|
benwhalley/statpipe | setup.py | Python | mit | 448 | 0 | from dis | tutils.core import setup
setup(
name='Statpipe',
version='0.1.9',
author='Ben Whalley',
author_email='benwhalley@gmail.com',
packages=['statpipe'],
scripts=['bin/statpipe', 'bin/statpipe_image'],
url='https://github.com/benwhalley/statpipe',
license='LICENSE.txt',
description='Pipe stuff to Stata, get results back.',
long_description=open('README.rst').read(),
install_requires=['clint', ] | ,
)
|
geopython/QGIS | python/plugins/processing/algs/grass7/ext/r_li_renyi_ascii.py | Python | gpl-2.0 | 1,538 | 0 | # -*- coding: utf-8 -*-
"""
***************************************************************************
r_li_renyi_ascii.py
-------------------
Date : February 2016
Copyright : (C) 2016 by Médéric Ribreux
Email : medspx at medspx dot fr
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
******************************************** | *******************************
"""
__author__ = 'Médéric Ribreux'
__date__ = 'February 2016'
__copyright__ = '(C | ) 2016, Médéric Ribreux'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from .r_li import checkMovingWindow, configFile, moveOutputTxtFile
def checkParameterValuesBeforeExecuting(alg, parameters, context):
return checkMovingWindow(alg, parameters, context, True)
def processCommand(alg, parameters, context, feedback):
configFile(alg, parameters, context, feedback, True)
def processOutputs(alg, parameters, context, feedback):
moveOutputTxtFile(alg, parameters, context)
|
cuauv/software | mission/missions/old/2017/aslam_buoys.py | Python | bsd-3-clause | 7,476 | 0.0103 | from mission.constants.config import PIPE_SEARCH_DEPTH
from mission.framework.combinators import *
from mission.framework.primitive import *
from mission.framework.position import *
from mission.framework.movement import *
from mission.framework.task import *
from mission.framework.timing import *
from mission.framework.wiggling import *
from mission.constants.c | onfig import *
from mission.missions.ozer_common import SequentialSuccess, Conditional, Retry
from mission.opt_aux.aux import *
from mission.missions.buoys import Scuttle
import aslam
import shm, time, math
import numpy as n
from auv_math.math_utils impor | t rotate
class GoToPipe(Task):
"""
Move to and align with the pipe after buoys
"""
def on_first_run(self, *args, **kwargs):
pipe_results = shm.buoys_pipe_results.get()
self.task = Sequential(
Log('Returning to pipe position'),
GoToPosition(
pipe_results.north,
pipe_results.east,
depth=pipe_results.depth,
optimize=True,
),
Log('Aligning with pipe'),
Heading(pipe_results.heading),
)
def on_run(self, *args, **kwargs):
if not self.task.finished:
self.task()
else:
self.finish()
class Timeout(Task):
def on_first_run(self, time, task, *args, **kwargs):
self.timer = Timer(time)
def on_run(self, time, task, *args, **kwargs):
task()
self.timer()
if task.finished:
self.finish()
elif self.timer.finished:
self.logw('Task timed out in {} seconds!'.format(time))
self.finish()
# Will simply override the desired depth set by a task directly after it is called.
class MinDepth(Task):
def on_run(self, min_depth, subtask):
actual = shm.kalman.depth.get()
if actual > min_depth:
subtask()
else:
self.logw('Actual depth {} less than minimum of {}; NOT running task!'.format(actual, min_depth))
desire = shm.desires.depth.get()
if desire < min_depth:
self.logw('Desired depth {} less than minimum of {}; overriding with minimum!'.format(desire, min_depth))
shm.desires.depth.set(min_depth)
if subtask.finished:
self.finish()
class GrabHeading(Task):
def on_run(self):
heading = shm.kalman.heading.get()
self.logw('Grabbed current sub heading of {}'.format(heading))
shm.buoys_mission.heading.set(heading)
self.finish()
class GrabPosition(Task):
def on_run(self):
pos = aslam.sub.position()
self.logw('Grabbed position of N {}, E {}, D {}'.format(pos[0], pos[1], pos[2]))
shm.buoys_mission.north.set(pos[0])
shm.buoys_mission.east.set(pos[1])
shm.buoys_mission.depth.set(pos[2])
self.finish()
class RestoreHeading(Task):
def on_first_run(self):
self.saved = shm.buoys_mission.heading.get()
self.logw('Restoring sub heading of {}'.format(self.saved))
def on_run(self):
task = Heading(self.saved)
task()
if task.finished:
self.finish()
class RestorePosition(Task):
def on_first_run(self):
self.saved = [shm.buoys_mission.north.get(), shm.buoys_mission.east.get(), shm.buoys_mission.depth.get()]
self.logw('Restoring saved position of N {}, E {}, D {}'.format(*self.saved))
def on_run(self):
task = GoToPosition(self.saved[0], self.saved[1], depth = self.saved[2])
task()
if task.finished:
self.finish()
Scan = Sequential(
MoveYRough(2.0),
MoveYRough(-4.0),
MoveYRough(4.0),
MoveYRough(-4.0),
MoveYRough(2.0)
)
boundingBox = lambda pos: (pos - n.array([0.2, 0.2, 0.2]), pos + n.array([0.2, 0.2, 0.2]))
tolerance = n.array([0.05, 0.05, 0.05])
class TouchGuarded(Task):
def on_run(self, subtask, sensor):
subtask()
if subtask.finished or not sensor.get():
self.finish()
class AvoidYellow(Task):
def on_first_run(self):
self.heading = shm.kalman.heading.get()
self.red_buoy = aslam.world.red_buoy.position()[:2]
self.green_buoy = aslam.world.green_buoy.position()[:2]
self.yellow_buoy = aslam.world.yellow_buoy.position()[:2]
self.all_buoys = [('red', self.red_buoy), ('green', self.green_buoy), ('yellow', self.yellow_buoy)]
self.sorted_buoys = sorted(self.all_buoys, key = lambda x: rotate(x[1], -self.heading)[1])
self.logi('Sorted buoys (left-to-right): {}'.format([x[0] for x in self.sorted_buoys]))
subtasks = []
subtasks.append(MasterConcurrent(HPRWiggle(), MoveXRough(-1.0)))
subtasks.append(Depth(PIPE_SEARCH_DEPTH))
if self.sorted_buoys[0][0] == 'yellow':
# yellow buoy far left, go right
subtasks.append(MoveYRough(1.0))
elif self.sorted_buoys[1][0] == 'yellow':
subtasks.append(MoveYRough(1.0))
else:
subtasks.append(MoveYRough(-1.0))
subtasks.append(MoveXRough(1.0))
center_buoy = n.array(self.sorted_buoys[1][1])
center_buoy += n.array(rotate((1, 0), self.heading)) # 1m beyond center buoy
subtasks.append(GoToPosition(center_buoy[0], center_buoy[1], depth=PIPE_SEARCH_DEPTH))
self.subtask = Sequential(*subtasks)
def on_run(self):
self.subtask()
if self.subtask.finished:
self.finish()
class AllBuoys(Task):
def desiredModules(self):
return [shm.vision_modules.Buoys]
def on_first_run(self):
self.has_made_progress = False
shm.navigation_settings.optimize.set(False)
delta_red = aslam.world.red_buoy.position() - aslam.sub.position()
delta_red /= n.linalg.norm(delta_red)
delta_red *= -1
delta_green = aslam.world.green_buoy.position() - aslam.sub.position()
delta_green /= n.linalg.norm(delta_green)
delta_green *= -1
delta_yellow = aslam.world.yellow_buoy.position() - aslam.sub.position()
delta_yellow /= n.linalg.norm(delta_yellow)
delta_yellow *= -1
subtasks = []
# subtasks.append(GoToPipe())
subtasks.append(MoveXRough(PIPE_TO_BUOYS_DIST))
subtasks.append(Depth(BUOY_SEARCH_DEPTH))
subtasks.append(GrabPosition())
subtasks.append(GrabHeading())
subtasks.append(Scan)
if 1:
subtasks += [
Timeout(20.0, SequentialSuccess(
aslam.Target(aslam.world.red_buoy, delta_red, tolerance, boundingBox(delta_red * 2), orient = True)),
RelativeToInitialDepth(0.05),
Timeout(5.0, TouchGuarded(MoveXRough(1.3), shm.gpio.wall_1)),
),
RestorePosition(),
RestoreHeading()
]
if 1:
subtasks += [
Timeout(20.0, SequentialSuccess(
aslam.Target(aslam.world.green_buoy, delta_green, tolerance, boundingBox(delta_green * 2), orient = True)),
RelativeToInitialDepth(0.1),
Timeout(5.0, TouchGuarded(MoveXRough(1.3), shm.gpio.wall_1)),
),
RestorePosition(),
RestoreHeading()
]
if 1:
subtasks += [
Timeout(20.0, SequentialSuccess(
aslam.Target(aslam.world.yellow_buoy, delta_yellow, tolerance, boundingBox(delta_yellow * 2), orient = True)),
RelativeToInitialDepth(0.1),
GuardedTimer(10.0, Scuttle(), aslam.SimpleTarget(aslam.world.yellow_buoy, delta_yellow)),
),
RestorePosition(),
RestoreHeading(),
AvoidYellow()
]
subtasks.append(RestoreHeading())
self.subtask = MinDepth(0.1, Sequential(*subtasks))
def on_run(self):
self.subtask()
if self.subtask.finished:
self.finish()
|
icflix/nagios | nagios_receiver_config.py | Python | bsd-3-clause | 265 | 0 | """Nagios Receiver config."""
SHARED_KEY = ''
CFG_DIR = | '/var/lib/nagios/remoteconfigs/'
RESULTS_DIR = '/var/lib/nagios/remoteresults/'
COMPONENTS = {
'configs': 'config',
'results': 'result',
}
# Maximum Content-Length | 1MB ?
CONTENT_LENGTH_MAX = 1048576
|
MrZigler/UnderdogMilitia | landplots.py | Python | gpl-3.0 | 1,652 | 0.02724 | import time
import random
BLACK = (0, 0, 0)
WHITE = (255, 255, 255)
BLUE = (0, 0, 255)
GREEN = (0, 255, 0)
RED = (255, 0, 0)
PURPLE = (255, 0, 255)
BROWN = (160, 82, 45)
class Landplots():
def __init__(self, owner, umtype, locationregion, jurisdiction, umvalue, inventory):
self.owner = owner
self.umtype = umtype
self.locationregion = locationregion
self.jurisdiction = jurisdiction
self.umvalue = umvalue
self.inventory = inventory
'''
class Umbuildings(): blacksmithshop (anvil, fire, metal) safehouse, farms (hulgerculter, agria, forest, hort, mixxed)
library (books )
def __init__(self, owner):
self.owner = owner '''
''' mines (gold, silver, copper, rareearths, titanium) smelter_furnace (eros to bars)
mints (coins) '''
'''Farm - umtype (1 depleted agra, 5 agra, 10, hort, 15 hort hugercult) locationregion (reseved), jurisdiction (1 heavyzoning & regulations,
5 minimum zoning but taxed, 10 no zoning regulations or taxes) umvalue (1 low to 100 highest value)
inventory (improvements, farmanimals, chicken coops, barns etc)
'''
class Farm(Landplots):
| def __init__(self, owner, umtype, locationregion, jurisdiction, umvalue, inventory):
Landplots.__init__(self, owner, umtype, locationregion, jurisdiction, umvalue, inventory)
def producefood(self):
| self.umvalue + self.jurisdiction
#orchard1 = Farm('Platinum Falcon', 5, 7, 42, 37, {})
|
ziggi/pawn-sublime-language | OpenWikiOnString.py | Python | mit | 233 | 0.021459 | import sublime, sublime_plugin
import webbrowser
class OpenW | ikiOnString(sublime_plugin.TextCommand):
def run(self, | edit):
query = self.view.substr(self.view.sel()[0])
webbrowser.open_new("http://wiki.sa-mp.com/wiki/" + query)
|
underlost/Replica | replica/contrib/blip/dashboard/views.py | Python | mit | 5,663 | 0.007946 | from __future__ import absolute_import
import logging
from django.shortcuts import render_to_response, render, get_object_or_404, redirect
from django | .template import RequestContext
from django.c | ontrib import messages
from django.contrib.auth.decorators import login_required
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from django.views.generic.list import ListView
from replica.contrib.blip.models import Timeline, Blip
from replica.contrib.blip.forms import TimelineModelForm, BlipModelForm
class LatestBlipsListViewMobile(ListView):
paginate_by = 25
template_name = 'replica/dashboard/blip/blip_list.html'
def get_queryset(self):
return Blip.objects.filter(user=self.request.user).order_by('-pub_date')
def get_context_data(self, **kwargs):
context = super(LatestBlipsListViewMobile, self).get_context_data(**kwargs)
context.update({'hide_timeline': True, 'nav_title': 'All Blips',})
return context
class TimelinesListView(ListView):
paginate_by = 25
template_name = 'replica/dashboard/blip/timeline_list.html'
def get_queryset(self):
return Timeline.objects.filter(user=self.request.user).order_by('-pub_date')
def get_context_data(self, **kwargs):
context = super(TimelinesListView, self).get_context_data(**kwargs)
context.update({ 'nav_title': 'Timelines',})
return context
class TimelineBlipListView(ListView):
paginate_by = 100
template_name = 'replica/dashboard/blip/blip_list.html'
def get_queryset(self):
self.timeline = get_object_or_404(Timeline, slug=self.kwargs.pop('timeline_slug'))
b = Blip.objects.filter(user=self.request.user).filter(timeline=self.timeline)
if self.timeline.rev_order == True:
return b.order_by('-pub_date')
else:
return b.order_by('pub_date')
def get_context_data(self, **kwargs):
context = super(TimelineBlipListView, self).get_context_data(**kwargs)
context.update({'timeline': self.timeline, 'nav_title': self.timeline.name,})
return context
def AddTimeline(request):
#add a timeline.
instance = Timeline(user=request.user)
f = TimelineModelForm(request.POST or None, instance=instance)
if f.is_valid():
f.save()
messages.add_message(
request, messages.INFO, 'New list created.')
return redirect('Replica:Blip-Timelines')
ctx = {'form': f, 'adding': True}
return render(request, 'replica/dashboard/blip/edit_timeline.html', ctx)
def EditTimeline(request, timeline_slug):
#Lets a user edit a blip they've previously added.
timeline = get_object_or_404(Timeline, slug=timeline_slug)
f = TimelineModelForm(request.POST or None, instance=timeline)
if f.is_valid():
f.save()
return redirect('Replica:Blip-Add-To-Timeline', timeline_slug=timeline_slug)
ctx = {'form': f, 'timeline': timeline, 'adding': False}
return render(request, 'replica/dashboard/blip/edit_timeline.html', ctx)
def SingleBlip(request, blip_guid):
#Shows a single blip.
blip = get_object_or_404(Blip, guid=blip_guid)
if blip.timeline:
recent_blips = Blip.objects.filter(timeline__id=blip.timeline.id, is_private=False)[:5]
ctx = {'blip': blip, 'recent_blips': recent_blips}
else:
ctx = {'blip': blip}
return render(request, 'replica/dashboard/blip/single_blip.html', ctx)
def AddBlip(request, timeline_slug=None):
object_list = Blip.objects.filter(user=request.user).order_by('-pub_date')[:10]
instance = Blip(user=request.user)
f = BlipModelForm(request.POST or None, instance=instance)
if f.is_valid():
f.save()
messages.add_message(
request, messages.INFO, 'Blip Added.')
return redirect('Replica:Blip:Index')
ctx = {'form': f, 'object_list': object_list, 'adding': True, 'blip_submit': True, 'hide_timeline': True, 'nav_title': 'All Blips', }
return render(request, 'replica/dashboard/blip/blip_list.html', ctx)
def AddBlipToTimeline(request, timeline_slug):
ft = get_object_or_404(Timeline, slug=timeline_slug)
if ft.rev_order == True:
b = Blip.objects.filter(user=request.user).filter(timeline=ft).order_by('-pub_date')[:10]
else:
b = Blip.objects.filter(user=request.user).filter(timeline=ft).order_by('pub_date')[:10]
instance = Blip(user=request.user, timeline=ft)
f = BlipModelForm(request.POST or None, instance=instance)
if f.is_valid():
f.save()
messages.add_message(
request, messages.INFO, 'Blip Added.')
return redirect('Replica:Blip:Timeline', timeline_slug=timeline_slug)
ctx = {'form': f, 'timeline': ft, 'adding': True, 'blip_submit': True, 'nav_title': ft.name, 'object_list': b, }
return render(request, 'replica/dashboard/blip/blip_list.html', ctx)
def EditBlip(request, blip_guid):
#Lets a user edit a blip they've previously added.
blip = get_object_or_404(Blip, guid=blip_guid, user=request.user)
f = BlipModelForm(request.POST or None, instance=blip)
if f.is_valid():
f.save()
return redirect('Replica:Blip:Blip', blip_guid=blip_guid)
ctx = {'form': f, 'blip': blip, 'adding': False}
return render(request, 'replica/dashboard/blip/edit_blip.html', ctx)
def DeleteBlip(request, blip_guid):
blip = get_object_or_404(Blip, guid=blip_guid, user=request.user)
if request.method == 'POST':
blip.delete()
return redirect('Replica:Blip:Index')
return render(request, 'replica/dashboard/delete-confirm.html', {'object': blip, 'content_type': 'Blip'})
|
thombashi/typepy | typepy/checker/_datetime.py | Python | mit | 1,159 | 0.000863 | """
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from datetime import date, datetime
from ._checker import CheckerFactory, TypeCheckerBase, TypeCheckerDelegator
from ._common import isstring
class DateTimeTypeCheckerStrictLevel0(TypeCheckerBase):
def is_instance(self):
return isinstance(self._value, (date, datetime))
class DateTimeTypeCheckerStrictLevel1(DateTimeTypeCheckerStrictLevel0):
def is_exclude_instance(self):
from ..type._integer import Integer
# exclude timestamp
return Integer(self._value, strict_level=1).is_type()
class DateTimeTypeCheckerStrictLevel2(DateTimeTypeCheckerStrictLevel1):
def is_exclude_instance(self):
return isstring(self._value) or super().is_exclude_instance()
_fact | ory = CheckerFactory(
checker_mapping={
0: DateTimeTypeCheckerStrictLevel0,
1: DateTimeTypeCheckerStrictLevel1,
2: DateTimeTypeCheckerStrictLevel2,
}
)
class DateTimeTypeC | hecker(TypeCheckerDelegator):
def __init__(self, value, strict_level):
super().__init__(value=value, checker_factory=_factory, strict_level=strict_level)
|
synox/telewall | telewall/telewall/__init__.py | Python | gpl-3.0 | 290 | 0.003448 | """ Telewall. This application connects to asterisk and provides Statis-Apps.
It has a user interface using a button, display and led. It also provides
a web interface.
The Application should be run using python 2 | .7, because ARI (Asterisk REST Interface) does not support pytho | n 3.
"""
|
unkyulee/elastic-cms | src/web/__init__.py | Python | mit | 2,565 | 0.011306 | # Initialize web application and setup routing
from flask import Flask, request, session, render_template, send_from_directory
app = Flask(__name__) # Define the WSGI application object
# very first run will not have config.py
try: app.config.from_object('config')
except: pass
import os
import web.util.jinja # Initialize jinja custom filters
import web.util.bootstrap as boot # Initialize web
import web.util.web_mod as mod # Module locator
import web.util.rev_proxy as rev # Reverse Proxy
import web.util.tools as tools
@app.route('/favicon.ico')
def favicon():
return send_from_directory(os.path.join(app.root_path, 'static'),
'favicon.ico', mimetype='image/vnd.microsoft.icon')
# Setup Routing - catch all
@app.route('/', defaults={'path':''}, methods=['GET','POST','PUT','DELETE','HEAD'])
@app.route('/<path:path>', methods=['GET','POST','PUT','DELETE','HEAD'])
@boot.require_install
def index(path):
# capture all the exceptions and send email
try:
# split navigation
nav = path.split("/")
# determine the navigation with given url
navigation = mod.define(app.config.get("HOST"), nav)
if not navigation['module']:
return "page not found", 404
# authenticate
req_auth = boot.requires_authentication(navigation)
authenticated = boot.is_authe | nticated()
if req_auth and not authenticated:
# build payload for the navigation
navigation = mod.build_payload(app.config, request, navigation)
navigation['operation'] = None
import web.modules.auth.control as default
return default.get(navigation)
| # build payload for the navigation
navigation = mod.build_payload(app.config, request, navigation)
# check for rev_proxy
rev_proxy_rule = rev.is_reverse_proxy(app.config.get("HOST"), request.path)
if rev_proxy_rule:
return rev.rev_proxy(app.config.get("HOST"), request.path, rev_proxy_rule)
# get module
response = mod.get_module(navigation)
# check for Authorization
if req_auth and not response.authorize(navigation):
return render_template("error/401.html", p=navigation), 401
# render based on the navigation info
return response.get(navigation)
except:
# don't send email when debug mode is on
if app.config.get("DEBUG"):
raise
# internal server error
return mod.handle_exception(app.config.get("HOST"))
|
Octane70/BluePad | bluepad/utils.py | Python | mit | 4,859 | 0.00638 | from __future__ import absolute_import, print_function, unicode_literals
import dbus
import time
import sys
SERVICE_NAME = "org.bluez"
ADAPTER_INTERFACE = SERVICE_NAME + ".Adapt | er1"
DEVICE_INTERFACE = SERVICE_NAME + ".Device1"
PROFILE_MANAGER = SERVICE_NAME + ".ProfileManager1"
def get_managed_objects():
bus = dbus.SystemBus()
manager = dbus.Interface(bus.get_object(SERVICE_NAME, "/"), "org.freedesktop.DBus.ObjectManager")
return manager.GetManagedObjects()
def find_adapter(patte | rn=None):
return find_adapter_in_objects(get_managed_objects(), pattern)
def find_adapter_in_objects(objects, pattern=None):
bus = dbus.SystemBus()
for path, ifaces in objects.items():
adapter = ifaces.get(ADAPTER_INTERFACE)
if adapter is None:
continue
if not pattern or pattern == adapter["Address"] or path.endswith(pattern):
obj = bus.get_object(SERVICE_NAME, path)
return dbus.Interface(obj, ADAPTER_INTERFACE)
raise Exception("Bluetooth adapter {} not found".format(pattern))
def get_adapter_property(device_name, property):
bus = dbus.SystemBus()
adapter_path = find_adapter(device_name).object_path
adapter = dbus.Interface(bus.get_object(SERVICE_NAME, adapter_path),"org.freedesktop.DBus.Properties")
return adapter.Get(ADAPTER_INTERFACE, property)
def get_mac(device_name):
return get_adapter_property(device_name, "Address")
def get_adapter_powered_status(device_name):
powered = get_adapter_property(device_name, "Powered")
return True if powered else False
def get_adapter_discoverable_status(device_name):
discoverable = get_adapter_property(device_name, "Discoverable")
return True if discoverable else False
def get_adapter_pairable_status(device_name):
pairable = get_adapter_property(device_name, "Pairable")
return True if pairable else False
def get_paired_devices(device_name):
paired_devices = []
bus = dbus.SystemBus()
adapter_path = find_adapter(device_name).object_path
om = dbus.Interface(bus.get_object(SERVICE_NAME, "/"), "org.freedesktop.DBus.ObjectManager")
objects = om.GetManagedObjects()
for path, interfaces in objects.items():
if DEVICE_INTERFACE not in interfaces:
continue
properties = interfaces[DEVICE_INTERFACE]
if properties["Adapter"] != adapter_path:
continue
paired_devices.append((str(properties["Address"]), str(properties["Alias"])))
return paired_devices
def device_discoverable(device_name, discoverable):
bus = dbus.SystemBus()
adapter_path = find_adapter(device_name).object_path
adapter = dbus.Interface(bus.get_object(SERVICE_NAME, adapter_path),"org.freedesktop.DBus.Properties")
if discoverable:
value = dbus.Boolean(1)
else:
value = dbus.Boolean(0)
adapter.Set(ADAPTER_INTERFACE, "Discoverable", value)
def device_pairable(device_name, pairable):
bus = dbus.SystemBus()
adapter_path = find_adapter(device_name).object_path
adapter = dbus.Interface(bus.get_object(SERVICE_NAME, adapter_path),"org.freedesktop.DBus.Properties")
if pairable:
value = dbus.Boolean(1)
else:
value = dbus.Boolean(0)
adapter.Set(ADAPTER_INTERFACE, "Pairable", value)
def device_powered(device_name, powered):
bus = dbus.SystemBus()
adapter_path = find_adapter(device_name).object_path
adapter = dbus.Interface(bus.get_object(SERVICE_NAME, adapter_path),"org.freedesktop.DBus.Properties")
if powered:
value = dbus.Boolean(1)
else:
value = dbus.Boolean(0)
adapter.Set(ADAPTER_INTERFACE, "Powered", value)
def register_spp():
service_record = """
<?xml version="1.0" encoding="UTF-8" ?>
<record>
<attribute id="0x0001">
<sequence>
<uuid value="0x1101"/>
</sequence>
</attribute>
<attribute id="0x0004">
<sequence>
<sequence>
<uuid value="0x0100"/>
</sequence>
<sequence>
<uuid value="0x0003"/>
<uint8 value="1" name="channel"/>
</sequence>
</sequence>
</attribute>
<attribute id="0x0100">
<text value="Serial Port" name="name"/>
</attribute>
</record>
"""
bus = dbus.SystemBus()
manager = dbus.Interface(bus.get_object(SERVICE_NAME, "/org/bluez"), PROFILE_MANAGER)
path = "/bluez"
uuid = "00001101-0000-1000-8000-00805f9b34fb"
opts = {
"AutoConnect" : True,
"ServiceRecord" : service_record
}
manager.RegisterProfile(path, uuid, opts)
if sys.version_info[0] > 2:
def string_to_bytes(data, encoding):
return bytes(data, encoding=encoding)
else:
def string_to_bytes(data, encoding):
data.encode(encoding)
return bytes(data)
|
Azure/azure-sdk-for-python | sdk/formrecognizer/azure-ai-formrecognizer/samples/v3.2-beta/sample_build_model.py | Python | mit | 2,477 | 0.004037 | # coding: utf-8
# -------------------------------------------------------------------------
# Copyright | (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""
FILE: sample_build_model.py
DESCRIPTION:
This sample demonstrates how to build a model. For this sample, you can use the training
documents found in https://aka.ms/azsdk/formrecognizer/sampletrainingfiles
More details on setting up a container and required fil | e structure can be found here:
https://aka.ms/azsdk/formrecognizer/buildtrainingset
USAGE:
python sample_build_model.py
Set the environment variables with your own values before running the sample:
1) AZURE_FORM_RECOGNIZER_ENDPOINT - the endpoint to your Cognitive Services resource.
2) AZURE_FORM_RECOGNIZER_KEY - your Form Recognizer API key
3) CONTAINER_SAS_URL - The shared access signature (SAS) Url of your Azure Blob Storage container with your training files.
"""
import os
def sample_build_model():
# [START build_model]
from azure.ai.formrecognizer import DocumentModelAdministrationClient, DocumentBuildMode
from azure.core.credentials import AzureKeyCredential
endpoint = os.environ["AZURE_FORM_RECOGNIZER_ENDPOINT"]
key = os.environ["AZURE_FORM_RECOGNIZER_KEY"]
container_sas_url = os.environ["CONTAINER_SAS_URL"]
document_model_admin_client = DocumentModelAdministrationClient(endpoint, AzureKeyCredential(key))
poller = document_model_admin_client.begin_build_model(
container_sas_url, DocumentBuildMode.TEMPLATE, description="my model description"
)
model = poller.result()
print("Model ID: {}".format(model.model_id))
print("Description: {}".format(model.description))
print("Model created on: {}\n".format(model.created_on))
print("Doc types the model can recognize:")
for name, doc_type in model.doc_types.items():
print("\nDoc Type: '{}' built with '{}' mode which has the following fields:".format(name, doc_type.build_mode))
for field_name, field in doc_type.field_schema.items():
print("Field: '{}' has type '{}' and confidence score {}".format(
field_name, field["type"], doc_type.field_confidence[field_name]
))
# [END build_model]
if __name__ == '__main__':
sample_build_model()
|
robcarver17/systematictradingexamples | plots_for_perhaps/plotcontango.py | Python | gpl-2.0 | 3,346 | 0.01853 | import pandas as pd
import datetime as dt
import Image
from random import gauss
import numpy as np
from matplotlib.pyplot import plot, show, xticks, xlabel, ylabel, legend, yscale, title, savefig, rcParams, figure, hist, scatter
import matplotlib.pylab as plt
from itertools import cycle
import pickle
import pandas as pd
lines = ["--","-","-."]
linecycler | = cycle(lines)
import Quandl
def get_quandl(code):
QUANDLDICT=dict(GOLD="COM/WLD_GOLD", CORN="COM/PMAIZMT_USD", CRUDE_W="C | OM/WLD_CRUDE_WTI")
authtoken='qWXuZcdwzwQ2GJQ88sNb'
quandldef=QUANDLDICT[code]
print quandldef
data = Quandl.get(quandldef, authtoken=authtoken)
data.columns=["value"]
return data.iloc[:,0]
def pd_readcsv(filename, date_index_name="DATETIME"):
"""
Reads a pandas data frame, with time index labelled
package_name(/path1/path2.., filename
:param filename: Filename with extension
:type filename: str
:param date_index_name: Column name of date index
:type date_index_name: list of str
:returns: pd.DataFrame
"""
ans = pd.read_csv(filename)
ans.index = pd.to_datetime(ans[date_index_name]).values
del ans[date_index_name]
ans.index.name = None
return ans
def get_spot_price(instrument_code):
datapath= "/home/rob/workspace3/pysystemtrade/sysdata/legacycsv/"
filename = datapath+ instrument_code + "_carrydata.csv"
instrcarrydata = pd_readcsv(filename)
return instrcarrydata.PRICE
def get_adj_price(instrument_code):
datapath= "/home/rob/workspace3/pysystemtrade/sysdata/legacycsv/"
filename = datapath+ instrument_code + "_price.csv"
instrprice = pd_readcsv(filename)
instrprice.columns=["value"]
return instrprice.iloc[:,0]
def get_interest():
authtoken='qWXuZcdwzwQ2GJQ88sNb'
quandldef='FRED/INTDSRUSM193N'
print quandldef
data = Quandl.get(quandldef, authtoken=authtoken)
data.columns=["value"]
return data.iloc[:,0]/1200.0
instrument_code="CORN"
start_date=dict(GOLD=pd.datetime(1975,1,1), CORN=pd.datetime(1982,04,30), CRUDE_W=pd.datetime(1987,12,31))[instrument_code]
data1=get_quandl(instrument_code)[start_date:]
perc_data1= (data1 - data1.shift(1))/data1
data2=get_adj_price(instrument_code).reindex(data1.index).ffill()
perc_data2= (data2 - data2.shift(1))/data2
irate=get_interest()
irate=irate.reindex(perc_data2.index,method="ffill")
perc_data2=perc_data2+irate
data1=perc_data1+1.0
data1=data1.cumprod()
data2=perc_data2+1.0
data2=data2.cumprod()
#data2 = data2 - (data2.irow(0) - data1.irow(0))
data3=data1-data2
data=pd.concat([data1, data2], axis=1).ffill()
data.columns=["Spot", "Future"]
data.plot(style=["g-", "b--"])
legend(loc="upper left")
frame=plt.gca()
#frame.get_yaxis().set_visible(False)
#frame.set_ylim([0,50000])
rcParams.update({'font.size': 18})
def file_process(filename):
fig = plt.gcf()
fig.set_size_inches(18.5,10.5)
fig.savefig("/home/rob/%s.png" % filename,dpi=300)
fig.savefig("/home/rob/%sLOWRES.png" % filename,dpi=50)
Image.open("/home/rob/%s.png" % filename).convert('L').save("/home/rob/%s.jpg" % filename)
Image.open("/home/rob/%sLOWRES.png" % filename).convert('L').save("/home/rob/%sLOWRES.jpg" % filename)
file_process("contango_%s" % instrument_code)
show()
data3.plot()
show()
|
alexellis/docker-arm | images/armhf/python2-envirophat.dev/pressure/pressure.py | Python | gpl-3.0 | 277 | 0.032491 | #!/usr/ | bin/env python
import sys
import time
from envirophat import light, weather, motion, analog
def write():
try:
p = round(weather.pressure(),2)
c = light.light()
print('{"light": '+str(c)+', "pressure": '+str( | p)+' }')
except KeyboardInterrupt:
pass
write()
|
jimmysong/bitcoin | test/functional/test_framework/mininode.py | Python | mit | 55,063 | 0.001925 | #!/usr/bin/env python3
# Copyright (c) 2010 ArtForz -- public domain half-a-node
# Copyright (c) 2012 Jeff Garzik
# Copyright (c) 2010-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Bitcoin P2P network half-a-node.
This python code was modified from ArtForz' public domain half-a-node, as
found in the mini-node branch of http://github.com/jgarzik/pynode.
NodeConn: an object which manages p2p connectivity to a bitcoin node
NodeConnCB: a base class that describes the interface for receiving
callbacks with network messages from a NodeConn
CBlock, CTransaction, CBlockHeader, CTxIn, CTxOut, etc....:
data structures that should map to corresponding structures in
bitcoin/primitives
msg_block, msg_tx, msg_headers, etc.:
data structures that represent network messages
ser_*, deser_*: functions that handle serialization/deserialization
"""
import struct
import socket
import asyncore
import time
import sys
import random
from .util import hex_str_to_bytes, bytes_to_hex_str
from io import BytesIO
from codecs import encode
import hashlib
from threading import RLock
from threading import Thread
import logging
import copy
from test_framework.siphash import siphash256
BIP0031_VERSION = 60000
MY_VERSION = 70014 # past bip-31 for ping/pong
MY_SUBVERSION = b"/python-mininode-tester:0.0.3/"
MY_RELAY = 1 # from version 70001 onwards, fRelay should be appended to version messages (BIP37)
MAX_INV_SZ = 50000
MAX_BLOCK_BASE_SIZE = 1000000
COIN = 100000000 # 1 btc in satoshis
NODE_NETWORK = (1 << 0)
NODE_GETUTXO = (1 << 1)
NODE_BLOOM = (1 << 2)
NODE_WITNESS = (1 << 3)
logger = logging.getLogger("TestFramework.mininode")
# Keep our own socket map for asyncore, so that we can track disconnects
# ourselves (to workaround an issue with closing an asyncore socket when
# using select)
mininode_socket_map = dict()
# One lock for synchronizing all data access between the networking thread (see
# NetworkThread below) and the thread running the test logic. For simplicity,
# NodeConn acquires this lock whenever delivering a message to to a NodeConnCB,
# and whenever adding anything to the send buffer (in send_message()). This
# lock should be acquired in the thread running the test logic to synchronize
# access to any data shared with the NodeConnCB or NodeConn.
mininode_lock = RLock()
# Serialization/deserialization tools
def sha256(s):
return hashlib.new('sha256', s).digest()
def ripemd160(s):
return hashlib.new('ripemd160', s).digest()
def hash256(s):
return sha256(sha256(s))
def ser_compact_size(l):
r = b""
if l < 253:
r = struct.pack("B", l)
elif l < 0x10000:
r = struct.pack("<BH", 253, l)
elif l < 0x100000000:
r = struct.pack("<BI", 254, l)
else:
r = struct.pack("<BQ", 255, l)
return r
def deser_compact_size(f):
nit = struct.unpack("<B", f.read(1))[0]
if nit == 253:
nit = struct.unpack("<H", f.read(2))[0]
elif nit == 254:
nit = struct.unpack("<I", f.read(4))[0]
elif nit == 255:
nit = struct.unpack("<Q", f.read(8))[0]
return nit
def deser_string(f):
nit = deser_compact_size(f)
return f.read(nit)
def ser_string(s):
return ser_compact_size(len(s)) + s
def deser_uint256(f):
r = 0
for i in range(8):
t = struct.unpack("<I", f.read(4))[0]
r += t << (i * 32)
return r
def ser_uint256(u):
rs = b""
for i in range(8):
rs += struct.pack("<I", u & 0xFFFFFFFF)
u >>= 32
return rs
def uint256_from_str(s):
r = 0
t = struct.unpack("<IIIIIIII", s[:32])
for i in range(8):
r += t[i] << (i * 32)
return r
def uint256_from_compact(c):
nbytes = (c >> 24) & 0xFF
v = (c & 0xFFFFFF) << (8 * (nbytes - 3))
return v
def deser_vector(f, c):
nit = deser_compact_size(f)
r = []
for i in range(nit):
t = c()
t.deserialize(f)
r.append(t)
return r
# ser_function_name: Allow for an alternate serialization function on the
# entries in the vector (we use this for serializing the vector of transactions
# for a witness block).
def ser_vector(l, ser_function_name=None):
r = ser_compact_size(len(l))
for i in l:
if ser_function_name:
r += getattr(i, ser_function_name)()
else:
r += i.serialize()
return r
def deser_uint256_vector(f):
nit = deser_compact_size(f)
r = []
for i in range(nit):
t = deser_uint256(f)
r.append(t)
return r
def ser_uint256_vector(l):
r = ser_compact_size(len(l))
for i in l:
r += ser_uint256(i)
return r
def deser_string_vector(f):
nit = deser_compact_size(f)
r = []
for i in range(nit):
t = deser_string(f)
r.append(t)
return r
def ser_string_vector(l):
r = ser_compact_size(len(l))
for sv in l:
r += ser_string(sv)
return r
def deser_int_vector(f):
nit = deser_compact_size(f)
r = []
for i in range(nit):
t = struct.unpack("<i", f.read(4))[0]
r.append(t)
return r
def ser_int_vector(l):
r = ser_compact_size(len(l))
for i in l:
r += struct.pack("<i", i)
return r
# Deserialize from a hex string representation (eg from RPC)
def FromHex(obj, hex_string):
obj.deserialize(BytesIO(hex_str_to_bytes(hex_string)))
return obj
# Convert a binary-serializable object to hex (eg for submission via RPC)
def ToHex(obj):
return bytes_to_hex_str(obj.serialize())
# Objects that map to bitcoind objects, which can be serialized/deserialized
class CAddress(object):
def __init__(self):
self.nServices = 1
self.pchReserved = b"\x00" * 10 + b"\xff" * 2
self.ip = "0.0.0.0"
self.port = 0
def deserialize(self, f):
self.nServices = struct.unpack("<Q", f.read(8))[0]
self.pchReserved = f.read(12)
self.ip = socket.in | et_ntoa(f.read(4))
self.port = struct.unpack(">H", f.read(2))[0]
def serialize(self):
r = b""
r += struct.pack("<Q", self.nServices)
r += self.pchReserved
r += | socket.inet_aton(self.ip)
r += struct.pack(">H", self.port)
return r
def __repr__(self):
return "CAddress(nServices=%i ip=%s port=%i)" % (self.nServices,
self.ip, self.port)
MSG_WITNESS_FLAG = 1<<30
class CInv(object):
typemap = {
0: "Error",
1: "TX",
2: "Block",
1|MSG_WITNESS_FLAG: "WitnessTx",
2|MSG_WITNESS_FLAG : "WitnessBlock",
4: "CompactBlock"
}
def __init__(self, t=0, h=0):
self.type = t
self.hash = h
def deserialize(self, f):
self.type = struct.unpack("<i", f.read(4))[0]
self.hash = deser_uint256(f)
def serialize(self):
r = b""
r += struct.pack("<i", self.type)
r += ser_uint256(self.hash)
return r
def __repr__(self):
return "CInv(type=%s hash=%064x)" \
% (self.typemap[self.type], self.hash)
class CBlockLocator(object):
def __init__(self):
self.nVersion = MY_VERSION
self.vHave = []
def deserialize(self, f):
self.nVersion = struct.unpack("<i", f.read(4))[0]
self.vHave = deser_uint256_vector(f)
def serialize(self):
r = b""
r += struct.pack("<i", self.nVersion)
r += ser_uint256_vector(self.vHave)
return r
def __repr__(self):
return "CBlockLocator(nVersion=%i vHave=%s)" \
% (self.nVersion, repr(self.vHave))
class COutPoint(object):
def __init__(self, hash=0, n=0):
self.hash = hash
self.n = n
def deserialize(self, f):
self.hash = deser_uint256(f)
self.n = struct.unpack("<I", f.read(4))[0]
def serialize(self):
r = b""
r += ser_uint256(self.hash)
r += struct.pack("<I", self.n)
return r
def __repr__(self):
return "COutPoint(hash=%064x n=%i)" % (self. |
aYukiSekiguchi/ACCESS-Chromium | native_client_sdk/src/build_tools/make_nacl_tools.py | Python | bsd-3-clause | 7,703 | 0.008049 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Build NaCl tools (e.g. sel_ldr and ncval) at a given revision."""
import build_utils
import optparse
import os
import shutil
import subprocess
import sys
import tempfile
bot = build_utils.BotAnnotator()
# The suffix used for NaCl moduels that are installed, such as irt_core.
NEXE_SUFFIX = '.nexe'
def MakeInstallDirs(options):
'''Create the necessary install directories in the SDK staging area.
'''
install_dir = os.path.join(options.toolchain, 'bin');
if not os.path.exists(install_dir):
os.makedirs(install_dir)
runtime_dir = os.path.join(options.toolchain, 'runtime');
if not os.path.exists(runtime_dir):
os.makedirs(runtime_dir)
def Build(options):
'''Build 32-bit and 64-bit versions of needed NaCL tools and libs.'''
nacl_dir = os.path.join(options.nacl_dir, 'native_client')
toolchain_option = 'naclsdk_mode=custom:%s' % options.toolchain
libc_option = '' if options.lib == 'newlib' else ' --nacl_glibc'
if sys.platform == 'win32':
scons = os.path.join(nacl_dir, 'scons.bat')
bits32 = 'vcvarsall.bat x86 && '
bits64 = 'vcvarsall.bat x86_amd64 && '
else:
scons = os.path.join(nacl_dir, 'scons')
bits32 = ''
bits64 = ''
# Build sel_ldr and ncval.
def BuildTools(prefix, bits, target):
cmd = '%s%s -j %s --mode=%s platform=x86-%s naclsdk_validate=0 %s %s%s' % (
prefix, scons, options.jobs, options.variant, bits, target,
toolchain_option, libc_option)
bot.Run(cmd, shell=True, cwd=nacl_dir)
BuildTools(bits32, '32', 'sdl=none sel_ldr ncval')
BuildTools(bits64, '64', 'sdl=none sel_ldr ncval')
# Build irt_core, which is needed for running .nexes with sel_ldr.
def BuildIRT(bits):
cmd = '%s -j %s irt_core --mode=opt-host,nacl platform=x86-%s %s' % (
scons, options.jobs, bits, toolchain_option)
bot.Run(cmd, shell=True, cwd=nacl_dir)
# only build the IRT using the newlib chain. glibc does not support IRT.
if options.lib == 'newlib':
BuildIRT(32)
BuildIRT(64)
# Build and install untrusted libraries.
def BuildAndInstallLibsAndHeaders(bits):
cmd = ('%s install --mode=opt-host,nacl libdir=%s includedir=%s '
'platform=x86-%s force_sel_ldr=none %s%s') % (
scons,
os.path.join(options.toolchain,
'x86_64-nacl',
'lib32' if bits == 32 else 'lib'),
os.path.join(options.toolchain, 'x86_64-nacl', 'include'),
bits,
toolchain_option,
libc_option)
bot.Run(cmd, shell=True, cwd=nacl_dir)
BuildAndInstallLibsAndHeaders(32)
BuildAndInstallLibsAndHeaders(64)
def Install(options, tools=[], runtimes=[]):
'''Install the NaCl tools and runtimes into the SDK staging area.
Assumes that all necessary artifacts are built into the NaCl scons-out/staging
directory, and copies them from there into the SDK staging area under
toolchain.
Args:
options: The build options object. This is populated from command-line
args at start-up.
tools: A list of tool names, these should *not* have any executable
suffix - this utility adds that (e.g. '.exe' on Windows).
runtimes: A list of IRT runtimes. These artifacts should *not* have any
suffix attached - this utility adds the '.nexe' suffix along with an
ISA-specific string (e.g. '_x86_32').
'''
# TODO(bradnelson): add an 'install' alias to the main build for this.
nacl_dir = os.path.join(options.nacl_dir, 'native_client')
tool_build_path_32 = os.path.join(nacl_dir,
'scons-out',
'%s-x86-32' % (options.variant),
'staging')
tool_build_path_64 = os.path.join(nacl_dir,
'scons-out',
'%s-x86-64' % (options.variant),
'staging')
for nacl_tool in tools:
shutil.copy(os.path.join(tool_build_path_32,
'%s%s' % (nacl_tool, options.exe_suffix)),
os.path.join(options.toolchain,
'bin',
'%s_x86_32%s' % (nacl_tool, options.exe_suffix)))
shutil.copy(os.path.join(tool_build_path_64,
'%s%s' % (nacl_tool, options.exe_suffix)),
os.path.join(options.toolchain,
'bin',
'%s_x86_64%s' % (nacl_tool, options.exe_suffix)))
irt_build_path_32 = os.path.join(nacl_dir,
'scons-out',
'nacl_irt-x86-32',
'staging')
irt_build_path_64 = os.path.join(nacl_dir,
'scons-out',
'nacl_irt-x86-64',
'staging')
for nacl_irt in runtimes:
shutil.copy(os.path.join(irt_build_path_32,
'%s%s' % (nacl_irt, NEXE_SUFFIX)),
os.path.join(options.toolchain,
'runtime',
'%s_x86_32%s' % (nacl_irt, NEXE_SUFFIX)))
shutil.copy(os.path.join(irt_build_path_64,
'%s%s' % (nacl_irt, NEXE_SUFFIX)),
os.path.join(options.toolchain,
'runtime',
'%s_x86_64%s' % (nacl_irt, NEXE_SUFFIX)))
def BuildNaClTools(options):
if(options.clean):
bot.Print('Removing scons-out')
scons_out = os.path.join(options.nacl_dir, 'native_client', 'scons-out')
build_utils.CleanDirectory(scons_out)
else:
MakeInstallDirs(options)
Build(options)
Install(options, tools=['sel_ldr', 'ncval'], runtimes=['irt_core'])
return 0
def main(argv):
if sys.platform in ['win32', 'cygwin']:
exe_suffix = '.exe'
else:
exe_suffix = ''
script_dir = os.path.abspath(os.path.dirname(__file__))
parser = optparse.OptionParser()
parser.add_option(
'-t', '--toolchain', dest='toolchain',
default='toolchain',
help='where to put the NaCl tool binaries')
parser.add_option(
'-l', '--lib', dest='lib',
default='newlib',
help='whether to build against newlib (default) or glibc')
parser.add_option(
'-c', '--clean', action='store_true', dest='clean',
default=False,
help='whether to clean up the checkout files')
parser.add_option(
'-j', '--jobs', dest='jobs', default='1',
help='Number of parallel jobs to use while building nacl tools')
parser.add_option(
'-n', '--nacl_dir', dest='nacl_dir',
default=os.path.join(script_dir, 'packages', 'native_client'),
help='Location of Native Client repository used for building tools')
(options, args) = parser.parse_args(argv)
if args:
parser.print_help()
bot.Print('ERROR: invalid argument(s): %s' % args)
return 1
options.toolchain = os.path.abspath(options.toolchain)
options.exe_suffix = exe_suffix
# Pick variant.
if sys.platform in ['win32', 'cygwin']:
variant = 'dbg-win'
elif sys.platform == 'darwin':
variant = 'dbg-mac'
elif sys.platform in ['linux', 'linux2']:
variant = 'dbg-linux'
else:
assert False
options.variant = variant
if options.lib not in ['newlib', 'glibc']:
bot. | Print('ERROR: --lib must either be newlib or glibc')
return 1
return Bu | ildNaClTools(options)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
odoomrp/odoomrp-wip | product_pricelist_rules/models/product.py | Python | agpl-3.0 | 3,039 | 0 | # -*- coding: utf-8 -*-
##############################################################################
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free | Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be | useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
##############################################################################
from openerp import models, api, fields
class ProductTemplate(models.Model):
_inherit = "product.template"
@api.multi
def show_pricelists(self):
self.with_context(
{'search_default_pricelist_type_id': 1}).browse(self.ids)
result = self._get_act_window_dict(
'product_pricelist_rules.pricelist_items_action')
result['context'] = {'search_default_pricelist_type_id': 1,
'default_product_tmpl_id': self.id}
result['domain'] = [('product_tmpl_id', '=', self.id)]
return result
@api.multi
def _compute_count_pricelist(self):
pricelist_model = self.env['product.pricelist.item']
for record in self:
domain = [('product_tmpl_id', '=', record.id)]
record.count_pricelist = pricelist_model.search_count(domain)
count_pricelist = fields.Integer(string="Count Pricelist",
compute="_compute_count_pricelist")
class ProductProduct(models.Model):
_inherit = "product.product"
@api.multi
def show_pricelists(self):
res = super(self.product_tmpl_id.__class__,
self.product_tmpl_id).show_pricelists()
if res:
res['context'] = {'search_default_pricelist_type_id': 1,
'default_product_id': self.id}
res['domain'] = ['|', ('product_id', '=', self.id),
'&', ('product_tmpl_id', '=',
self.product_tmpl_id.id),
('product_id', '=', False)]
return res
@api.multi
def _compute_count_pricelist(self):
pricelist_model = self.env['product.pricelist.item']
for record in self:
domain = ['|', ('product_id', '=', record.id),
'&', ('product_tmpl_id', '=', record.product_tmpl_id.id),
('product_id', '=', False)]
record.count_pricelist = len(pricelist_model.search(domain))
count_pricelist = fields.Integer(string="Count Pricelist",
compute="_compute_count_pricelist")
|
odoousers2014/odoo_addons-2 | clv_frame/clv_tag/clv_tag.py | Python | agpl-3.0 | 2,011 | 0.013426 | # -*- encoding: utf-8 -*-
################################################################################
# #
# Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU Affero General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, | #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU Affero General Public License for more details. #
# | #
# You should have received a copy of the GNU Affero General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
################################################################################
from openerp import models, fields
class clv_frame(models.Model):
_inherit = 'clv_frame'
tag_ids = fields.Many2many('clv_tag',
'clv_frame_tag_rel',
'frame_id',
'tag_id',
'Tags')
class clv_tag(models.Model):
_inherit = 'clv_tag'
frame_ids = fields.Many2many('clv_frame',
'clv_frame_tag_rel',
'tag_id',
'frame_id',
'Frames')
|
getnikola/nikola | nikola/plugins/task/posts.py | Python | mit | 5,120 | 0.002149 | # -*- coding: utf-8 -*-
# Copyright © 2012-2022 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the
# Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the
# Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice
# shall be included in all copies or su | bstantial portions of
# the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILI | TY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""Build HTML fragments from metadata and text."""
import os
from copy import copy
from nikola.plugin_categories import Task
from nikola import utils
def update_deps(post, lang, task):
"""Update file dependencies as they might have been updated during compilation.
This is done for example by the ReST page compiler, which writes its
dependencies into a .dep file. This file is read and incorporated when calling
post.fragment_deps(), and only available /after/ compiling the fragment.
"""
task.file_dep.update([p for p in post.fragment_deps(lang) if not p.startswith("####MAGIC####")])
class RenderPosts(Task):
"""Build HTML fragments from metadata and text."""
name = "render_posts"
def gen_tasks(self):
"""Build HTML fragments from metadata and text."""
self.site.scan_posts()
kw = {
"translations": self.site.config["TRANSLATIONS"],
"timeline": self.site.timeline,
"default_lang": self.site.config["DEFAULT_LANG"],
"show_untranslated_posts": self.site.config['SHOW_UNTRANSLATED_POSTS'],
"demote_headers": self.site.config['DEMOTE_HEADERS'],
}
self.tl_changed = False
yield self.group_task()
def tl_ch():
self.tl_changed = True
yield {
'basename': self.name,
'name': 'timeline_changes',
'actions': [tl_ch],
'uptodate': [utils.config_changed({1: kw['timeline']})],
}
for lang in kw["translations"]:
deps_dict = copy(kw)
deps_dict.pop('timeline')
for post in kw['timeline']:
if not post.is_translation_available(lang) and not self.site.config['SHOW_UNTRANSLATED_POSTS']:
continue
# Extra config dependencies picked from config
for p in post.fragment_deps(lang):
if p.startswith('####MAGIC####CONFIG:'):
k = p.split('####MAGIC####CONFIG:', 1)[-1]
deps_dict[k] = self.site.config.get(k)
dest = post.translated_base_path(lang)
file_dep = [p for p in post.fragment_deps(lang) if not p.startswith("####MAGIC####")]
extra_targets = post.compiler.get_extra_targets(post, lang, dest)
task = {
'basename': self.name,
'name': dest,
'file_dep': file_dep,
'targets': [dest] + extra_targets,
'actions': [(post.compile, (lang, )),
(update_deps, (post, lang, )),
],
'clean': True,
'uptodate': [
utils.config_changed(deps_dict, 'nikola.plugins.task.posts'),
lambda p=post, l=lang: self.dependence_on_timeline(p, l)
] + post.fragment_deps_uptodate(lang),
'task_dep': ['render_posts:timeline_changes']
}
# Apply filters specified in the metadata
ff = [x.strip() for x in post.meta('filters', lang).split(',')]
flist = []
for i, f in enumerate(ff):
if not f:
continue
_f = self.site.filters.get(f)
if _f is not None: # A registered filter
flist.append(_f)
else:
flist.append(f)
yield utils.apply_filters(task, {os.path.splitext(dest)[-1]: flist})
def dependence_on_timeline(self, post, lang):
"""Check if a post depends on the timeline."""
if "####MAGIC####TIMELINE" not in post.fragment_deps(lang):
return True # No dependency on timeline
elif self.tl_changed:
return False # Timeline changed
return True
|
ShipleyCollege/ViPteam1 | ExtractAndAnalyzeCode/ProcessBranch.py | Python | gpl-3.0 | 1,199 | 0.035029 | from Node import *
from Pin import *
from Utilities import getQuotedString
###
# FUNCTION : Handle Branch nodes
###
def processBranch(lines, buildMode, nodeNumber, OUTPUT_FOLDER):
node = Node("Branch", buildMode)
inObject = False
pinName = ""
pinType = ""
pinSide = ""
for line in lines:
line = | line.strip(" ")
if inObject:
if line.startswith("PinName="):
print("-Name : " + line)
pinName = getQuotedString(line)
if line.startswith("PinType="):
print("-Type : " + line)
pinType = getQuotedString(line)
if line.startswith("Direction=EGPD_Output"):
print("-Direction : " + line)
pinSide = "Right"
if line.startswith("PinFriendlyName="):
print("Pin frei | ndly name : " + line)
pinName = getQuotedString(line)
if line.startswith("Begin Object Name="):
inObject = True
pinName = ""
PinType = ""
pinSide = "Left"
print("In Object")
if line.startswith("End Object"):
inObject = False
if pinSide != "":
pin = Pin(pinName, pinType)
node.addPin(pin, pinSide)
pinName = ""
PinType = ""
pinSide = ""
# print("Out Object")
# print(">" + line + "<");
print(node)
node.writeNode(nodeNumber, OUTPUT_FOLDER)
|
Glasgow2015/team-10 | env/lib/python2.7/site-packages/djangocms_text_ckeditor/utils.py | Python | apache-2.0 | 3,506 | 0.001141 | # -*- coding: utf-8 -*-
import os
import re
from cms.models import CMSPlugin
from django.core.files.storage import get_storage_class
from django.template.defaultfilters import force_escape
from django.utils.functional import LazyObject
OBJ_ADMIN_RE_PATTERN = r'<img [^>]*\bid="plugin_obj_(\d+)"[^>]*/?>'
OBJ_ADMIN_RE = re.compile(OBJ_ADMIN_RE_PATTERN)
def plugin_to_tag(obj):
return (
u'<img src="%(icon_src)s" alt="%(icon_alt)s" title="%(icon_alt)s"'
u'id="plugin_obj_%(id)d" />' % (
dict(
id=obj.id, icon_src=force_escape(obj.get_instance_icon_src()),
icon_alt=force_escape(obj.get_instance_icon_alt()),
)
)
)
def plugin_tags_to_id_list(text, regex=OBJ_ADMIN_RE):
ids = regex.findall(text)
return [int(id) for id in ids if id.isdigit()]
def plugin_tags_to_user_html(text, context, placeholder):
"""
Convert plugin object 'tags' into the form for public site.
context is the template context to use, placeholder is the placeholder name
"""
plugin_map = _plugin_dict(text)
def _render_tag(m):
plugin_id = int(m.groups()[0])
try:
obj = plugin_map[plugin_id]
obj._render_meta.text_enabled = True
except KeyError:
# Object must have been deleted. It cannot be rendered to
# end user so just remove it from the HTML altogether
return u''
return obj.render_plugin(context, placeholder)
return OBJ_ADMIN_RE.sub(_render_tag, text)
def replace_plugin_tags(text, id_dict):
def _replace_tag(m):
plugin_id = int(m.groups()[0])
new_id = id_dict.get(plugin_id)
try:
obj = CMSPlugin.objects.get(pk=new_id)
except CMSPlugin.DoesNotExist:
# Object must have been deleted. It cannot be rendered to
# end user, or edited, so just remove it from the HTML
# altogether
return u''
return (
u'<img src="%(icon_src)s" alt="%(icon_alt)s"'
u'title="%(icon_alt)s" id="plugin_obj_%(id)d" />' % (
dict(id=new_id,
icon_src=force_escape(obj.get_instance_icon_src()), |
icon_alt=force_escape(obj.get_instance_icon_alt()),
)
| )
)
return OBJ_ADMIN_RE.sub(_replace_tag, text)
def _plugin_dict(text, regex=OBJ_ADMIN_RE):
try:
from cms.utils.plugins import downcast_plugins
except ImportError:
from cms.plugins.utils import downcast_plugins
plugin_ids = plugin_tags_to_id_list(text, regex)
plugin_list = downcast_plugins(CMSPlugin.objects.filter(pk__in=plugin_ids), select_placeholder=True)
return dict((plugin.pk, plugin) for plugin in plugin_list)
"""
The following class is taken from https://github.com/jezdez/django/compare/feature/staticfiles-templatetag
and should be removed and replaced by the django-core version in 1.4
"""
default_storage = 'django.contrib.staticfiles.storage.StaticFilesStorage'
class ConfiguredStorage(LazyObject):
def _setup(self):
from django.conf import settings
self._wrapped = get_storage_class(getattr(settings, 'STATICFILES_STORAGE', default_storage))()
configured_storage = ConfiguredStorage()
def static_url(path):
'''
Helper that prefixes a URL with STATIC_URL and cms
'''
if not path:
return ''
return configured_storage.url(os.path.join('', path))
|
trevor/calendarserver | calendarserver/tools/checkdatabaseschema.py | Python | apache-2.0 | 7,687 | 0.003512 | ##
# Copyright (c) 2014 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from __future__ import print_function
from getopt import getopt, GetoptError
import os
import re
import subprocess
import sys
from twext.enterprise.dal.model | import Schema, Table, Column, Sequence
from twext.enterprise.dal.parseschema import addSQLToSchema, schemaFromPath
from twisted.python.filepath import FilePath
USERNAME = "caldav"
DATABASENAME = "caldav"
PGSOCKETDIR = "127.0.0.1"
SCHEMADIR = "./txdav/common/datastore/sql_schema/"
# Executables:
PSQL = "../postgresql/_root/bin/psql"
def usage(e=None):
name = os.path.basename(s | ys.argv[0])
print("usage: %s [options] username" % (name,))
print("")
print(" Check calendar server postgres database and schema")
print("")
print("options:")
print(" -d: path to server's sql_schema directory [./txdav/common/datastore/sql_schema/]")
print(" -k: postgres socket path (value for psql -h argument [127.0.0.1])")
print(" -p: location of psql tool if not on PATH already [psql]")
print(" -x: use default values for OS X server")
print(" -h --help: print this help and exit")
print(" -v --verbose: print additional information")
print("")
if e:
sys.stderr.write("%s\n" % (e,))
sys.exit(64)
else:
sys.exit(0)
def execSQL(title, stmt, verbose=False):
"""
Execute the provided SQL statement, return results as a list of rows.
@param stmt: the SQL to execute
@type stmt: L{str}
"""
cmdArgs = [
PSQL,
"-h", PGSOCKETDIR,
"-d", DATABASENAME,
"-U", USERNAME,
"-t",
"-c", stmt,
]
try:
if verbose:
print("\n{}".format(title))
print("Executing: {}".format(" ".join(cmdArgs)))
out = subprocess.check_output(cmdArgs, stderr=subprocess.STDOUT)
if verbose:
print(out)
except subprocess.CalledProcessError, e:
if verbose:
print(e.output)
raise CheckSchemaError(
"%s failed:\n%s (exit code = %d)" %
(PSQL, e.output, e.returncode)
)
return [s.strip() for s in out.splitlines()[:-1]]
def getSchemaVersion(verbose=False):
"""
Return the version number for the schema installed in the database.
Raise CheckSchemaError if there is an issue.
"""
out = execSQL(
"Reading schema version...",
"select value from calendarserver where name='VERSION';",
verbose
)
try:
version = int(out[0])
except ValueError, e:
raise CheckSchemaError(
"Failed to parse schema version: %s" % (e,)
)
return version
def dumpCurrentSchema(verbose=False):
schema = Schema("Dumped schema")
tables = {}
# Tables
rows = execSQL(
"Schema tables...",
"select table_name from information_schema.tables where table_schema = 'public';",
verbose
)
for row in rows:
name = row
table = Table(schema, name)
tables[name] = table
# Columns
rows = execSQL(
"Reading table '{}' columns...".format(name),
"select column_name from information_schema.columns where table_schema = 'public' and table_name = '{}';".format(name),
verbose
)
for row in rows:
name = row
# TODO: figure out the type
column = Column(table, name, None)
table.columns.append(column)
# Indexes
# TODO: handle implicit indexes created via primary key() and unique() statements within CREATE TABLE
rows = execSQL(
"Schema indexes...",
"select indexdef from pg_indexes where schemaname = 'public';",
verbose
)
for indexdef in rows:
addSQLToSchema(schema, indexdef.replace("public.", ""))
# Sequences
rows = execSQL(
"Schema sequences...",
"select sequence_name from information_schema.sequences where sequence_schema = 'public';",
verbose
)
for row in rows:
name = row
Sequence(schema, name)
return schema
def checkSchema(dbversion, verbose=False):
"""
Compare schema in the database with the expected schema file.
"""
dbschema = dumpCurrentSchema(verbose)
# Find current schema
fp = FilePath(SCHEMADIR)
fpschema = fp.child("old").child("postgres-dialect").child("v{}.sql".format(dbversion))
if not fpschema.exists():
fpschema = fp.child("current.sql")
expectedSchema = schemaFromPath(fpschema)
mismatched = dbschema.compare(expectedSchema)
if mismatched:
print("\nCurrent schema in database is mismatched:\n\n" + "\n".join(mismatched))
else:
print("\nCurrent schema in database is a match to the expected server version")
class CheckSchemaError(Exception):
pass
def error(s):
sys.stderr.write("%s\n" % (s,))
sys.exit(1)
def main():
try:
(optargs, _ignore_args) = getopt(
sys.argv[1:], "d:hk:vx", [
"help",
"verbose",
],
)
except GetoptError, e:
usage(e)
verbose = False
global SCHEMADIR, PGSOCKETDIR, PSQL
for opt, arg in optargs:
if opt in ("-h", "--help"):
usage()
elif opt in ("-d",):
SCHEMADIR = arg
elif opt in ("-k",):
PGSOCKETDIR = arg
elif opt in ("-p",):
PSQL = arg
elif opt in ("-x",):
sktdir = FilePath("/var/run/caldavd")
for skt in sktdir.children():
if skt.basename().startswith("ccs_postgres_"):
PGSOCKETDIR = skt.path
PSQL = "/Applications/Server.app/Contents/ServerRoot/usr/bin/psql"
SCHEMADIR = "/Applications/Server.app/Contents/ServerRoot/usr/share/caldavd/lib/python/txdav/common/datastore/sql_schema/"
elif opt in ("-v", "--verbose"):
verbose = True
else:
raise NotImplementedError(opt)
# Retrieve the db_version number of the installed schema
try:
db_version = getSchemaVersion(verbose=verbose)
except CheckSchemaError, e:
db_version = 0
# Retrieve the version number from the schema file
currentschema = FilePath(SCHEMADIR).child("current.sql")
try:
data = currentschema.getContent()
except IOError:
print("Unable to open the current schema file: %s" % (currentschema.path,))
else:
found = re.search("insert into CALENDARSERVER values \('VERSION', '(\d+)'\);", data)
if found is None:
print("Schema is missing required schema VERSION insert statement: %s" % (currentschema.path,))
else:
current_version = int(found.group(1))
if db_version == current_version:
print("Schema version {} is current".format(db_version))
else: # upgrade needed
print("Schema needs to be upgraded from {} to {}".format(db_version, current_version))
checkSchema(db_version, verbose)
if __name__ == "__main__":
main()
|
MarcoStucchi/ModbusTCP-Client-Server | device/device.py | Python | mit | 2,774 | 0.01478 | #Standard libraries import
from thread import *
#Local import
import tcp_server
device_repository = [
{'type': 'new emax',
'dictionary' : [['Current I1', 2000, 2, 0], ['Current I2', 2002, 2, 0],
['Current I3', 2004, 2, 0], ['Current Ne', 2006, 2, 0]]},
{'type': 'emax2',
'dictionary': [['Current I1', 2000, 2, 0], ['Current I2', 2002, 2, 0],
['Current I3', 2004, 2, 0], ['Current N | e', 2006, 2, 0]]}
]
##Device class.
# This class loads a device object dictionary and manage the interface with the external worls
class Device:
name = '' ##Device name freely assigned by user
type = '' ##Device type, co | mpared with the repository
object_dictionary = [] ##Object Dictionary
object_dictionary_columns = \
('Name', 'ModbusAddress', 'Size') ##Dictionary columns
##Constructor method
def __init__(self, name = 'Generic device', type = 'emax2', diagnostics = False):
#Assigning device name and type
self.name = name
self.type = type
server = 0
#Generating Object Dictionary
for device_item in device_repository:
#Look for device
if device_item['type'] == self.type:
#Matching device found !!
if diagnostics:
print 'Device instantiated: ' + self.name + ' (' + self.type + ')'
#Loading device dictionary
for entry in device_item['dictionary']:
self.object_dictionary.append(dict(zip(self.object_dictionary_columns, entry)))
if diagnostics:
print self.object_dictionary[-1]
## Create a thread for the device
start_new_thread(self.device_thread, ())
## Quit loop
break
##Device run-time management
def device_thread(self):
print 'Device thread started..'
## Create a thread for the device
start_new_thread(self.device_server_thread, ())
while True:
#Execute device run-time..
pass
##Device server thread - required since it's typically blocked listening for connections
def device_server_thread(self):
print 'Device server thread started..'
#Creating ModbusTCP server
self.server = tcp_server.ModbusTCPServer(self.name, 502, True)
#Creating the ModbusTCP server thread
self.server.listen_on_connections()
##Top-level detection
if __name__ == '__main__':
device = Device('CB building', 'emax2', True)
while True:
pass
|
kubeflow/pipelines | components/aws/sagemaker/common/generate_components.py | Python | apache-2.0 | 4,875 | 0.000821 | #!/usr/bin/env python3
"""A command line tool for generating component specification files."""
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
from common.component_compiler import SageMakerComponentCompiler
import common.sagemaker_component as component_module
COMPONENT_DIRECTORIES = [
"batch_transform",
"create_simulation_app",
"delete_simulation_app",
"deploy",
"ground_truth",
"hyperparameter_tuning",
"model",
"process",
"rlestimator",
"simulation_job",
"simulation_job_batch",
"train",
"workteam",
]
def parse_arguments():
"""Parse command line arguments."""
parser = argparse.ArgumentParser()
parser.add_argument(
"--tag", type=str, required=True, help="The component container tag."
)
parser.add_argument(
"--image",
type=str,
required=False,
default="amazon/aws-sagemaker-kfp-components",
help="The component container image.",
)
parser.add_argument(
"--check",
type=bool,
required=False,
default=False,
help="Dry-run to compare against the existing files.",
)
args = parser.parse_args()
return args
class ComponentCollectorContext:
"""Context for collecting components registered using their decorators."""
def __enter__(self):
component_specs = []
def add_component(func):
component_specs.append(func)
return func
# Backup previous handler
self.old_handler = component_module._component_decorator_handler
component_module._component_decorator_handler = add_component
return component_specs
def __exit__(self, *args):
component_module._component_decorator_handler = self.old_handler
def compile_spec_file(component_file, spec_dir, args):
"""Attempts to compile a component specification file into a YAML spec.
Writes a `component.yaml` file into a file one directory above where the
specification file exists. For example if the spec is in `/my/spec/src`,
it will create a file `/my/spec/component.yaml`.
Args:
component_file: A path to a component definition.
spec_dir: The path containing the specification.
args: Optional arguments as defined by the command line.
check: Dry-run and check that the files match the expected output.
"""
output_path = Path(spe | c_dir.parent, "component.yaml")
relative_path = component_file.relative_to(root)
# Remove extension
relative_module = os.path.splitext(str(relative_path))[0]
with ComponentCollectorContext() as component_metadatas:
# Import the file using the path relative to the root
__import__(relative_module.replace("/", "." | ))
if len(component_metadatas) != 1:
raise ValueError(
f"Expected exactly 1 ComponentMetadata in {component_file}, found {len(component_metadatas)}"
)
if args.check:
return SageMakerComponentCompiler.check(
component_metadatas[0],
str(relative_path),
str(output_path.resolve()),
component_image_tag=args.tag,
component_image_uri=args.image,
)
SageMakerComponentCompiler.compile(
component_metadatas[0],
str(relative_path),
str(output_path.resolve()),
component_image_tag=args.tag,
component_image_uri=args.image,
)
if __name__ == "__main__":
import os
from pathlib import Path
args = parse_arguments()
cwd = Path(os.path.join(os.getcwd(), os.path.dirname(__file__)))
root = cwd.parent
for component in COMPONENT_DIRECTORIES:
component_dir = Path(root, component)
component_src_dir = Path(component_dir, "src")
components = sorted(component_src_dir.glob("*_component.py"))
if len(components) < 1:
raise ValueError(f"Unable to find _component.py file for {component}")
elif len(components) > 1:
raise ValueError(f"Found multiple _component.py files for {component}")
result = compile_spec_file(components[0], component_src_dir, args)
if args.check and result:
print(result)
raise ValueError(
f'Difference found between to the existing spec for the "{component}" component'
)
|
bleachbit/bleachbit | bleachbit/Action.py | Python | gpl-3.0 | 22,786 | 0.001097 | # vim: ts=4:sw=4:expandtab
# -*- coding: UTF-8 -*-
# BleachBit
# Copyright (C) 2008-2021 Andrew Ziem
# https://www.bleachbit.org
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Actions that perform cleaning
"""
from bleachbit import Command, FileUtilities, General, Special, DeepScan
from bleachbit import _, fs_scan_re_flags
import glob
import logging
import os
import re
if 'posix' == os.name:
from bleachbit import Unix
logger = logging.getLogger(__name__)
def has_glob(s):
"""Checks whether the string contains any glob characters"""
return re.search('[?*\[\]]', s) is not None
def expand_multi_var(s, variables):
"""Expand strings with potentially-multiple values.
The placeholder is written in the format $$foo$$.
The function always returns a list of one or more strings.
"""
if not variables or s.find('$$') == -1:
# The input string is missing $$ or no variables are given.
return (s,)
var_keys_used = []
ret = []
for var_key in variables.keys():
sub = '$$%s$$' % var_key
if s.find(sub) > -1:
var_keys_used.append(var_key)
if not var_keys_used:
# No matching variables used, so return input string unmodified.
return (s,)
# filter the dictionary to the keys used
vars_used = {key: value for key,
value in variables.items() if key in var_keys_used}
# create a product of combinations
from itertools import product
vars_product = (dict(zip(vars_used, x))
for x in product(*vars_used.values()))
for var_set in vars_product:
ms = s # modified version of input string
for var_key, var_value in var_set.items():
sub = '$$%s$$' % var_key
ms = ms.replace(sub, var_value)
ret.append(ms)
if ret:
return ret
else:
# The string has $$, but it did not match anything
return (s,)
#
# Plugin framework
# http://martyalchin.com/2008/jan/10/simple-plugin-framework/
#
class PluginMount(type):
"""A simple plugin framework"""
def __init__(cls, name, bases, attrs):
if not hasattr(cls, 'plugins'):
cls.plugins = []
else:
cls.plugins.append(cls)
class ActionProvider(metaclass=PluginMount):
"""Abstract base class for performing individual cleaning actions"""
def __init__(self, action_node, path_vars=None):
"""Create ActionProvider from CleanerML <action>"""
pass
def get_deep_scan(self):
"""Return a dictionary used to construct a deep scan"""
raise StopIteration
def get_commands(self):
"""Yield each command (which can be previewed or executed)"""
pass
#
# base class
#
class FileActionProvider(ActionProvider):
"""Base class for providers which work on individual files"""
action_key = '_file'
CACHEABLE_SEARCHERS = ('walk.files',)
# global cache <search_type, path, list_of_entries>
cache = ('nothing', '', tuple())
def __init__(self, action_element, path_vars=None):
"""Initialize file search"""
ActionProvider.__init__(self, action_element, path_vars)
self.regex = action_element.getAttribute('regex')
assert(isinstance(self.regex, (str, type(None))))
self.nregex = action_element.getAttribute('nregex')
assert(isinstance(self.nregex, (str, type(None))))
self.wholeregex = action_element.getAttribute('wholeregex')
assert(isinstance(self.wholeregex, (str, type(None))))
self.nwholeregex = action_element.getAttribute('nwholeregex')
assert(isinstance(self.nwholeregex, (str, type(None))))
self.search = action_element.getAttribute('search')
self.object_type = action_element.getAttribute('type')
self._set_paths(action_element.getAttribute('path'), path_vars)
self.ds = None
if 'deep' == self.search:
self.ds = (self.paths[0], DeepScan.Search(
command=action_element.getAttribute('command'),
regex=self.regex, nregex=self.nregex,
wholeregex=self.wholeregex, nwholeregex=self.nwholeregex))
if not len(self.paths) == 1:
logger.warning(
# TRANSLATORS: Multi-value variables are explained in the online documentation.
# Basically, they are like an environment variable, but each multi-value variable
# can have multiple values. They're a way to make CleanerML files more concise.
_("Deep scan does not support multi-value variable."))
if not any([self.object_type, self.regex, self.nregex,
self.wholeregex, self.nwholeregex]):
# If the filter is not needed, bypass it for speed.
self.get_paths = self._get_paths
def _set_paths(self, raw_path, path_vars):
"""Set the list of paths to work on"""
self.paths = []
# expand special $$foo$$ which may give multiple values
for path2 in expand_multi_var(raw_path, path_vars):
path3 = os.path.expanduser(os.path.expandvars(path2))
if os.name == 'nt' and path3:
# convert forward slash to backslash for compatibility with getsize()
# and for display. Do not convert an empty path, or it will become
# the current directory (.).
path3 = os.path.normpath(path3)
self.paths.append(path3)
def get_deep_scan(self):
if self.ds is None:
return
yield self.ds
def get_paths(self):
"""Process the filters: regex, nregex, type
If a filter is defined and it fails to match, this function
returns False. Otherwise, this function returns True."""
# optimize tight loop, avoid slow python "."
regex = self.regex
nregex = self.nregex
wholeregex = self.wholeregex
nwholeregex = self.nwholeregex
basename = os.path.basename
object_type = self.object_type
if self.regex:
regex_c_search = re.compile(self.regex, fs_scan_re_flags).search
else:
regex_c_search = None
if self.nregex:
nregex_c_search = re.compile(self.nregex, fs_scan_re_flags).search
else:
nregex_c_search = None
if self.wholeregex:
wholeregex_c_search = re.compile(self.wholeregex, fs_scan_re_flags).search
else:
wholeregex_c_search = None
if self.nwholeregex:
nwholeregex_c_search = re.compile(
self.nwholeregex, fs_scan_re_flags).search
else:
nwholeregex_c_search = None
for path in self._get_paths():
if regex and not regex_c_search(basename(path)):
continue
if nregex and nregex_c_search(basename(path)):
continue
if wholeregex and not wholeregex_c_search(path):
continue
if nwholeregex and nwholeregex_c_search(path):
continue
if object_type:
| if 'f' == object_type and not os.path.isfile(path):
continue
elif 'd' == object_type and not os.path.isdir(path):
continue
yield path
def _get_paths(sel | f):
"""Return a filtered list of files"""
def get_file(path):
if os.path.lexists(path):
yield path
def get_walk_all(top):
"""D |
bmazin/ARCONS-pipeline | examples/skyAnalysis/statsExplorer.py | Python | gpl-2.0 | 12,388 | 0.016306 | from PyQt4.QtCore import *
from PyQt4.QtGui import *
from PyQt4 import QtGui
import matplotlib.pyplot as plt
import matplotlib
import matplotlib.cm as cm
import numpy as np
| import sys
import os
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.backends.backend_qt4agg import NavigationToolbar2QTAgg as NavigationToolbar
from matplotlib.figure import Figure
from mpl_toolkits.mplot3d import Axes3D
from util.ObsFile import ObsFile
from util.FileName import FileName
from util.readDict import readDict
from util.popup import PopUp
from hotpix import hotPixelsMatt as hotPixels
d | ef mad(a,axis=-1):
if axis==-1:
return 1.4826*np.median(np.abs(a-np.median(a)))
elif axis == 0:
return 1.4826*np.median(np.abs(a-np.median(a)))
class AppForm(QMainWindow):
def __init__(self, parent=None):
QMainWindow.__init__(self, parent)
self.setWindowTitle('Pixel Explorer')
paramFile = sys.argv[1]
self.params = readDict()
self.params.read_from_file(paramFile)
self.createMainFrame()
self.createStatusBar()
def createMainFrame(self):
self.main_frame = QWidget()
# Create the mpl Figure and FigCanvas objects.
self.dpi = 100
self.fig = Figure((7.0, 7.0), dpi=self.dpi)
self.canvas = FigureCanvas(self.fig)
self.canvas.setParent(self.main_frame)
self.axes0 = self.fig.add_subplot(111)
cid=self.canvas.mpl_connect('button_press_event', self.clickCanvas)
# Create the navigation toolbar, tied to the canvas
self.mpl_toolbar = NavigationToolbar(self.canvas, self.main_frame)
vbox = QVBoxLayout()
vbox.addWidget(self.canvas)
vbox.addWidget(self.mpl_toolbar)
self.main_frame.setLayout(vbox)
self.setCentralWidget(self.main_frame)
def createStatusBar(self):
self.status_text = QLabel("Awaiting orders.")
self.statusBar().addWidget(self.status_text, 1)
def openImage(self):
timestampList = [self.params['obsUtcDate']+'-'+ts for ts in self.params['obsSequence']]
run = self.params['run']
sunsetDate = self.params['obsSunsetDate']
utcDate = self.params['obsUtcDate']
self.intTime = self.params['intTime']
wvlLowerCutoff = self.params['wvlLowerCutoff']
wvlUpperCutoff = self.params['wvlUpperCutoff']
calTimestamp = self.params['wvlTimestamp']
wfn = FileName(run=run,date=sunsetDate,tstamp=calTimestamp).calSoln()
calfn = FileName(run=run,date=self.params['wvlSunsetDate'],tstamp=calTimestamp).cal()
ffn = FileName(run=run,date=self.params['flatCalSunsetDate'],tstamp='').flatSoln()
obsFns = [FileName(run=run,date=sunsetDate,tstamp=timestamp).obs() for timestamp in timestampList]
self.obList = [ObsFile(obsFn) for obsFn in obsFns]
for ob in self.obList:
print 'Loading ',ob.fullFileName
ob.loadWvlCalFile(wfn)
ob.loadFlatCalFile(ffn)
self.cal = ObsFile(calfn)
self.cal.loadWvlCalFile(wfn)
self.cal.loadFlatCalFile(ffn)
self.loadSpectra()
def loadSpectra(self):
fileName = self.params['outFileName']
if os.path.exists(fileName):
data = np.load(fileName)
self.spectra = data['cube']
self.frame = data['frame']
self.frameValues = self.frame[~np.isnan(self.frame)]
self.wvlBinEdges = np.array(self.obList[0].flatCalWvlBins)
self.frameIntTime = 300*6
else:
self.spectra,self.wvlBinEdges = self.obList[0].getSpectralCube(weighted=True)
self.frameIntTime = 0
for ob in self.obList[1:]:
print ob.fileName
cube,wvlBinEdges = ob.getSpectralCube(weighted=True)
self.spectra += cube
self.frameIntTime += ob.getFromHeader('exptime')
self.spectra = np.array(self.spectra,dtype=np.float64)
self.frame = np.sum(self.spectra,axis=2)
hotPixMask = hotPixels.findHotPixels(image=self.frame,nsigma=2)['badflag']
self.frame[hotPixMask != 0] = np.nan
self.frame[self.frame == 0] = np.nan
self.frameValues = self.frame[~np.isnan(self.frame)]
np.savez(fileName,cube=self.spectra,frame=self.frame)
def plotWeightedImage(self):
self.showFrame = np.array(self.frame)
self.showFrame[np.isnan(self.frame)] = 0
handleMatshow = self.axes0.matshow(self.showFrame,cmap=matplotlib.cm.gnuplot2,origin='lower',vmax=np.mean(self.showFrame)+3*np.std(self.showFrame))
self.fig.colorbar(handleMatshow)
def clickCanvas(self,event):
self.showLaserSpectrum = True
self.showPixelSpectrum = True
self.showWvlLightCurves = True
self.showWvlLightCurveHists = False
self.showStdVsIntTime = True
self.showNormStdVsIntTime = True
col = round(event.xdata)
row = round(event.ydata)
if self.showPixelSpectrum:
#next plot the integrated spectrum for this pixel in the total image
spectrum = self.spectra[row,col]
print sum(spectrum),' counts in broadband spectrum'
def plotFunc(fig,axes):
axes.plot(self.wvlBinEdges[:-1],spectrum)
axes.set_xlabel(r'$\lambda$ ($\AA$)')
axes.set_ylabel(r'total counts')
popup = PopUp(parent=self,plotFunc=plotFunc,title='spectrum, pixel %d,%d (intTime=%d)'%(row,col,self.frameIntTime))
rebinSpecBins = 5
firstAfterConvolve = rebinSpecBins//2
rebinnedWvlEdges = self.wvlBinEdges[::rebinSpecBins]
if self.showLaserSpectrum:
#First plot the laser cal spectrum for this pixel to see if it's good
laserSpectrum,binEdges = self.cal.getPixelSpectrum(row,col,weighted=True)
def plotFunc(fig,axes):
axes.plot(binEdges[:-1],laserSpectrum)
axes.set_xlabel(r'$\lambda$ ($\AA$)')
axes.set_ylabel(r'total counts')
popup = PopUp(parent=self,plotFunc=plotFunc,title='Laser Cal Spectrum, pixel %d,%d'%(row,col))
if self.showWvlLightCurves:
spectrumInTime = []
for iOb,ob in enumerate(self.obList):
for sec in range(0,ob.getFromHeader('exptime'),self.intTime):
spectrum,binEdges = ob.getPixelSpectrum(pixelRow=row,pixelCol=col,firstSec=sec,integrationTime=self.intTime,weighted=True)
spectrum = np.convolve(spectrum,np.ones(rebinSpecBins),'same')[firstAfterConvolve::rebinSpecBins]
spectrumInTime.append(spectrum)
spectrumInTime = np.array(spectrumInTime)
nBins = np.shape(spectrumInTime)[1]
def plotFunc(fig,axes):
#plot counts vs time for each wavelength bin
t=np.arange(len(spectrumInTime[:,0]))*self.intTime
for iBin in xrange(nBins):
axes.plot(t,1.0*spectrumInTime[:,iBin]/self.intTime,
c=cm.jet((iBin+1.)/nBins),
label=r'%d-%d $\AA$'%(rebinnedWvlEdges[iBin],
rebinnedWvlEdges[iBin+1]))
axes.set_xlabel('time (s)')
axes.set_ylabel('cps')
#plot counts vs time summed over all wavelengths
axes.plot(t,np.sum(spectrumInTime,axis=1)/self.intTime,c='k',
label=r'%d-%d $\AA$'%(rebinnedWvlEdges[0],rebinnedWvlEdges[-1]))
#axes.legend(loc='center right')
popup = PopUp(parent=self,plotFunc=plotFunc,title='Light Curve by Band, Pixel %d,%d'%(row,col))
if self.showWvlLightCurveHists or self.showStdVsIntTime or self.showNormStdVsIntTime:
intTimes = [1,2,3,5,10,15,30]
spectrumVsIntTimeVsTime = []
for intTime in intTimes:
spectrumInTime = []
for iOb,ob in enumerate(self.obList):
for sec in range(0,ob.getF |
legacysurvey/rapala | bokpipe/tools/bokwcs.py | Python | bsd-3-clause | 1,410 | 0.021986 | #!/usr/bin/env/python
import sys
import argparse
from bokpipe.bokastrom import scamp_solve
parser = argparse.ArgumentParser()
parser.add_argument("image",type=str,
help="input FITS image")
parser.add_argument("catalog",type=str,
help="input FITS catalog")
parser.add_argument("-a","--args",type=str,
help="arguments to pass to scamp config")
parser.add_argument("-f","--filter",type=str,default='g',
help="reference band")
parser.add_argument("-p","--plots",action="store_true",
help="write check plots")
parser.add_argument("-r","--reference",type=str,default=None,
help="reference catalog")
parser.add_argument('-v','--verbose',action='count',
help='increase output verbosity')
parser.add_argument("-w","--write",action="store_true",
help="write WCS to image header")
parser.add_argument("--single",action="store_true",
help="single pass")
args = parser.parse_args()
kwargs = {}
if args.args is not None:
arglist = args.args.split()
for a in arglist:
k,v = a.split('=')
kwargs[k] = v
scamp_solve(args.image,args.ca | talog,refStarCatFile=args.reference,
filt=args.filter,savewcs=args.write,clobber=True,
check_plots=args.plots | ,twopass=not args.single,
verbose=args.verbose,**kwargs)
|
talapus/Ophidian | Academia/ListComprehensions/cubes_by_four.py | Python | bsd-3-clause | 107 | 0.009346 | #!/usr/bin/env python
cub | es_by_four = [x**3 for x in range(1,11) if (x**3) % 4 == 0]
print | cubes_by_four
|
clusterfudge/boomer | boomer/filesystem/__init__.py | Python | gpl-3.0 | 1,932 | 0 | # This file is part of Boomer Core.
#
# Boomer Core is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Boomer Core is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Boomer Core. If not, se | e <http://www.gnu.org/licenses/>.
#
# Forked from Mycroft Core on 2017-07-29
import os
from os.path import join, expanduser, isdir
__author__ = 'jdorleans'
class FileSystemAccess(object):
    """
    Sandboxed access to the boomer filesystem. Intended to be attached to
    skills at initialization time so each skill gets its own private
    namespace under ~/.boomer/<path>.
    """

    def __init__(self, path):
        self.path = self.__build_root(path)

    @staticmethod
    def __build_root(path):
        # Reject anything that is not a non-empty string before touching disk.
        if not (isinstance(path, str) and path):
            raise ValueError("path must be initialized as a non empty string")
        root = join(expanduser('~'), '.boomer', path)
        # Create the namespace directory on first use.
        if not isdir(root):
            os.makedirs(root)
        return root

    def open(self, filename, mode):
        """
        Get a handle to a file (with the provided mode) within the
        skill-specific namespace.
        :param filename: a str representing a path relative to the namespace.
                         subdirs not currently supported.
        :param mode: a file handle mode
        :return: an open file handle.
        """
        return open(join(self.path, filename), mode)

    def exists(self, filename):
        """Return True if *filename* exists inside this namespace."""
        return os.path.exists(join(self.path, filename))
|
PanDAWMS/panda-server | pandaserver/dataservice/AdderGen.py | Python | apache-2.0 | 39,116 | 0.005778 | '''
add data to dataset
'''
import os
import re
import sys
import time
import datetime
# import fcntl
import traceback
import xml.dom.minidom
import uuid
from pandaserver.config import panda_config
from pandacommon.pandalogger.PandaLogger import PandaLogger
from pandacommon.pandalogger.LogWrapper import LogWrapper
from pandaserver.taskbuffer import EventServiceUtils
from pandaserver.taskbuffer import retryModule
from pandaserver.taskbuffer import JobUtils
import pandaserver.taskbuffer.ErrorCode
import pandaserver.dataservice.ErrorCode
from pandaserver.dataservice import Closer
try:
long
except NameError:
long = int
# logger
_logger = PandaLogger().getLogger('Adder')
panda_config.setupPlugin()
class AdderGen(object):
    # constructor
    def __init__(self, taskBuffer, jobID, jobStatus, attemptNr, ignoreTmpError=True, siteMapper=None,
                 pid=None, prelock_pid=None, lock_offset=10):
        """Set up an adder for one PanDA job's output.

        :param taskBuffer: task buffer interface used for all DB access
        :param jobID: PandaID of the job whose output is being added
        :param jobStatus: job status reported with the output report
        :param attemptNr: attempt number of the job output report to process
        :param ignoreTmpError: if True, temporary errors are tolerated/retried
        :param siteMapper: site mapper handed to the concrete adder plugin
        :param pid: identifier of the process handling this job
        :param prelock_pid: pid that pre-locked the output report, if any
        :param lock_offset: lock timeout offset (units per taskBuffer contract
                            -- TODO confirm)
        """
        self.job = None  # JobSpec; fetched lazily via taskBuffer.peekJobs
        self.jobID = jobID
        self.jobStatus = jobStatus
        self.taskBuffer = taskBuffer
        self.ignoreTmpError = ignoreTmpError
        self.lock_offset = lock_offset
        self.siteMapper = siteMapper
        self.datasetMap = {}
        # per-file extra metadata collected during adding
        self.extraInfo = {'surl':{},'nevents':{},'lbnr':{},'endpoint':{}, 'guid':{}}
        self.attemptNr = attemptNr
        self.pid = pid
        self.prelock_pid = prelock_pid
        self.data = None  # raw file catalog payload of the output report
        # logger
        self.logger = LogWrapper(_logger,str(self.jobID))
    # dump file report
    def dumpFileReport(self, fileCatalog, attemptNr):
        """Store the pilot's file catalog in the job output report DB table.

        :param fileCatalog: XML file catalog text reported by the pilot
        :param attemptNr: attempt number; None is normalized to 0
        """
        self.logger.debug("dump file report")
        # dump Catalog into file
        # if attemptNr is None:
        #     xmlFile = '%s/%s_%s_%s' % (panda_config.logdir,self.jobID,self.jobStatus,
        #                                str(uuid.uuid4()))
        # else:
        #     xmlFile = '%s/%s_%s_%s_%s' % (panda_config.logdir,self.jobID,self.jobStatus,
        #                                   str(uuid.uuid4()),attemptNr)
        # file = open(xmlFile,'w')
        # file.write(fileCatalog)
        # file.close()
        # dump Catalog into job output report table
        attempt_nr = 0 if attemptNr is None else attemptNr
        # fetch the job spec lazily; peekJobs returns a list of JobSpecs
        if self.job is None:
            self.job = self.taskBuffer.peekJobs([self.jobID],fromDefined=False,
                                                fromWaiting=False,
                                                forAnal=True)[0]
        # only insert the report if the job actually exists in the DB
        if self.job:
            self.taskBuffer.insertJobOutputReport(
                panda_id=self.jobID, prod_source_label=self.job.prodSourceLabel,
                job_status=self.jobStatus, attempt_nr=attempt_nr, data=fileCatalog)
# get plugin class
def getPluginClass(self, tmpVO):
# instantiate concrete plugin
adderPluginClass = panda_config.getPlugin('adder_plugins',tmpVO)
if adderPluginClass is None:
# use ATLAS plugin by default
from pandaserver.dataservice.AdderAtlasPlugin import AdderAtlasPlugin
adderPluginClass = AdderAtlasPlugin
self.logger.debug('plugin name {0}'.format(adderPluginClass.__name__))
return adderPluginClass
# main
def run(self):
try:
self.logger.debug("new start: %s attemptNr=%s" % (self.jobStatus,self.attemptNr))
# got lock, get the report
report_dict = self.taskBuffer.getJobOutputReport(panda_id=self.jobID, attempt_nr=self.attemptNr)
self.data = report_dict.get('data')
# query job
self.job = self.taskBuffer.peekJobs([self.jobID],fromDefined=False,
fromWaiting=False,
forAnal=True)[0]
# check if job has finished
if self.job is None:
self.logger.debug(': job not found in DB')
elif self.job.jobStatus in ['finished','failed','unknown','merging']:
self.logger.error(': invalid state -> %s' % self.job.jobStatus)
elif self.attemptNr is not None and self.job.attemptNr != self.attemptNr:
self.logger.error('wrong attemptNr -> job=%s <> %s' % (self.job.attemptNr,self.attemptNr))
# elif self.attemptNr is not None and self.job.jobStatus == 'transferring':
# errMsg = 'XML with attemptNr for {0}'.format(self.job.jobStatus)
| # | self.logger.error(errMsg)
elif self.jobStatus == EventServiceUtils.esRegStatus:
# instantiate concrete plugin
adderPluginClass = self.getPluginClass(self.job.VO)
adderPlugin = adderPluginClass(self.job,
taskBuffer=self.taskBuffer,
siteMapper=self.siteMapper,
logger=self.logger)
# execute
self.logger.debug('plugin is ready for ES file registration')
adderPlugin.registerEventServiceFiles()
else:
# check file status in JEDI
if not self.job.isCancelled() and self.job.taskBufferErrorCode not in \
[pandaserver.taskbuffer.ErrorCode.EC_PilotRetried]:
fileCheckInJEDI = self.taskBuffer.checkInputFileStatusInJEDI(self.job)
self.logger.debug("check file status in JEDI : {0}".format(fileCheckInJEDI))
if fileCheckInJEDI is None:
raise RuntimeError('failed to check file status in JEDI')
if fileCheckInJEDI is False:
# set job status to failed since some file status is wrong in JEDI
self.jobStatus = 'failed'
self.job.ddmErrorCode = pandaserver.dataservice.ErrorCode.EC_Adder
errStr = "inconsistent file status between Panda and JEDI. "
errStr += "failed to avoid duplicated processing caused by synchronization failure"
self.job.ddmErrorDiag = errStr
self.logger.debug("set jobStatus={0} since input is inconsistent between Panda and JEDI".format(self.jobStatus))
elif self.job.jobSubStatus in ['pilot_closed']:
# terminated by the pilot
self.logger.debug("going to closed since terminated by the pilot")
retClosed = self.taskBuffer.killJobs([self.jobID],'pilot','60',True)
if retClosed[0] is True:
self.logger.debug("end")
# remove Catalog
self.taskBuffer.deleteJobOutputReport(panda_id=self.jobID, attempt_nr=self.attemptNr)
return
# check for cloned jobs
if EventServiceUtils.isJobCloningJob(self.job):
checkJC = self.taskBuffer.checkClonedJob(self.job)
if checkJC is None:
raise RuntimeError('failed to check the cloned job')
# failed to lock semaphore
if checkJC['lock'] is False:
self.jobStatus = 'failed'
self.job.ddmErrorCode = pandaserver.dataservice.ErrorCode.EC_Adder
self.job.ddmErrorDiag = "failed to lock semaphore for job cloning"
self.logger.debug("set jobStatus={0} since did not get semaphore for job cloning".format(self.jobStatus))
# use failed for cancelled/closed jobs
if self.job.isCancelled():
self.jobStatus = 'failed'
# reset error codes to skip retrial module
self.job.pilotErrorCode = 0
self.job.exeErrorCode = 0
self.job.ddmErrorCode = 0
# keep old status
oldJobStatus = self.job.jobStatus
# set job status
if self |
glue-viz/glue-qt | glue/sandbox/layertree.py | Python | bsd-3-clause | 471 | 0.002123 | im | port glue
from glue.qt.layertreewidget import LayerTreeWidget
def main():
    """ Display a layer tree """
    import sys
    from PyQt4.QtGui import QApplication, QMainWindow

    # The hub routes messages between the tree widget and its data collection.
    message_hub = glue.Hub()

    qt_app = QApplication(sys.argv)
    window = QMainWindow()
    tree_widget = LayerTreeWidget()
    tree_widget.register_to_hub(message_hub)
    tree_widget.data_collection.register_to_hub(message_hub)
    window.setCentralWidget(tree_widget)
    window.show()
    sys.exit(qt_app.exec_())


if __name__ == "__main__":
    main()
|
Laurawly/tvm-1 | vta/python/vta/top/bitpack.py | Python | apache-2.0 | 2,886 | 0.000347 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=ungrouped-imports
"""Bit packing operators"""
from __future__ import absolute_import as _abs
import tvm
from tvm import te
from tvm.topi import utils
from tvm.relay.op.op import register_compute, register_injective_schedule
from tvm.relay.op.op import register_pattern, OpPattern
def bitpack(data, bits, pack_type="int8", name="bitpack"):
    """Packs lowest dimension into format needed by VTA.

    Groups ``data_width // bits`` consecutive elements of the innermost
    axis of *data* into a single element of dtype *pack_type*.

    Parameters
    ----------
    data : Tensor
        Input tensor; its innermost dimension must be a multiple of the
        packing factor (``data_width // bits``).
    bits : int
        Number of bits each source element occupies in the packed word.
        Must evenly divide the bit width of *pack_type*.
    pack_type : str
        Storage dtype of the packed tensor: "int8", "int16" or "int32".
    name : str
        Name for the resulting compute op.

    Returns
    -------
    packed : Tensor
        The packed tensor, with the innermost dimension shrunk by the
        packing factor.
    """
    shape_vec = list(data.shape)
    if pack_type == "int8":
        data_width = 8
    elif pack_type == "int16":
        data_width = 16
    elif pack_type == "int32":
        data_width = 32
    else:
        raise RuntimeError("Unknown pack type %s" % pack_type)
    assert data_width % bits == 0
    # number of source elements stored in one packed word
    lanes = data_width // bits

    # Data must be in multiples of the data_width
    assert utils.get_const_int(shape_vec[-1]) % lanes == 0, "Not a multiple of word size"
    shape_vec[-1] = shape_vec[-1] // lanes
    oshape = tuple(shape_vec)

    def _bitpack(*indices):
        # OR the masked, shifted lanes together to form one packed word.
        ret = None
        mask = tvm.tir.const((1 << bits) - 1, pack_type)
        for k in range(lanes):
            idx = list(indices)
            idx[-1] = idx[-1] * lanes + k
            elem = data(*idx).astype(pack_type)
            if k == 0:
                ret = elem & mask
            else:
                val = (elem & mask) << tvm.tir.const(k * bits, pack_type)
                ret = ret | val
        return ret

    return te.compute(oshape, _bitpack, name=name, tag="bitpack")
@register_compute("bitpack", level=15)
def compute_bitpack(attrs, inputs):
    """Relay compute implementation for the "bitpack" op.

    Packs the int8 input into ``attrs.lanes`` bit fields per byte by
    delegating to :func:`bitpack`.
    """
    lanes = attrs.lanes
    dtype = inputs[0].dtype
    # only int8 inputs are supported by this packing scheme
    assert dtype == "int8"
    width = 8
    assert width % lanes == 0
    bits = 8 // lanes
    return bitpack(inputs[0], bits, dtype)


register_injective_schedule("bitpack")
register_pattern("bitpack", OpPattern.INJECTIVE)
|
marcoandreini/injection | commands.py | Python | gpl-2.0 | 5,661 | 0.004063 | # Here you can create play commands that are specific to the module, and extend existing commands
import os
import os.path
import shutil
import time
from play.utils import *
MODULE = 'injection'
# Commands that are specific to your module
COMMANDS = ['injection:ec']
HELP = {
'injection:ec': 'Update eclipse .classpath file.'
}
def execute(**kargs):
    """Rebuild the application's eclipse ``.classpath`` file.

    Handler for the ``injection:ec`` command: copies the template
    classpath shipped with the play distribution into the application
    directory and substitutes the %PROJECTCLASSPATH%, %TESTCLASSPATH%
    and %MODULES% placeholders.
    """
    command = kargs.get("command")
    app = kargs.get("app")
    args = kargs.get("args")
    play_env = kargs.get("env")

    # An application has conf/application.conf; a bare module does not.
    is_application = os.path.exists(os.path.join(app.path, 'conf', 'application.conf'))
    if is_application:
        app.check()
        app.check_jpda()
    modules = app.modules()
    classpath = app.getClasspath()

    # determine the name of the project
    # if this is an application, the name of the project is in the application.conf file
    # if this is a module, we infer the name from the path
    application_name = app.readConf('application.name')
    vm_arguments = app.readConf('jvm.memory')
    javaVersion = getJavaVersion()
    print "~ using java version \"%s\"" % javaVersion
    if javaVersion.startswith("1.7"):
        # JDK 7 compat
        vm_arguments = vm_arguments +' -XX:-UseSplitVerifier'
    elif javaVersion.startswith("1.8"):
        # JDK 8 compatible
        vm_arguments = vm_arguments +' -noverify'
    # NOTE(review): vm_arguments and application_name are computed below but
    # never used afterwards -- confirm whether further substitutions were
    # dropped from this command.

    if application_name:
        application_name = application_name.replace("/", " ")
    else:
        application_name = os.path.basename(app.path)

    # Start from the template .classpath shipped with the framework.
    dotClasspath = os.path.join(app.path, '.classpath')

    shutil.copyfile(os.path.join(play_env["basedir"], 'resources/eclipse/.classpath'), dotClasspath)

    playJarPath = os.path.join(play_env["basedir"], 'framework', 'play-%s.jar' % play_env['version'])
    playSourcePath = os.path.join(os.path.dirname(playJarPath), 'src')
    if os.name == 'nt':
        # eclipse on Windows wants forward slashes and a capitalized drive letter
        playSourcePath=playSourcePath.replace('\\','/').capitalize()

    # Map each classpath jar to its source jar, when one can be found.
    cpJarToSource = {}
    lib_src = os.path.join(app.path, 'tmp/lib-src')
    for el in classpath:
        # library sources jars in the lib directory
        if os.path.basename(el) != "conf" and el.endswith('-sources.jar'):
            cpJarToSource[el.replace('-sources', '')] = el
        # pointers to source jars produced by 'play deps'
        src_file = os.path.join(lib_src, os.path.basename(el) + '.src')
        if os.path.exists(src_file):
            f = file(src_file)
            cpJarToSource[el] = f.readline().rstrip()
            f.close()

    # Jars with a sibling .docurl file get a javadoc_location attribute.
    javadocLocation = {}
    for el in classpath:
        urlFile = el.replace(r'.jar','.docurl')
        if os.path.basename(el) != "conf" and os.path.exists(urlFile):
            javadocLocation[el] = urlFile

    # Assemble one <classpathentry> XML fragment per jar.
    cpXML = ""

    for el in sorted(classpath):
        if os.path.basename(el) != "conf":
            if el == playJarPath:
                cpXML += '<classpathentry kind="lib" path="%s" sourcepath="%s" />\n\t' % (os.path.normpath(el) , playSourcePath)
            else:
                if cpJarToSource.has_key(el):
                    cpXML += '<classpathentry kind="lib" path="%s" sourcepath="%s"/>\n\t' % (os.path.normpath(el), cpJarToSource[el])
                else:
                    if javadocLocation.has_key(el):
                        cpXML += '<classpathentry kind="lib" path="%s">\n\t\t' % os.path.normpath(el)
                        cpXML += '<attributes>\n\t\t\t'
                        f = file(javadocLocation[el])
                        url = f.readline()
                        f.close()
                        cpXML += '<attribute name="javadoc_location" value="%s"/>\n\t\t' % (url.strip())
                        cpXML += '</attributes>\n\t'
                        cpXML += '</classpathentry>\n\t'
                    else:
                        cpXML += '<classpathentry kind="lib" path="%s"/>\n\t' % os.path.normpath(el)
    if not is_application:
        cpXML += '<classpathentry kind="src" path="src"/>'
    replaceAll(dotClasspath, r'%PROJECTCLASSPATH%', cpXML)

    # generate source path for test folder if one exists
    cpTEST = ""
    if os.path.exists(os.path.join(app.path, 'test')):
        cpTEST += '<classpathentry kind="src" path="test"/>'
    replaceAll(dotClasspath, r'%TESTCLASSPATH%', cpTEST)

    if len(modules):
        lXML = ""
        cXML = ""
        for module in sorted(modules):
            lXML += '<link><name>%s</name><type>2</type><location>%s</location></link>\n' % (os.path.basename(module), os.path.join(module, 'app').replace('\\', '/'))
            if os.path.exists(os.path.join(module, "conf")):
                lXML += '<link><name>conf/%s</name><type>2</type><location>%s/conf</location></link>\n' % (os.path.basename(module), module.replace('\\', '/'))
            if os.path.exists(os.path.join(module, "public")):
                lXML += '<link><name>public/%s</name><type>2</type><location>%s/public</location></link>\n' % (os.path.basename(module), module.replace('\\', '/'))
            cXML += '<classpathentry kind="src" path="%s"/>\n\t' % (os.path.basename(module))
        # NOTE(review): lXML is assembled but never written to any file --
        # confirm whether a %LINKS% substitution went missing.
        replaceAll(dotClasspath, r'%MODULES%', cXML)
    else:
        replaceAll(dotClasspath, r'%MODULES%', '')

    print "successfully recreated eclipse classpath, please refresh project"
# This will be executed before any command (new, run...)
def before(**kargs):
    """Hook invoked before every play command; currently a no-op."""
    env = kargs.get("env")
    args = kargs.get("args")
    app = kargs.get("app")
    command = kargs.get("command")
# This will be executed after any command (new, run...)
def after(**kargs):
    """Hook invoked after every play command; currently a no-op."""
    env = kargs.get("env")
    args = kargs.get("args")
    app = kargs.get("app")
    command = kargs.get("command")

    if command != "new":
        return
|
Integral-Technology-Solutions/ConfigNOW | wlst/apps.py | Python | mit | 6,847 | 0.019863 | # ============================================================================
#
# Copyright (c) 2007-2011 Integral Technology Solutions Pty Ltd,
# All Rights Reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS NOTICE BE
# LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, OR
# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
#
# FOR FURTHER INFORMATION PLEASE SEE THE INTEGRAL TECHNOLOGY SOLUTIONS
# END USER LICENSE AGREEMENT (ELUA).
#
# ============================================================================
##
## apps.py
##
## This script contains functions for application deployments.
from java.io import File
import thread
import time
#=======================================================================================
# Load required modules
#=======================================================================================
try:
commonModule
except NameError:
execfile('ConfigNOW/common/common.py')
#=======================================================================================
# Global variables
| #=======================================================================================
appsModule = '1.0.1'
log.debug('Loading module [apps.py] version [' + appsModule + ']')
#=======================================================================================
# Deploy applications |
#=======================================================================================
def deployApps(componentProperties):
"""Deploys applications"""
applications = componentProperties.getProperty('applications')
if applications is None:
log.info('No applications to deploy')
else:
apps = applications.split(',')
for app in apps:
__deployApp('application.' + app, componentProperties)
#=======================================================================================
# Undeploy applications
#=======================================================================================
def undeployApps(componentProperties):
"""Deploys applications"""
applications = componentProperties.getProperty('applications')
if applications is None:
log.info('No applications to undeploy')
else:
apps = applications.split(',')
for app in apps:
__undeployApp('application.' + app, componentProperties=componentProperties)
#=======================================================================================
# Deploy an application
#=======================================================================================
def __deployApp(appPrefix, componentProperties):
"""Deploys an application"""
appName = componentProperties.getProperty(appPrefix + '.name')
appPath = componentProperties.getProperty(appPrefix + '.path')
targets = componentProperties.getProperty(appPrefix + '.targets')
isRemote = componentProperties.getProperty(appPrefix +'.isRemote')
if appPath is None or len(appPath)==0:
appPath = componentProperties.getProperty('applications.default.deploy.path')
appFile = appPath + File.separator + componentProperties.getProperty(appPrefix + '.file')
try:
if isRemote is not None and isRemote.upper()=='TRUE':
log.info('Deploying application Remotely: ' + appName)
progress = deploy(appName, appFile, targets, stageMode='stage',upload='true',remote='true')
else:
log.info('Deploying Application : '+appName)
progress = deploy(appName, appFile, targets)
#log.info('Deploying application: ' + appName)
progress.printStatus()
log.debug(str(appName) + ' has been deployed. Check state ' + str(appName) + '?=' + str(progress.getState()))
log.debug(str(appName) + ' has been deployed. Check if ' + str(appName) + ' is completed?=' + str(progress.isCompleted()))
log.debug(str(appName) + ' has been deployed. Check if ' + str(appName) + ' is running?=' + str(progress.isRunning()))
log.debug(str(appName) + ' has been deployed. Check if ' + str(appName) + ' is failed?=' + str(progress.isFailed()))
log.debug(str(appName) + ' has been deployed. Check message ' + str(appName) + '?=' + str(progress.getMessage()))
except Exception, error:
raise ScriptError, 'Unable to deploy application [' + appName + ']: ' + str(error)
#=======================================================================================
# Undeploy an application
#=======================================================================================
def __undeployApp(appPrefix, componentProperties):
"""Undeploys an application"""
appName = componentProperties.getProperty(appPrefix + '.name')
targets = componentProperties.getProperty(appPrefix + '.targets')
undeployTimeout = componentProperties.getProperty('applications.default.undeploy.timeout')
try:
__stopApp(appName)
log.info('Undeploying application: ' + appName)
progress = undeploy(appName, targets, timeout=undeployTimeout)
log.debug(str(appName) + ' has been undeployed. Check state ' + str(appName) + '?=' + str(progress.getState()))
log.debug(str(appName) + ' has been undeployed. Check if ' + str(appName) + ' is completed?=' + str(progress.isCompleted()))
log.debug(str(appName) + ' has been undeployed. Check if ' + str(appName) + ' is running?=' + str(progress.isRunning()))
log.debug(str(appName) + ' has been undeployed. Check if ' + str(appName) + ' is failed?=' + str(progress.isFailed()))
log.debug(str(appName) + ' has been undeployed. Check message ' + str(appName) + '?=' + str(progress.getMessage()))
if progress.isFailed():
if str(progress.getMessage()).find('Deployer:149001') == -1:
raise ScriptError, 'Unable to undeploy application [' + appName + ']: ' + str(progress.getMessage())
except Exception, error:
raise ScriptError, 'Unable to undeploy application [' + appName + ']: ' + str(error)
#=======================================================================================
# Stop an application
#=======================================================================================
def __stopApp(appName):
"""Stops an application"""
log.info('Stopping application: ' + appName)
try:
progress = stopApplication(appName)
log.debug('Is running? ' + str(progress.isRunning()))
except Exception, error:
raise ScriptError, 'Unable to stop application [' + appName + ']: ' + str(error)
|
edison7500/django-leancloud-sms | testapp/settings.py | Python | gpl-3.0 | 2,833 | 0 | """
Django settings for testapp project.
Generated by 'django-admin startproject' using Django 1.8.18.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file_ | _)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'ap@6rnmzb&i&-ch^)hh=f=6a5)aho^f@td16y)-1-b9(p%v*l('
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Applicati | on definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'sms',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
SITE_ID = 1
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
# lean cloud config
LEANCLOUD_HEADERS = {
"X-LC-Id": "<set-x-lc-id>",
"X-LC-Key": "<set-x-lc-key>",
"Content-Type": "application/json"
}
LEANCLOUD_SMS_NAME = "jiaxin"
|
huguesv/PTVS | Python/Product/Miniconda/Miniconda3-x64/Lib/site-packages/win32com/test/testDictionary.py | Python | apache-2.0 | 2,509 | 0.006377 | # testDictionary.py
#
import sys
import win32com.server.util
import win32com.test.util
import win32com.client
import traceback
import pythoncom
import pywintypes
import winerror
import unittest
error = "dictionary test error"
def MakeTestDictionary():
    """Create a COM instance of the registered Python.Dictionary server."""
    dictionary = win32com.client.Dispatch("Python.Dictionary")
    return dictionary
def TestDictAgainst(dict,check):
    """Verify COM-style indexing on *dict* against the plain mapping *check*.

    *dict* is expected to expose lookup through its default (callable)
    method, as the Python.Dictionary COM server does.

    Raises AssertionError on the first mismatching key.
    """
    for key, value in check.items():
        if dict(key) != value:
            # The original `raise error(...)` tried to call the module-level
            # *string* `error`, which is not callable and would itself raise
            # TypeError; raise a real exception carrying the same diagnostic.
            raise AssertionError("Indexing for '%s' gave the incorrect value - %s/%s" % (repr(key), repr(dict[key]), repr(check[key])))
# Ensure we have the correct version registered.
def Register(quiet):
    """Register the Python.Dictionary COM server from pywin32.

    :param quiet: accepted for call-site symmetry; not used here.
    """
    import win32com.servers.dictionary
    from win32com.test.util import RegisterPythonServer
    RegisterPythonServer(win32com.servers.dictionary.__file__, 'Python.Dictionary')
def TestDict(quiet=None):
    """Exercise the Python.Dictionary COM server: lookups, updates, and the
    COM error codes produced by malformed calls."""
    if quiet is None:
        quiet = "-v" not in sys.argv
    Register(quiet)

    if not quiet:
        print("Simple enum test")
    com_dict = MakeTestDictionary()
    expected = {}
    TestDictAgainst(com_dict, expected)

    com_dict["NewKey"] = "NewValue"
    expected["NewKey"] = "NewValue"
    TestDictAgainst(com_dict, expected)

    # Assigning None removes the entry on the COM side.
    com_dict["NewKey"] = None
    del expected["NewKey"]
    TestDictAgainst(com_dict, expected)

    if not quiet:
        print("Failure tests")

    # Calling the default method with no arguments must be rejected.
    try:
        com_dict()
        raise error("default method with no args worked when it shouldnt have!")
    except pythoncom.com_error as com_err:
        hr, desc, exc, argErr = com_err.args
        if hr != winerror.DISP_E_BADPARAMCOUNT:
            raise error("Expected DISP_E_BADPARAMCOUNT - got %d (%s)" % (hr, desc))

    # More than one argument must also be rejected.
    try:
        com_dict("hi", "there")
        raise error("multiple args worked when it shouldnt have!")
    except pythoncom.com_error as com_err:
        hr, desc, exc, argErr = com_err.args
        if hr != winerror.DISP_E_BADPARAMCOUNT:
            raise error("Expected DISP_E_BADPARAMCOUNT - got %d (%s)" % (hr, desc))

    # Non-string keys are a type mismatch.
    try:
        com_dict(0)
        raise error("int key worked when it shouldnt have!")
    except pythoncom.com_error as com_err:
        hr, desc, exc, argErr = com_err.args
        if hr != winerror.DISP_E_TYPEMISMATCH:
            raise error("Expected DISP_E_TYPEMISMATCH - got %d (%s)" % (hr, desc))

    if not quiet:
        print("Python.Dictionary tests complete.")
class TestCase(win32com.test.util.TestCase):
    """unittest wrapper so the dictionary checks run under the pywin32 suite."""
    def testDict(self):
        TestDict()
if __name__=='__main__':
unittest.main()
|
mikeireland/chronostar | unit_tests/test_unit_expectmax.py | Python | mit | 6,240 | 0.007372 | from __future__ import print_function, division
import numpy as np
import pytest
import sys
import chronostar.likelihood
sys.path.insert(0,'..')
from chronostar import expectmax as em
from chronostar.synthdata import SynthData
from chronostar.component import SphereComponent
from chronostar import tabletool
from chronostar import expectmax
import chronostar.synthdata as syn
# import chronostar.retired2.measurer as ms
# import chronostar.retired2.converter as cv
#
# def test_calcMedAndSpan():
# """
# Test that the median, and +- 34th percentiles is found correctly
# """
# dx = 10.
# dv = 5. |
# dummy_mean = np.array([10,10,10, 5, 5, 5,np.log(dx),np.log(dv),20])
# dummy_std = np.array([1.,1.,1.,1 | .,1.,1.,0.5, 0.5, 3.])
# assert len(dummy_mean) == len(dummy_std)
# npars = len(dummy_mean)
#
# nsteps = 10000
# nwalkers = 18
#
# dummy_chain = np.array([np.random.randn(nsteps)*std + mean
# for (std, mean) in zip(dummy_std, dummy_mean)]).T
# np.repeat(dummy_chain, 18, axis=0).reshape(nwalkers,nsteps,npars)
#
# med_and_span = em.calcMedAndSpan(dummy_chain)
# assert np.allclose(dummy_mean, med_and_span[:,0], atol=0.1)
# approx_stds = 0.5*(med_and_span[:,1] - med_and_span[:,2])
# assert np.allclose(dummy_std, approx_stds, atol=0.1)
def test_calcMembershipProbs():
    """
    Even basicer. Checks that differing overlaps are
    correctly mapped to memberships.
    """
    # (overlap integrals, expected membership fractions)
    cases = [
        ([10, 10], [.5, .5]),
        ([10, 30], [.25, .75]),
        ([10, 10, 20], [.25, .25, .5]),
    ]
    for star_ols, expected in cases:
        assert np.allclose(expected, em.calc_membership_probs(np.log(star_ols)))
def test_expectation():
    """
    Super basic, generates some association stars along
    with some background stars and checks membership allocation
    is correct
    """
    # Two well-separated components of the same (tiny) age, 100 stars each.
    age = 1e-5
    ass_pars1 = np.array([0, 0, 0, 0, 0, 0, 5., 2., age])
    comp1 = SphereComponent(ass_pars1)
    ass_pars2 = np.array([100., 0, 0, 20, 0, 0, 5., 2., age])
    comp2 = SphereComponent(ass_pars2)
    starcounts = [100,100]
    synth_data = SynthData(pars=[ass_pars1, ass_pars2],
                           starcounts=starcounts)
    synth_data.synthesise_everything()

    tabletool.convert_table_astro2cart(synth_data.table)

    # Ground truth: first 100 stars belong to comp1, the rest to comp2.
    true_memb_probs = np.zeros((np.sum(starcounts), 2))
    true_memb_probs[:starcounts[0], 0] = 1.
    true_memb_probs[starcounts[0]:, 1] = 1.

    # star_means, star_covs = tabletool.buildDataFromTable(synth_data.astr_table)
    # all_lnols = em.getAllLnOverlaps(
    #     synth_data.astr_table, [comp1, comp2]
    # )

    fitted_memb_probs = em.expectation(
        tabletool.build_data_dict_from_table(synth_data.table),
        [comp1, comp2]
    )

    assert np.allclose(true_memb_probs, fitted_memb_probs, atol=1e-10)
'''
@pytest.mark.skip
def test_fit_many_comps_gradient_descent_with_multiprocessing():
"""
Added by MZ 2020 - 07 - 13
Test if maximisation works when using gradient descent and multiprocessing.
"""
age = 1e-5
ass_pars1 = np.array([0, 0, 0, 0, 0, 0, 5., 2., age])
comp1 = SphereComponent(ass_pars1)
starcounts = [100,]
synth_data = SynthData(pars=[ass_pars1,],
starcounts=starcounts)
synth_data.synthesise_everything()
tabletool.convert_table_astro2cart(synth_data.table)
true_memb_probs = np.zeros((np.sum(starcounts), 2))
true_memb_probs[:starcounts[0], 0] = 1.
true_memb_probs[starcounts[0]:, 1] = 1.
ncomps = len(starcounts)
best_comps, med_and_spans, memb_probs = \
expectmax.fit_many_comps(synth_data.table, ncomps,
rdir='test_gradient_descent_multiprocessing',
#~ init_memb_probs=None,
#~ init_comps=None,
trace_orbit_func=None,
optimisation_method='Nelder-Mead',
nprocess_ncomp = True,
)
'''
@pytest.mark.skip(reason='Too long for unit tests. Put this in integration instead')
def test_maximisation_gradient_descent_with_multiprocessing_tech():
    """
    Added by MZ 2020 - 07 - 13

    Test if maximisation works when using gradient descent and multiprocessing.
    NOTE: this is not a test if maximisation returns appropriate results but
    it only tests if the code runs without errors. This is mainly to test
    multiprocessing.

    """
    # One synthetic component with 100 stars.
    age = 1e-5
    ass_pars1 = np.array([0, 0, 0, 0, 0, 0, 5., 2., age])
    comp1 = SphereComponent(ass_pars1)
    starcounts = [100,]
    synth_data = SynthData(pars=[ass_pars1,],
                           starcounts=starcounts)
    synth_data.synthesise_everything()
    tabletool.convert_table_astro2cart(synth_data.table)

    # All stars belong to the single component.
    true_memb_probs = np.zeros((np.sum(starcounts), 1))
    true_memb_probs[:starcounts[0], 0] = 1.
    #~ true_memb_probs[starcounts[0]:, 1] = 1.

    ncomps = len(starcounts)

    # Perturb the true parameters so the optimiser has work to do.
    noise = np.random.rand(ass_pars1.shape[0])*5

    all_init_pars = [ass_pars1 + noise]

    new_comps, all_samples, _, all_init_pos, success_mask =\
        expectmax.maximisation(synth_data.table, ncomps,
                               true_memb_probs, 100, 'iter00',
                               all_init_pars,
                               optimisation_method='Nelder-Mead',
                               nprocess_ncomp=True,
                               )

    # TODO: test if new_comps, all_samples, _, all_init_pos, success_mask are of the right format.
# def test_background_overlaps():
# """
# Author: Marusa Zerjal, 2019 - 05 - 26
# Compare background overlap with KDE and background overlap with tiny covariance matrix
# :return:
# """
# background_means = tabletool.build_data_dict_from_table(kernel_density_input_datafile,
# only_means=True,
# )
# ln_bg_ols_kde = em.get_kernel_densities(background_means,
# # star_means, )
if __name__=='__main__':
test_maximisation_gradient_descent_with_multiprocessing_tech()
|
plotly/plotly.py | packages/python/plotly/plotly/validators/layout/ternary/baxis/tickformatstop/_name.py | Python | mit | 453 | 0 | import _plotly_utils.basevalidators
class NameValidator(_plotly_utils.basevalidators.StringValidator):
    """Validator for the ``name`` property of
    ``layout.ternary.baxis.tickformatstop``.
    """

    def __init__(
        self,
        plotly_name="name",
        parent_name="layout.ternary.baxis.tickformatstop",
        **kwargs
    ):
        super(NameValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            # Editing the name triggers a full re-plot by default.
            edit_type=kwargs.pop("edit_type", "plot"),
            **kwargs
        )
activityworkshop/Murmeli | murmeli/signals.py | Python | gpl-2.0 | 1,289 | 0.000776 | '''Simple signal and timer functionality to remove dependency on Qt'''
import threading
import time
class Signal:
    '''A signal which can be connected to one or more listeners'''

    def __init__(self):
        # Listeners are invoked in the order they were connected.
        self._listeners = []

    def clear(self):
        '''Clear all the listeners'''
        self._listeners.clear()

    def connect(self, listener):
        '''Connect this signal to an additional listener.

        A falsy listener (e.g. None) is silently ignored.
        '''
        if listener:
            self._listeners.append(listener)

    def fire(self):
        '''Fire the signal to each of the listeners in turn'''
        for listener in self._listeners:
            listener()
class Timer:
    '''Invokes a target callable in a background thread, either once after
    the given delay or repeatedly every ``delay`` seconds until stopped.'''

    def __init__(self, delay, target, repeated=True):
        self.delay = delay
        self.target = target
        self.repeated = repeated
        self.running = True
        threading.Thread(target=self.run).start()

    def run(self):
        '''Body of the background thread.'''
        while self.running:
            time.sleep(self.delay)
            # Re-check after sleeping: stop() may have been called meanwhile.
            if self.running:
                self.target()
            # A one-shot timer retires itself after the first firing.
            if not self.repeated:
                self.running = False

    def stop(self):
        '''Ask the background thread to exit after its current sleep.'''
        self.running = False
|
gecos-team/gecos-firstboot | firstboot/pages/linkToChef/LinkToChefConfEditorPage.py | Python | gpl-2.0 | 7,126 | 0.003369 | # -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-
# This file is part of Guadalinex
#
# This software is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this package; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
__author__ = "Antonio Hernández <ahernandez@emergya.com>"
__copyright__ = "Copyright (C) 2011, Junta de Andalucía <devmaster@guadalinex.org>"
__license__ = "GPL-2"
import LinkToChefHostnamePage
import LinkToChefResultsPage
import firstboot.pages.linkToChef
from firstboot_lib import PageWindow
from firstboot import serverconf
import firstboot.validation as validation
import gettext
from gettext import gettext as _
gettext.textdomain('firstboot')
__REQUIRED__ = False
__DEFAULT_ROLE__ = 'default_group'
def get_page(main_window):
    """Factory hook: build the Chef configuration-editor page for *main_window*."""
    return LinkToChefConfEditorPage(main_window)
class LinkToChefConfEditorPage(PageWindow.PageWindow):
    """Firstboot page that lets the user edit the Chef server parameters
    (URL, validation certificate, node name, default group) before linking
    or unlinking this workstation."""

    __gtype_name__ = "LinkToChefConfEditorPage"

    def finish_initializing(self):
        """Reset page state; called once the UI has been built."""
        self.update_server_conf = False
        self.chef_is_configured = False
        self.unlink_from_chef = False

    def load_page(self, params=None):
        """Populate the form from *params* (expects 'server_conf',
        'chef_is_configured' and 'unlink_from_chef' keys).

        NOTE(review): *params* defaults to None but is indexed
        unconditionally below — callers apparently always pass a dict.
        """
        if 'server_conf' in params:
            self.server_conf = params['server_conf']
            if not self.server_conf is None:
                self.ui.lblVersionValue.set_label(self.server_conf.get_version())
                self.ui.lblOrganizationValue.set_label(self.server_conf.get_organization())
                self.ui.lblNotesValue.set_label(self.server_conf.get_notes())
                self.ui.txtUrlChef.set_text(self.server_conf.get_chef_conf().get_url())
                self.ui.txtUrlChefCert.set_text(self.server_conf.get_chef_conf().get_pem_url())
                self.ui.txtHostname.set_text(self.server_conf.get_chef_conf().get_hostname())
                self.ui.txtDefaultRole.set_text(self.server_conf.get_chef_conf().get_default_role())

        if self.server_conf is None:
            self.server_conf = serverconf.ServerConf()

        # Fall back to the module-wide default group when the field is empty.
        if len(self.ui.txtDefaultRole.get_text()) == 0:
            self.ui.txtDefaultRole.set_text(__DEFAULT_ROLE__)

        self.update_server_conf = True
        self.chef_is_configured = params['chef_is_configured']
        self.unlink_from_chef = params['unlink_from_chef']
#        if self.chef_is_configured and self.unlink_from_chef:
#            self.ui.chkChef.get_child().set_markup(self._bold(_('This \
#workstation is going to be unlinked from the Chef server.')))

    def _bold(self, text):
        """Wrap *text* in Pango bold markup."""
        return '<b>%s</b>' % text

    def translate(self):
        """Set all translatable labels on the page."""
        desc = _('These parameters are required in order to join a Chef server:')
        self.ui.lblDescription.set_text(desc)
        self.ui.lblUrlChefDesc.set_label(_('"Chef URL": an existant URL in your server where Chef is installed.'))
        self.ui.lblUrlChefCertDesc.set_label(_('"Chef Certificate": Validation certificate URL\
in order to autoregister this workstation in the Chef server.'))
        self.ui.lblHostnameDesc.set_label(_('"Node Name": must be an unique name.'))
        self.ui.lblDefaultRoleDesc.set_label(_('"Default Group": a global group for all the workstations in your organization.\nIf you are not an advanced Chef administrator, do not change this.'))
        self.ui.lblVersion.set_label(_('Version'))
        self.ui.lblOrganization.set_label(_('Organization'))
        self.ui.lblNotes.set_label(_('Comments'))
        self.ui.lblUrlChef.set_label('Chef URL')
        self.ui.lblUrlChefCert.set_label(_('Certificate URL'))
        self.ui.lblHostname.set_label(_('Node Name'))
        self.ui.lblDefaultRole.set_label(_('Default Group'))

    def previous_page(self, load_page_callback):
        """Go back to the link-to-Chef landing page."""
        load_page_callback(firstboot.pages.linkToChef)

    def next_page(self, load_page_callback):
        """Validate the form (when linking), run the server setup and show
        the results page."""
        if not self.unlink_from_chef:
            result, messages = self.validate_conf()
            if result:
                result, messages = serverconf.setup_server(
                    server_conf=self.server_conf,
                    link_ldap=False,
                    unlink_ldap=False,
                    link_chef=not self.unlink_from_chef,
                    unlink_chef=self.unlink_from_chef
                )
                load_page_callback(LinkToChefResultsPage, {
                    'server_conf': self.server_conf,
                    'result': result,
                    'messages': messages
                })
        else:
            result, messages = serverconf.setup_server(
                server_conf=self.server_conf,
                link_chef=not self.unlink_from_chef,
                unlink_chef=self.unlink_from_chef
            )
            load_page_callback(LinkToChefResultsPage, {
                'result': result,
                'server_conf': self.server_conf,
                'messages': messages
            })

    def on_serverConf_changed(self, entry):
        """Mirror every form edit back into the ChefConf object."""
        if not self.update_server_conf:
            return
        self.server_conf.get_chef_conf().set_url(self.ui.txtUrlChef.get_text())
        self.server_conf.get_chef_conf().set_pem_url(self.ui.txtUrlChefCert.get_text())
        self.server_conf.get_chef_conf().set_default_role(self.ui.txtDefaultRole.get_text())
        self.server_conf.get_chef_conf().set_hostname(self.ui.txtHostname.get_text())

    def validate_conf(self):
        """Check URLs and node name; return (valid, messages).

        A failure to fetch the list of used hostnames is reported but does
        not invalidate the form — Chef itself will complain later during
        client registration.
        """
        valid = True
        messages = []

        if not self.server_conf.get_chef_conf().validate():
            valid = False
            messages.append({'type': 'error', 'message': _('Chef and Chef Cert URLs must be valid URLs.')})

        hostname = self.server_conf.get_chef_conf().get_hostname()
        if not validation.is_qname(hostname):
            valid = False
            messages.append({'type': 'error', 'message': _('Node name is empty or contains invalid characters.')})

        try:
            used_hostnames = serverconf.get_chef_hostnames(self.server_conf.get_chef_conf())
        except Exception as e:
            used_hostnames = []
            # IMPORTANT: Append the error but don't touch the variable "valid" here,
            # just because if we can't get the hostnames here,
            # Chef will inform us about that later, while we are registering
            # the client.
            messages.append({'type': 'error', 'message': str(e)})

        if hostname in used_hostnames:
            valid = False
            messages.append({'type': 'error', 'message': _('Node name already exists in the Chef server. Choose a different one.')})

        return valid, messages
|
nugget/home-assistant | homeassistant/components/plum_lightpad/light.py | Python | apache-2.0 | 5,479 | 0 | """Support for Plum Lightpad lights."""
from homeassistant.components.light import (
ATTR_BRIGHTNESS, ATTR_HS_COLOR, SUPPORT_BRIGHTNESS, SUPPORT_COLOR, Light)
from homeassistant.components.plum_lightpad import PLUM_DATA
import homeassistant.util.color as color_util
DEPENDENCIES = ['plum_lightpad']
async def async_setup_platform(
        hass, config, async_add_entities, discovery_info=None):
    """Initialize the Plum Lightpad Light and GlowRing."""
    if discovery_info is None:
        return

    plum = hass.data[PLUM_DATA]
    new_entities = []

    # A lightpad id yields a glow-ring entity ...
    if 'lpid' in discovery_info:
        pad = plum.get_lightpad(discovery_info['lpid'])
        new_entities.append(GlowRing(lightpad=pad))

    # ... a logical-load id yields a dimmer light entity.
    if 'llid' in discovery_info:
        load = plum.get_load(discovery_info['llid'])
        new_entities.append(PlumLight(load=load))

    if new_entities:
        async_add_entities(new_entities)
class PlumLight(Light):
    """Home Assistant light entity backed by a Plum Lightpad logical load."""

    def __init__(self, load):
        """Initialize the light from its logical load."""
        self._load = load
        self._brightness = load.level

    async def async_added_to_hass(self):
        """Subscribe to dimmerchange events."""
        self._load.add_event_listener('dimmerchange', self.dimmerchange)

    def dimmerchange(self, event):
        """Handle a dimmer level change pushed from the device."""
        self._brightness = event['level']
        self.schedule_update_ha_state()

    @property
    def should_poll(self):
        """Push-based entity; no polling needed."""
        return False

    @property
    def name(self):
        """Return the name of the underlying logical load."""
        return self._load.name

    @property
    def brightness(self) -> int:
        """Return the brightness of this light between 0..255."""
        return self._brightness

    @property
    def is_on(self) -> bool:
        """Return true if the light is on (any non-zero brightness)."""
        return self._brightness > 0

    @property
    def supported_features(self):
        """Advertise brightness support only for dimmable loads."""
        return SUPPORT_BRIGHTNESS if self._load.dimmable else None

    async def async_turn_on(self, **kwargs):
        """Turn the light on, optionally at a requested brightness."""
        if ATTR_BRIGHTNESS in kwargs:
            await self._load.turn_on(kwargs[ATTR_BRIGHTNESS])
        else:
            await self._load.turn_on()

    async def async_turn_off(self, **kwargs):
        """Turn the light off."""
        await self._load.turn_off()
class GlowRing(Light):
    """Representation of a Plum Lightpad dimmer glow ring (the decorative
    configurable LED ring around the pad)."""

    def __init__(self, lightpad):
        """Initialize the light from the lightpad's current glow config."""
        self._lightpad = lightpad
        self._name = '{} Glow Ring'.format(lightpad.friendly_name)

        self._state = lightpad.glow_enabled
        # glow_intensity is 0..1 on the device; HA brightness is 0..255.
        self._brightness = lightpad.glow_intensity * 255.0

        self._red = lightpad.glow_color['red']
        self._green = lightpad.glow_color['green']
        self._blue = lightpad.glow_color['blue']

    async def async_added_to_hass(self):
        """Subscribe to configchange events."""
        self._lightpad.add_event_listener(
            'configchange', self.configchange_event)

    def configchange_event(self, event):
        """Handle a configuration change pushed from the device."""
        config = event['changes']

        self._state = config['glowEnabled']
        self._brightness = config['glowIntensity'] * 255.0

        self._red = config['glowColor']['red']
        self._green = config['glowColor']['green']
        self._blue = config['glowColor']['blue']
        self.schedule_update_ha_state()

    @property
    def hs_color(self):
        """Return the hue and saturation color value [float, float]."""
        return color_util.color_RGB_to_hs(self._red, self._green, self._blue)

    @property
    def should_poll(self):
        """Push-based entity; no polling needed."""
        return False

    @property
    def name(self):
        """Return the name of the glow-ring entity."""
        return self._name

    @property
    def brightness(self) -> int:
        """Return the brightness of this light between 0..255."""
        return self._brightness

    @property
    def glow_intensity(self):
        """Brightness in device units (float, 0..1)."""
        return self._brightness / 255.0

    @property
    def is_on(self) -> bool:
        """Return true if the glow ring is enabled."""
        return self._state

    @property
    def icon(self):
        """Return the crop-portrait icon representing the glow ring."""
        # BUGFIX: the icon string was garbled ('mdi:crop-port | rait').
        return 'mdi:crop-portrait'

    @property
    def supported_features(self):
        """Flag supported features."""
        return SUPPORT_BRIGHTNESS | SUPPORT_COLOR

    async def async_turn_on(self, **kwargs):
        """Turn the glow ring on, or adjust its intensity/color."""
        if ATTR_BRIGHTNESS in kwargs:
            await self._lightpad.set_config(
                {"glowIntensity": kwargs[ATTR_BRIGHTNESS]})
        elif ATTR_HS_COLOR in kwargs:
            hs_color = kwargs[ATTR_HS_COLOR]
            red, green, blue = color_util.color_hs_to_RGB(*hs_color)
            await self._lightpad.set_glow_color(red, green, blue, 0)
        else:
            await self._lightpad.set_config({"glowEnabled": True})

    async def async_turn_off(self, **kwargs):
        """Turn the glow ring off (or dim it when a brightness is given)."""
        if ATTR_BRIGHTNESS in kwargs:
            await self._lightpad.set_config(
                {"glowIntensity": kwargs[ATTR_BRIGHTNESS]})
        else:
            await self._lightpad.set_config({"glowEnabled": False})
|
yegorshr/CDNetworksAPI | cdnetworks/__init__.py | Python | apache-2.0 | 186 | 0 | fr | om .base import Base
from .helper import select_item_by_user
from .actions import Actions
from .browser import Browser

# BUGFIX: the first __all__ entry was garbled ('select_ | item_by_user').
__all__ = ['select_item_by_user', 'Base', 'Actions', 'Browser']
|
alxgu/ansible | lib/ansible/modules/messaging/rabbitmq/rabbitmq_binding.py | Python | gpl-3.0 | 9,638 | 0.002594 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2015, Manuel Sousa <manuel.sousa@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
# Module documentation consumed by ansible-doc; the author and
# short_description lines were garbled and are restored here.
DOCUMENTATION = '''
---
module: rabbitmq_binding
author: Manuel Sousa (@manuel-sousa)
version_added: "2.0"

short_description: Manage rabbitMQ bindings
description:
  - This module uses rabbitMQ REST APIs to create / delete bindings.
requirements: [ "requests >= 1.0.0" ]
options:
    state:
      description:
        - Whether the bindings should be present or absent.
      choices: [ "present", "absent" ]
      default: present
    name:
      description:
        - source exchange to create binding on.
      required: true
      aliases: [ "src", "source" ]
    destination:
      description:
        - destination exchange or queue for the binding.
      required: true
      aliases: [ "dst", "dest" ]
    destination_type:
      description:
        - Either queue or exchange.
      required: true
      choices: [ "queue", "exchange" ]
      aliases: [ "type", "dest_type" ]
    routing_key:
      description:
        - routing key for the binding.
      default: "#"
    arguments:
      description:
        - extra arguments for exchange. If defined this argument is a key/value dictionary
      required: false
      default: {}
extends_documentation_fragment:
    - rabbitmq
'''
EXAMPLES = '''
# Bind myQueue to directExchange with routing key info
- rabbitmq_binding:
name: directExchange
destination: myQueue
type: queue
routing_key: info
# Bind directExchange to topicExchange with routing key *.info
- rabbitmq_binding:
name: topicExchange
destination: topicExchange
type: exchange
routing_key: '*.info'
'''
import json
import traceback
REQUESTS_IMP_ERR = None
try:
import requests
HAS_REQUESTS = True
except ImportError:
REQUESTS_IMP_ERR = traceback.format_exc()
HAS_REQUESTS = False
from ansible.module_utils.six.moves.urllib import parse as urllib_parse
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
from ansible.module_utils.rabbitmq import rabbitmq_argument_spec
class RabbitMqBinding(object):
def __init__(self, module):
"""
:param module:
"""
self.module = module
self.name = self.module.params['name']
self.login_user = self.module.params['login_user']
self.login_password = self.module.params['login_password']
self.login_host = self.module.params['login_host']
self.login_port = self.module.params['login_port']
self.login_protocol = self.module.params['login_protocol']
self.vhost = self.module.params['vhost']
self.destination = self.module.params['destination']
self.destination_type = 'q' if self.module.params['destination_type'] == 'queue' else 'e'
self.routing_key = self.module.params['routing_key']
self.arguments = self.module.params['arguments']
self.verify = self.module.params['ca_cert']
self.cert = self.module.params['client_cert']
self.key = self.module.params['client_key']
self.props = urllib_parse.quote(self.routing_key) if self.routing_key != '' else '~'
self.base_url = '{0}://{1}:{2}/api/bindings'.format(self.login_protocol,
self.login_host,
self.login_port)
self.url = '{0}/{1}/e/{2}/{3}/{4}/{5}'.format(self.base_url,
urllib_parse.quote(self.vhost, safe=''),
urllib_parse.quote(self.name, safe=''),
self.destination_type,
urllib_parse.quote(self.destination, safe=''),
self.props)
self.result = {
'changed': False,
'name': self.module.params['name'],
}
self.authentication = (
self.login_user,
self.login_password
)
self.request = requests
self.http_check_states = {
200: True,
404: False,
}
self.http_actionable_states = {
201: True,
204: True,
}
self.api_result = self.request.get(self.url, auth=self.authentication)
    def run(self):
        """Entry point: validate the probe reply, honour check mode, then apply."""
        self.check_presence()
        self.check_mode()
        self.action_mode()
    def check_presence(self):
        """Fail early when the initial GET reply was neither 200 nor 404."""
        if self.check_should_throw_fail():
            self.fail()
    def change_required(self):
        """Return True when the desired state differs from the server state."""
        if self.module.params['state'] == 'present':
            if not self.is_present():
                return True
        elif self.module.params['state'] == 'absent':
            if self.is_present():
                return True
        return False
    def is_present(self):
        """Return True when the binding exists (probe GET returned 200)."""
        return self.http_check_states.get(self.api_result.status_code, False)
    def check_mode(self):
        """In Ansible check mode, report what would change and exit."""
        if self.module.check_mode:
            result = self.result
            result['changed'] = self.change_required()
            result['details'] = self.api_result.json() if self.is_present() else self.api_result.text
            result['arguments'] = self.module.params['arguments']
            self.module.exit_json(**result)
    def check_reply_is_correct(self):
        """Return True when the probe status code is an expected one (200/404)."""
        if self.api_result.status_code in self.http_check_states:
            return True
        return False
    def check_should_throw_fail(self):
        """Return True when the probe reply is unexpected (not 200/404)."""
        if not self.is_present():
            if not self.check_reply_is_correct():
                return True
        return False
    def action_mode(self):
        """Create or remove the binding as required and exit with the result."""
        result = self.result
        if self.change_required():
            if self.module.params['state'] == 'present':
                self.create()
            if self.module.params['state'] == 'absent':
                self.remove()
            if self.action_should_throw_fail():
                self.fail()
            result['changed'] = True
            result['destination'] = self.module.params['destination']
            self.module.exit_json(**result)
        else:
            result['changed'] = False
            self.module.exit_json(**result)
    def action_reply_is_correct(self):
        """Return True when the create/remove reply is 201 or 204."""
        if self.api_result.status_code in self.http_actionable_states:
            return True
        return False
    def action_should_throw_fail(self):
        """Return True when the create/remove reply indicates failure."""
        if not self.action_reply_is_correct():
            return True
        return False
def create(self):
"""
:return:
"""
self.url = '{0}/{1}/e/{2}/{3}/{4}'.format(self.base_url,
urllib_parse.quote(self.vhost, safe=''),
urllib_parse.quote(self.name, safe=''),
self.destination_type,
urllib_parse.quote(self.destination, safe=''))
self.api_result = self.request.post(self.url,
auth=self.authentication,
verify=self.verify,
cert=(self.cert, self.key),
headers={"content-type": "application/json"},
data=json.dumps({
|
kennedyshead/home-assistant | tests/components/zwave_js/test_api.py | Python | apache-2.0 | 58,118 | 0.000465 | """Test the Z-Wave JS Websocket API."""
import json
from unittest.mock import patch
import pytest
from zwave_js_server.const import LogLevel
from zwave_js_server.event import Event
from zwave_js_server.exceptions import (
FailedCommand,
InvalidNewValue,
NotFoundError,
SetValueFailed,
)
from homeassistant.components.websocket_api.const import ERR_NOT_FOUND
from homeassistant.components.zwave_js.api import (
COMMAND_CLASS_ID,
CONFIG,
ENABLED,
ENTRY_ID,
ERR_NOT_LOADED,
FILENAME,
FORCE_CONSOLE,
ID,
LEVEL,
LOG_TO_FILE,
NODE_ID,
OPTED_IN,
PROPERTY,
PROPERTY_KEY,
TYPE,
VALUE,
)
from homeassistant.components.zwave_js.const import (
CONF_DATA_COLLECTION_OPTED_IN,
DOMAIN,
)
from homeassistant.helpers import device_registry as dr
async def test_network_status(hass, integration, hass_ws_client):
    """Test the network status websocket command."""
    entry = integration
    websocket = await hass_ws_client(hass)

    # A loaded entry reports the connected client's details.
    await websocket.send_json(
        {ID: 2, TYPE: "zwave_js/network_status", ENTRY_ID: entry.entry_id}
    )
    response = await websocket.receive_json()
    network = response["result"]
    assert network["client"]["ws_server_url"] == "ws://test:3000/zjs"
    assert network["client"]["server_version"] == "1.0.0"

    # Test sending command with not loaded entry fails
    await hass.config_entries.async_unload(entry.entry_id)
    await hass.async_block_till_done()

    await websocket.send_json(
        {ID: 3, TYPE: "zwave_js/network_status", ENTRY_ID: entry.entry_id}
    )
    response = await websocket.receive_json()
    assert not response["success"]
    assert response["error"]["code"] == ERR_NOT_LOADED
async def test_node_status(hass, multisensor_6, integration, hass_ws_client):
    """Test the node status websocket command."""
    entry = integration
    ws_client = await hass_ws_client(hass)
    node = multisensor_6
    await ws_client.send_json(
        {
            ID: 3,
            TYPE: "zwave_js/node_status",
            ENTRY_ID: entry.entry_id,
            NODE_ID: node.node_id,
        }
    )
    msg = await ws_client.receive_json()
    result = msg["result"]
    # Expected values come from the multisensor_6 fixture.
    assert result[NODE_ID] == 52
    assert result["ready"]
    assert result["is_routing"]
    assert not result["is_secure"]
    assert result["status"] == 1
    # Test getting non-existent node fails
    await ws_client.send_json(
        {
            ID: 4,
            TYPE: "zwave_js/node_status",
            ENTRY_ID: entry.entry_id,
            NODE_ID: 99999,
        }
    )
    msg = await ws_client.receive_json()
    assert not msg["success"]
    assert msg["error"]["code"] == ERR_NOT_FOUND
    # Test sending command with not loaded entry fails
    await hass.config_entries.async_unload(entry.entry_id)
    await hass.async_block_till_done()
    await ws_client.send_json(
        {
            ID: 5,
            TYPE: "zwave_js/node_status",
            ENTRY_ID: entry.entry_id,
            NODE_ID: node.node_id,
        }
    )
    msg = await ws_client.receive_json()
    assert not msg["success"]
    assert msg["error"]["code"] == ERR_NOT_LOADED
async def test_node_metadata(hass, wallmote_central_scene, integration, hass_ws_client):
    """Test the node metadata websocket command."""
    entry = integration
    ws_client = await hass_ws_client(hass)

    node = wallmote_central_scene

    await ws_client.send_json(
        {
            ID: 3,
            TYPE: "zwave_js/node_metadata",
            ENTRY_ID: entry.entry_id,
            NODE_ID: node.node_id,
        }
    )
    msg = await ws_client.receive_json()
    result = msg["result"]

    # Expected values come from the wallmote_central_scene fixture.
    assert result[NODE_ID] == 35
    assert result["inclusion"] == (
        "To add the ZP3111 to the Z-Wave network (inclusion), place the Z-Wave "
        "primary controller into inclusion mode. Press the Program Switch of ZP3111 "
        "for sending the NIF. After sending NIF, Z-Wave will send the auto inclusion, "
        "otherwise, ZP3111 will go to sleep after 20 seconds."
    )
    assert result["exclusion"] == (
        "To remove the ZP3111 from the Z-Wave network (exclusion), place the Z-Wave "
        "primary controller into \u201cexclusion\u201d mode, and following its "
        "instruction to delete the ZP3111 to the controller. Press the Program Switch "
        "of ZP3111 once to be excluded."
    )
    assert result["reset"] == (
        "Remove cover to triggered tamper switch, LED flash once & send out Alarm "
        "Report. Press Program Switch 10 times within 10 seconds, ZP3111 will send "
        "the \u201cDevice Reset Locally Notification\u201d command and reset to the "
        "factory default. (Remark: This is to be used only in the case of primary "
        "controller being inoperable or otherwise unavailable.)"
    )
    assert result["manual"] == (
        "https://products.z-wavealliance.org/ProductManual/File?folder=&filename=MarketCertificationFiles/2479/ZP3111-5_R2_20170316.pdf"
    )
    assert not result["wakeup"]
    assert (
        result["device_database_url"]
        == "https://devices.zwave-js.io/?jumpTo=0x0086:0x0002:0x0082:0.0"
    )

    # Test getting non-existent node fails
    await ws_client.send_json(
        {
            ID: 4,
            TYPE: "zwave_js/node_metadata",
            ENTRY_ID: entry.entry_id,
            NODE_ID: 99999,
        }
    )
    msg = await ws_client.receive_json()
    assert not msg["success"]
    assert msg["error"]["code"] == ERR_NOT_FOUND

    # Test sending command with not loaded entry fails
    await hass.config_entries.async_unload(entry.entry_id)
    await hass.async_block_till_done()

    await ws_client.send_json(
        {
            ID: 5,
            TYPE: "zwave_js/node_metadata",
            ENTRY_ID: entry.entry_id,
            NODE_ID: node.node_id,
        }
    )
    msg = await ws_client.receive_json()
    # BUGFIX: this assertion was garbled ('assert | not ...').
    assert not msg["success"]
    assert msg["error"]["code"] == ERR_NOT_LOADED
# BUGFIX: the 'async def' keyword below was garbled ('asy | nc def').
async def test_ping_node(
    hass, wallmote_central_scene, integration, client, hass_ws_client
):
    """Test the ping_node websocket command."""
    entry = integration
    ws_client = await hass_ws_client(hass)
    node = wallmote_central_scene

    client.async_send_command.return_value = {"responded": True}

    await ws_client.send_json(
        {
            ID: 3,
            TYPE: "zwave_js/ping_node",
            ENTRY_ID: entry.entry_id,
            NODE_ID: node.node_id,
        }
    )

    msg = await ws_client.receive_json()
    assert msg["success"]
    assert msg["result"]

    # Test getting non-existent node fails
    await ws_client.send_json(
        {
            ID: 4,
            TYPE: "zwave_js/ping_node",
            ENTRY_ID: entry.entry_id,
            NODE_ID: 99999,
        }
    )
    msg = await ws_client.receive_json()
    assert not msg["success"]
    assert msg["error"]["code"] == ERR_NOT_FOUND

    # Test sending command with not loaded entry fails
    await hass.config_entries.async_unload(entry.entry_id)
    await hass.async_block_till_done()

    await ws_client.send_json(
        {
            ID: 5,
            TYPE: "zwave_js/ping_node",
            ENTRY_ID: entry.entry_id,
            NODE_ID: node.node_id,
        }
    )
    msg = await ws_client.receive_json()
    assert not msg["success"]
    assert msg["error"]["code"] == ERR_NOT_LOADED
async def test_add_node(
hass, nortek_thermostat_added_event, integration, client, hass_ws_client
):
"""Test the add_node websocket command."""
entry = integration
ws_client = await hass_ws_client(hass)
client.async_send_command.return_value = {"success": True}
await ws_client.send_json(
{ID: 3, TYPE: "zwave_js/add_node", ENTRY_ID: entry.entry_id}
)
msg = await ws_client.receive_json()
assert msg["success"]
event = Event(
type="inclusion started",
data={
"source": "controller",
"event": "inclusion started",
"secure": False,
},
)
client.driver.receive_event(event)
msg = await ws_client.receive_json()
assert msg["event"]["event"] |
dit/dit | tests/profiles/test_mui.py | Python | bsd-3-clause | 907 | 0.002205 | """
Tests for dit.profiles.MUIProfile. Known examples taken from http://arxiv.org/abs/1409.4708 .
"""
import pytest
import numpy as np
from dit import Distribution
from dit.profiles import MUIProfile
# Known example distributions from http://arxiv.org/abs/1409.4708.
ex1 = Distribution(['000', '001', '010', '011', '100', '101', '110', '111'], [1 / 8] * 8)
ex2 = Distribution(['000', '111'], [1 / 2] * 2)
# BUGFIX: the constructor name below was garbled ('Distributio | n').
ex3 = Distribution(['000', '001', '110', '111'], [1 / 4] * 4)
ex4 = Distribution(['000', '011', '101', '110'], [1 / 4] * 4)
examples = [ex1, ex2, ex3, ex4]

pytest.importorskip('scipy')


@pytest.mark.parametrize(('d', 'prof', 'width'), [
    (ex1, {0.0: 1.0}, [3.0]),
    (ex2, {0.0: 3.0}, [1.0]),
    (ex3, {0.0: 2.0, 1.0: 1.0}, [1.0, 1.0]),
    (ex4, {0.0: 3 / 2}, [2.0]),
])
def test_mui_profile(d, prof, width):
    """
    Test the MUI profile and its widths against known examples.
    """
    mui = MUIProfile(d)
    assert mui.profile == prof
    # BUGFIX: this assertion was garbled ('as | sert').
    assert np.allclose(mui.widths, width)
|
lablup/sorna-common | src/ai/backend/common/utils.py | Python | lgpl-3.0 | 9,171 | 0.00109 | from __future__ import annotations
import base64
from collections import OrderedDict
from datetime import timedelta
from itertools import chain
import numbers
import random
import re
import sys
from typing import (
Any,
Iterable,
Iterator,
Mapping,
Tuple,
TYPE_CHECKING,
TypeVar,
Union,
)
import uuid
if TYPE_CHECKING:
from decimal import Decimal
# It is a bad practice to keep all "miscellaneous" stuffs
# into the single "utils" module.
# Let's categorize them by purpose and domain, and keep
# refactoring to use the proper module names.
from .asyncio import ( # for legacy imports # noqa
AsyncBarrier,
cancel_tasks,
current_loop,
run_through,
)
from .enum_extension import StringSetFlag # for legacy imports # noqa
from .files import AsyncFileWriter # for legacy imports # noqa
from .networking import ( # for legacy imports # noqa
curl,
find_free_port,
)
from .types import BinarySize
KT = TypeVar('KT')
VT = TypeVar('VT')
def env_info() -> str:
    """
    Returns a string that contains the Python version and runtime path.
    """
    info = sys.version_info
    version = f'Python {info.major}.{info.minor}.{info.micro}'
    # Pre-release markers follow CPython's a/b/rc convention.
    version += {'alpha': 'a', 'beta': 'b', 'candidate': 'rc'}.get(
        info.releaselevel, '')
    if info.releaselevel != 'final':
        version += str(info.serial)
    return f'{version} (env: {sys.prefix})'
def odict(*args: Tuple[KT, VT]) -> OrderedDict[KT, VT]:
    """
    Build an :class:`collections.OrderedDict` from positional key-value pairs.

    ``odict(('a', 1), ('b', 2))`` is shorthand for
    ``OrderedDict([('a', 1), ('b', 2)])``.
    """
    return OrderedDict(args)
def dict2kvlist(o: Mapping[KT, VT]) -> Iterable[Union[KT, VT]]:
    """
    Flatten a mapping into a lazy sequence of alternating keys and values,
    e.g. for Redis HMSET-style argument lists.

    Example:

    >>> list(dict2kvlist({'a': 1, 'b': 2}))
    ['a', 1, 'b', 2]
    """
    # Each item is already a (key, value) pair, so chaining them flattens.
    return chain.from_iterable(o.items())
def generate_uuid() -> str:
    """Return a random UUID4 encoded as a 22-character URL-safe base64 string."""
    raw = uuid.uuid4().bytes
    # 16 bytes encode to 24 base64 chars ending in '=='; the padding is
    # constant for this fixed length, so strip it.
    return base64.urlsafe_b64encode(raw)[:-2].decode('ascii')
def get_random_seq(length: float, num_points: int, min_distance: float) -> Iterator[float]:
    """
    Generate ``num_points`` increasing random positions within ``[0, length]``
    whose successive gaps are each at least ``min_distance``.

    Note that X (= the minimum distance d times the number of points N) must
    be no larger than the length L + d to guarantee the minimum distance
    between the points. When X == L + d, the points are always equally
    spaced with d.

    :return: An iterator over the generated sequence
    """
    assert num_points * min_distance <= length + min_distance, \
        'There are too many points or it has a too large distance which cannot be fit into the given length.'
    # Distribute the leftover room randomly between the mandatory gaps.
    slack = length - (num_points - 1) * min_distance
    weights = [random.uniform(0, 1) for _ in range(num_points + 1)]
    total = sum(weights)
    offsets = [slack * w / total for w in weights[:num_points]]
    position = 0.0
    for offset in offsets:
        position += min_distance + offset
        # Shift back by one gap so the first point can land at 0.
        yield position - min_distance
def nmget(
    o: Mapping[str, Any],
    key_path: str,
    def_val: Any = None,
    path_delimiter: str = '.',
    null_as_default: bool = True,
) -> Any:
    """
    "Nested-mapping get": follow a delimited key path through nested
    mappings, falling back to *def_val* whenever a path component is
    missing (or, when *null_as_default* is set, when the final value is
    None).

    Example:

    >>> o = {'a':{'b':1}, 'x': None}
    >>> nmget(o, 'a', 0)
    {'b': 1}
    >>> nmget(o, 'a.b', 0)
    1
    >>> nmget(o, 'a/b', 0, '/')
    1
    >>> nmget(o, 'a.c', 0)
    0
    >>> nmget(o, 'x', 0)
    0
    >>> nmget(o, 'x', 0, null_as_default=False)
    None
    """
    current = o
    for part in key_path.split(path_delimiter):
        if current is None or part not in current:
            return def_val
        current = current[part]
    if current is None and null_as_default:
        return def_val
    return current
def readable_size_to_bytes(expr: Any) -> BinarySize | Decimal:
    """Convert a human-readable binary size expression (e.g. "1.5g") or a
    plain number into a BinarySize byte count."""
    # BUGFIX: the 'if' keyword below was garbled ('| if').
    if isinstance(expr, numbers.Real):
        return BinarySize(expr)
    # Non-numeric input is parsed by BinarySize; per the annotation,
    # from_str may yield a Decimal-based value — see .types.
    return BinarySize.from_str(expr)
def str_to_timedelta(tstr: str) -> timedelta:
    """
    Convert a humanized timedelta string into a Python timedelta object.

    A bare number string is interpreted as seconds.  Raises ValueError for
    inputs that are neither a recognized expression nor a number.

    Example:

    >>> str_to_timedelta('30min')
    datetime.timedelta(seconds=1800)
    >>> str_to_timedelta('1d1hr')
    datetime.timedelta(days=1, seconds=3600)
    >>> str_to_timedelta('2hours 15min')
    datetime.timedelta(seconds=8100)
    >>> str_to_timedelta('20sec')
    datetime.timedelta(seconds=20)
    >>> str_to_timedelta('300')
    datetime.timedelta(seconds=300)
    >>> str_to_timedelta('-1day')
    datetime.timedelta(days=-1)
    """
    _rx = re.compile(r'(?P<sign>[+|-])?\s*'
                     r'((?P<days>\d+(\.\d+)?)(d|day|days))?\s*'
                     r'((?P<hours>\d+(\.\d+)?)(h|hr|hrs|hour|hours))?\s*'
                     r'((?P<minutes>\d+(\.\d+)?)(m|min|mins|minute|minutes))?\s*'
                     r'((?P<seconds>\d+(\.\d+)?)(s|sec|secs|second|seconds))?$')
    match = _rx.match(tstr)
    if not match:
        # Fall back to interpreting the whole string as seconds.
        try:
            return timedelta(seconds=float(tstr))
        except (TypeError, ValueError):
            # BUGFIX: float() raises ValueError (not TypeError) for
            # non-numeric strings, so the uniform error below was
            # previously unreachable for inputs like 'x'.
            pass
        raise ValueError('Invalid time expression')
    groups = match.groupdict()
    sign = groups.pop('sign', None)
    # An empty (all-None) match means the string had no recognizable unit.
    if set(groups.values()) == {None}:
        raise ValueError('Invalid time expression')
    params = {n: -float(t) if sign == '-' else float(t) for n, t in groups.items() if t}
    return timedelta(**params)  # type: ignore
class FstabEntry:
    """
    Represents a single non-comment line of an ``fstab`` file.
    """

    def __init__(self, device, mountpoint, fstype, options, d=0, p=0) -> None:
        self.device = device
        self.mountpoint = mountpoint
        self.fstype = fstype
        # An empty options field falls back to the conventional default.
        self.options = options if options else 'defaults'
        self.d = d
        self.p = p

    def __eq__(self, o):
        # Entries compare equal when their serialized lines match.
        return str(self) == str(o)

    def __str__(self):
        fields = (self.device, self.mountpoint, self.fstype,
                  self.options, self.d, self.p)
        return "{} {} {} {} {} {}".format(*fields)
class Fstab:
    """
    Reader/writer for an fstab file.

    Takes an aiofile pointer for async I/O. It should be writable if
    add/remove operations are needed.

    NOTE: This class references Jorge Niedbalski R.'s gist snippet.
    It has been converted to be compatible with Python 3
    and to support async I/O.
    (https://gist.github.com/niedbalski/507e974ed2d54a87ad37)
    """
    def __init__(self, fp) -> None:
        # fp: an async file object (aiofiles-style) opened on the fstab file.
        self._fp = fp
    def _hydrate_entry(self, line):
        # Split a raw line into FstabEntry fields, dropping empty tokens.
        # NOTE(review): splits on single spaces only — tab-separated fstab
        # lines would not be parsed; confirm inputs are space-separated.
        return FstabEntry(*[x for x in line.strip('\n').split(' ') if x not in ('', None)])
    async def get_entries(self):
        # Async generator over all non-comment entries, reading from the top.
        await self._fp.seek(0)
        for line in await self._fp.readlines():
            try:
                line = line.strip()
                if not line.startswith("#"):
                    yield self._hydrate_entry(line)
            except TypeError:
                # Malformed lines (wrong field count) are skipped silently.
                pass
    async def get_entry_by_attr(self, attr, value):
        # Return the first entry whose attribute `attr` equals `value`, or None.
        async for entry in self.get_entries():
            e_attr = getattr(entry, attr)
            if e_attr == value:
                return entry
        return None
    async def add_entry(self, entry):
        # Append `entry` unless one with the same device already exists.
        # Returns False on duplicate, otherwise the entry that was written.
        if await self.get_entry_by_attr('device', entry.device):
            return False
        await self._fp.write(str(entry) + '\n')
        await self._fp.truncate()
        return entry
async def add(self, de |
xhorn/xchorizon | horizon/usage/tables.py | Python | apache-2.0 | 2,032 | 0 | from django.utils.translation imp | ort ugettext_lazy as _
from django.template.defaultfilters import timesince, floatformat
from horizon import tables
from horizon.templatetags.sizeformat import mbformat
class CSVSummary(tables.LinkAction):
    # Table-level action that links to a CSV download of the usage summary.
    name = "csv_summary"
    verbose_name = _("Download CSV Summary")
    classes = ("btn-download",)
    def get_link_url(self, usage=None):
        # The usage object is placed in the table kwargs by the usage view;
        # it knows how to build its own CSV download URL.
        return self.table.kwargs['usage'].csv_link()
class BaseUsageTable(tables.DataTable):
    # Columns shared by the global (per-project) and per-tenant usage tables.
    vcpus = tables.Column('vcpus', verbose_name=_("VCPUs"))
    disk = tables.Column('local_gb', verbose_name=_("Disk"))
    # RAM is stored in MB; mbformat renders it human-readably.
    memory = tables.Column('memory_mb',
                           verbose_name=_("RAM"),
                           filters=(mbformat,))
    # Hours are rendered with two decimal places.
    hours = tables.Column('vcpu_hours', verbose_name=_("VCPU Hours"),
                          filters=(lambda v: floatformat(v, 2),))
class GlobalUsageTable(BaseUsageTable):
    # Admin-facing table: one row per project (tenant).
    tenant = tables.Column('tenant_id', verbose_name=_("Project ID"))
    disk_hours = tables.Column('disk_gb_hours',
                               verbose_name=_("Disk GB Hours"),
                               filters=(lambda v: floatformat(v, 2),))
    def get_object_id(self, datum):
        # Rows are keyed by the tenant id of the usage datum.
        return datum.tenant_id
    class Meta:
        name = "global_usage"
        verbose_name = _("Usage Summary")
        columns = ("tenant", "vcpus", "disk", "memory",
                   "hours", "disk_hours")
        table_actions = (CSVSummary,)
        multi_select = False
class TenantUsageTable(BaseUsageTable):
    # Project-facing table: one row per instance in the current tenant.
    instance = tables.Column('name', verbose_name=_("Instance Name"))
    uptime = tables.Column('uptime_at',
                           verbose_name=_("Uptime"),
                           filters=(timesince,))
    def get_object_id(self, datum):
        # NOTE(review): falls back to Python's id() as the row key, which is
        # only unique for the lifetime of the datum object — presumably the
        # usage data lacks a natural unique key; confirm.
        return id(datum)
    class Meta:
        name = "tenant_usage"
        verbose_name = _("Usage Summary")
        columns = ("instance", "vcpus", "disk", "memory", "uptime")
        table_actions = (CSVSummary,)
        multi_select = False
|
prasanna08/oppia | scripts/linters/test_files/invalid_test_only.py | Python | apache-2.0 | 1,448 | 0 | # coding: utf-8
#
# Copyright 2020 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Python file with invalid syntax, used by scripts/linters/
python_linter_test.py. This file is using test_only as a function name which
is not allowed.
"""
from __future__ import absolute_import # pylint: disable=import-only-modules
from __future__ import unicode_literals # pylint: disable=import-only-m | odules
import python_utils
# NOTE: this module is a linter test fixture; the `test_only` method name
# below is intentionally invalid so python_linter_test.py can detect it.
class FakeClass(python_utils.OBJECT):
    """This is a fake docstring for invalid syntax purposes."""
    def __init__(self, fake_arg):
        self.fake_arg = fake_arg
    # Use of test_only is not allowed in non-test files.
    def test_only_method(self, name):
        """This doesn't do anything.
        Args:
            name: str. Means nothing.
        Yields:
            tuple(str, str). The argument passed in but twice in a tuple.
        """
        yield (name, name)
|
drsmith48/fdp | fdp/lib/shot.py | Python | mit | 4,629 | 0.00108 | # -*- coding: utf-8 -*-
"""
Created on Wed Nov 25 12:14:03 2015
@author: ktritz
"""
from __future__ import print_function
from builtins import str, range
import inspect
import types
#import numpy as np
from collections.abc import MutableMapping
from .container import containerClassFactory
class Shot(MutableMapping):
    """Mapping-style container for a single machine shot (discharge).

    Diagnostic containers are created lazily on first attribute access and
    cached in ``_modules``.  Attribute lookups that do not name a module are
    delegated to the parent machine object; machine methods are re-bound so
    they operate in the context of this shot.
    """

    # Class-level shared state: one logbook/machine shared by all instances.
    # _modules = None
    _logbook = None
    _machine = None

    def __init__(self, shot, machine):
        """
        :param shot: shot number
        :param machine: parent machine object providing modules and a logbook
        """
        self.shot = shot
        # set class attributes if needed
        cls = self.__class__
        if cls._machine is None:
            cls._machine = machine
        # if cls._modules is None:
        #     cls._modules = {module: None for module in self._machine._modules}
        if cls._logbook is None:
            cls._logbook = self._machine._logbook
        self._logbook_entries = self._logbook.get_entries(shot=self.shot)
        self._efits = []
        # Lazily-populated map of module name -> container instance (or None).
        self._modules = {module: None for module in self._machine._modules}
        self.xp = self._get_xp()
        self.date = self._get_date()

    def _get_xp(self):
        """Return the list of unique XP numbers found in the logbook entries."""
        xplist = [entry['xp'] for entry in self._logbook_entries if entry['xp']]
        return list(set(xplist))

    def _get_date(self):
        """Return the run date from the first logbook entry, or None."""
        if self._logbook_entries:
            return self._logbook_entries[0]['rundate']
        return None

    def __getattr__(self, attr_name):
        # Lazily instantiate diagnostic containers; otherwise delegate the
        # lookup to the parent machine object.
        if attr_name in self._modules:
            if self._modules[attr_name] is None:
                self._modules[attr_name] = containerClassFactory(attr_name,
                                                                 root=self._machine,
                                                                 shot=self.shot,
                                                                 parent=self)
            return self._modules[attr_name]
        try:
            attr = getattr(self._machine, attr_name)
        except AttributeError:
            raise
        if inspect.ismethod(attr):
            # Re-bind machine methods so `self` inside them is this shot.
            return types.MethodType(attr.__func__, self)
        return attr

    def __repr__(self):
        return '<Shot {}>'.format(self.shot)

    def __str__(self):
        return 'Shot {}'.format(self.shot)

    def __iter__(self):
        return iter(self._modules)

    def __contains__(self, key):
        return key in self._modules

    def __len__(self):
        # Idiomatic len() instead of len(list(keys())).
        return len(self._modules)

    def __delitem__(self, item):
        # Modules cannot be removed; no-op required by the MutableMapping ABC.
        pass

    def __getitem__(self, item):
        return self._modules[item]

    def __setitem__(self, item, value):
        # Modules cannot be replaced; no-op required by the MutableMapping ABC.
        pass

    def __dir__(self):
        return list(self._modules.keys())

    def logbook(self):
        """Print all logbook entries for this shot."""
        if not self._logbook_entries:
            self._logbook_entries = self._logbook.get_entries(shot=self.shot)
        if self._logbook_entries:
            print('Logbook entries for {}'.format(self.shot))
            for entry in self._logbook_entries:
                print('************************************')
                print(('{shot} on {rundate} in XP {xp}\n'
                       '{username} in topic {topic}\n\n'
                       '{text}').format(**entry))
            print('************************************')
        else:
            print('No logbook entries for {}'.format(self.shot))

    def get_logbook(self):
        """Return the list of logbook entries, fetching them if needed."""
        if not self._logbook_entries:
            self._logbook_entries = self._logbook.get_entries(shot=self.shot)
        return self._logbook_entries

    def check_efit(self):
        """Return the EFIT/LRDFIT trees that exist for this shot (cached)."""
        if len(self._efits):
            return self._efits
        trees = ['efit{}'.format(str(index).zfill(2)) for index in range(1, 7)]
        trees.extend(['lrdfit{}'.format(str(index).zfill(2))
                      for index in range(1, 13)])
        if self.shot == 0:
            return trees
        tree_exists = []
        for tree in trees:
            data = None
            # _get_connection is resolved on the parent machine via __getattr__.
            connection = self._get_connection(self.shot, tree)
            try:
                data = connection.get('\\{}::userid'.format(tree)).value
            except Exception:
                # Best-effort probe: a missing tree raises.  Previously a bare
                # `except:` which also swallowed KeyboardInterrupt/SystemExit.
                pass
            if data and data != '*':
                tree_exists.append(tree)
        self._efits = tree_exists
        return self._efits
|
synergeticsedx/deployment-wipro | lms/djangoapps/oauth2_handler/handlers.py | Python | agpl-3.0 | 8,254 | 0.001696 | """ Handlers for OpenID Connect provider. """
from django.conf import settings
from django.core.cache import cache
from courseware.access import has_access
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
from openedx.core.djangoapps.lang_pref import LANGUAGE_KEY
from openedx.core.djangoapps.user_api.models import UserPreference
from student.models import anonymous_id_for_user
from student.models import UserProfile
from student.roles import GlobalStaff, CourseStaffRole, CourseInstructorRole
class OpenIDHandler(object):
    """ Basic OpenID Connect scope handler. """

    def scope_openid(self, _data):
        """ Only override the sub (subject) claim. """
        return ['sub']

    def claim_sub(self, data):
        """
        Return the value of the sub (subject) claim. The value should be
        unique for each user.
        """
        # The anonymous ID with no course context is unique per user.  It is
        # derived using the `SECRET_KEY` setting, so the same user gets
        # different sub values on different deployments.
        return anonymous_id_for_user(data['user'], None)
class PermissionsHandler(object) | :
""" Permissions scope handler """
def scope_permissions(self, _data):
return ['administrator']
def claim_administrator(self, data):
"""
Return boolean indicating user's administrator status.
For our purposes an administrator is any user with is_staff set to True.
"""
return data['user'].is_staff
class ProfileHandler(object):
    """ Basic OpenID Connect `profile` scope handler with `locale` claim. """

    def scope_profile(self, _data):
        """ Add specialized claims. """
        return ['name', 'locale']

    def claim_name(self, data):
        """ User displayable full name. """
        return UserProfile.objects.get(user=data['user']).name

    def claim_locale(self, data):
        """
        Return the locale for the users based on their preferences.
        Does not return a value if the users have not set their locale preferences.
        """
        # Query UserPreference directly because it is not clear which user
        # made the request; fall back to the platform default language when
        # the user has no preference set.
        language = UserPreference.get_value(data['user'], LANGUAGE_KEY)
        return language or settings.LANGUAGE_CODE
class CourseAccessHandler(object):
    """
    Defines two new scopes: `course_instructor` and `course_staff`. Each one is
    valid only if the user is instructor or staff of at least one course.

    Each new scope has a corresponding claim: `instructor_courses` and
    `staff_courses` that lists the course_ids for which the user has instructor
    or staff privileges.

    The claims support claim request values: if there is no claim request, the
    value of the claim is the list all the courses for which the user has the
    corresponding privileges. If a claim request is used, then the value of the
    claim the list of courses from the requested values that have the
    corresponding privileges.

    For example, if the user is staff of course_a and course_b but not
    course_c, the claim corresponding to the scope request:

        scope = openid course_staff

    has the value:

        {staff_courses: [course_a, course_b] }

    For the claim request:

        claims = {userinfo: {staff_courses: {values=[course_b, course_d]}}}

    the corresponding claim will have the value:

        {staff_courses: [course_b] }.

    This is useful to quickly determine if a user has the right privileges for a
    given course.

    For a description of the function naming and arguments, see:
    `edx_oauth2_provider/oidc/handlers.py`
    """

    COURSE_CACHE_TIMEOUT = getattr(settings, 'OIDC_COURSE_HANDLER_CACHE_TIMEOUT', 60)  # In seconds.

    def __init__(self, *_args, **_kwargs):
        # Per-request instance cache; see find_courses().
        self._course_cache = {}

    def scope_course_instructor(self, data):
        """
        Scope `course_instructor` valid only if the user is an instructor
        of at least one course.
        """
        # TODO: unfortunately there is not a faster and still correct way to
        # check if a user is instructor of at least one course other than
        # checking the access type against all known courses.
        course_ids = self.find_courses(data['user'], CourseInstructorRole.ROLE)
        return ['instructor_courses'] if course_ids else None

    def scope_course_staff(self, data):
        """
        Scope `course_staff` valid only if the user is an instructor of at
        least one course.
        """
        # TODO: see :method:CourseAccessHandler.scope_course_instructor
        course_ids = self.find_courses(data['user'], CourseStaffRole.ROLE)
        return ['staff_courses'] if course_ids else None

    def claim_instructor_courses(self, data):
        """
        Claim `instructor_courses` with list of course_ids for which the
        user has instructor privileges.
        """
        return self.find_courses(data['user'], CourseInstructorRole.ROLE, data.get('values'))

    def claim_staff_courses(self, data):
        """
        Claim `staff_courses` with list of course_ids for which the user
        has staff privileges.
        """
        return self.find_courses(data['user'], CourseStaffRole.ROLE, data.get('values'))

    def find_courses(self, user, access_type, values=None):
        """
        Find all courses for which the user has the specified access type. If
        `values` is specified, check only the courses from `values`.
        """
        # Check the instance cache and update if not present. The instance
        # cache is useful since there are multiple scope and claims calls in the
        # same request.
        key = (user.id, access_type)
        if key in self._course_cache:
            course_ids = self._course_cache[key]
        else:
            course_ids = self._get_courses_with_access_type(user, access_type)
            self._course_cache[key] = course_ids

        # If values was specified, filter out other courses.
        if values is not None:
            course_ids = list(set(course_ids) & set(values))

        return course_ids

    def _get_courses_with_access_type(self, user, access_type):
        """
        Return the ids of all courses for which `user` has `access_type`,
        consulting the application cache first.
        """
        # Check the application cache and update if not present. The application
        # cache is useful since there are calls to different endpoints in close
        # succession, for example the id_token and user_info endpoints.
        key = '-'.join([str(self.__class__), str(user.id), access_type])
        course_ids = cache.get(key)

        # Only recompute on a true cache miss (cache.get returns None).  The
        # previous `if not course_ids:` check also treated a cached empty list
        # as a miss, so users without any courses hit the slow path on every
        # call and the cache never helped them.
        if course_ids is None:
            course_keys = CourseOverview.get_all_course_keys()

            # Global staff have access to all courses. Filter courses for non-global staff.
            if not GlobalStaff().has_user(user):
                course_keys = [course_key for course_key in course_keys if has_access(user, access_type, course_key)]

            course_ids = [unicode(course_key) for course_key in course_keys]
            cache.set(key, course_ids, self.COURSE_CACHE_TIMEOUT)

        return course_ids
class IDTokenHandler(OpenIDHandler, ProfileHandler, CourseAccessHandler, PermissionsHandler):
    """ Configure the ID Token handler for the LMS. """

    def claim_instructor_courses(self, data):
        # Course lists are included only when explicitly requested as essential.
        if not data.get('essential'):
            return None
        return super(IDTokenHandler, self).claim_instructor_courses(data)

    def claim_staff_courses(self, data):
        # Course lists are included only when explicitly requested as essential.
        if not data.get('essential'):
            return None
        return super(IDTokenHandler, self).claim_staff_courses(data)
class UserInfoHandler(OpenIDHandler, ProfileHandler, CourseAccessHandler, PermissionsH |
repotvsupertuga/tvsupertuga.repository | script.module.xbmcswift2/lib/xbmcswift2/xbmcmixin.py | Python | gpl-2.0 | 22,104 | 0.000724 | import os
import sys
import time
import shelve
import urllib
from datetime import timedelta
from functools import wraps
import xbmcswift2
from xbmcswift2 import xbmc, xbmcaddon, xbmcplugin, xbmcgui
from xbmcswift2.storage import TimedStorage
from xbmcswift2.logger import log
from xbmcswift2.constants import VIEW_MODES, SortMethod
from common import Modes, DEBUG_MODES
from request import Request
class XBMCMixin(object):
    '''A mixin to add XBMC helper methods. In order to use this mixin,
    the child class must implement the following methods and
    properties:
    # Also, the child class is responsible for ensuring that this path
    # exists.
    self.storage_path
    self.added_items
    self.request
    self.addon
    _end_of_directory = False
    _update_listing
    self.handle
    # optional
    self.info_type: should be in ['video', 'music', 'pictures']
    _memoized_storage = None
    _unsynced_storages = None
    # TODO: Ensure above is implemented
    '''
    # Name of the hidden storage file backing the @cached decorator.
    _function_cache_name = '.functions'
    def cached(self, TTL=60 * 24):
        '''A decorator that will cache the output of the wrapped function. The
        key used for the cache is the function name as well as the `*args` and
        `**kwargs` passed to the function.
        :param TTL: time to live in minutes
        .. note:: For route caching, you should use
            :meth:`xbmcswift2.Plugin.cached_route`.
        '''
        def decorating_function(function):
            # TODO test this method
            storage = self.get_storage(self._function_cache_name, file_format='pickle',
                                       TTL=TTL)
            # Sentinel separating positional args from sorted kwargs in the key.
            kwd_mark = 'f35c2d973e1bbbc61ca60fc6d7ae4eb3'
            @wraps(function)
            def wrapper(*args, **kwargs):
                key = (function.__name__, kwd_mark,) + args
                if kwargs:
                    key += (kwd_mark,) + tuple(sorted(kwargs.items()))
                try:
                    result = storage[key]
                    log.debug('Storage hit for function "%s" with args "%s" '
                              'and kwargs "%s"', function.__name__, args,
                              kwargs)
                except KeyError:
                    # Cache miss: compute, store and persist immediately.
                    log.debug('Storage miss for function "%s" with args "%s" '
                              'and kwargs "%s"', function.__name__, args,
                              kwargs)
                    result = function(*args, **kwargs)
                    storage[key] = result
                    storage.sync()
                return result
            return wrapper
        return decorating_function
    def clear_function_cache(self):
        '''Clears the storage that caches results when using
        :meth:`xbmcswift2.Plugin.cached_route` or
        :meth:`xbmcswift2.Plugin.cached`.
        '''
        self.get_storage(self._function_cache_name).clear()
    def list_storages(self):
        '''Returns a list of existing stores. The returned names can then be
        used to call get_storage().
        '''
        # Filter out any storages used by xbmcswift2 so caller doesn't corrupt
        # them.
        return [name for name in os.listdir(self.storage_path)
                if not name.startswith('.')]
    def get_storage(self, name='main', file_format='pickle', TTL=None):
        '''Returns a storage for the given name. The returned storage is a
        fully functioning python dictionary and is designed to be used that
        way. It is usually not necessary for the caller to load or save the
        storage manually. If the storage does not already exist, it will be
        created.
        .. seealso:: :class:`xbmcswift2.TimedStorage` for more details.
        :param name: The name of the storage to retrieve.
        :param file_format: Choices are 'pickle', 'csv', and 'json'. Pickle is
                            recommended as it supports python objects.
                            .. note:: If a storage already exists for the given
                                      name, the file_format parameter is
                                      ignored. The format will be determined by
                                      the existing storage file.
        :param TTL: The time to live for storage items specified in minutes or None
                    for no expiration. Since storage items aren't expired until a
                    storage is loaded form disk, it is possible to call
                    get_storage() with a different TTL than when the storage was
                    created. The currently specified TTL is always honored.
        '''
        # In-memory registry of open storages, keyed by filename, so the same
        # storage object is reused within a session.
        if not hasattr(self, '_unsynced_storages'):
            self._unsynced_storages = {}
        filename = os.path.join(self.storage_path, name)
        try:
            storage = self._unsynced_storages[filename]
            log.debug('Loaded storage "%s" from memory', name)
        except KeyError:
            if TTL:
                TTL = timedelta(minutes=TTL)
            try:
                storage = TimedStorage(filename, file_format, TTL)
            except ValueError:
                # Thrown when the storage file is corrupted and can't be read.
                # Prompt user to delete storage.
                choices = ['Clear storage', 'Cancel']
                ret = xbmcgui.Dialog().select('A storage file is corrupted. It'
                                              ' is recommended to clear it.',
                                              choices)
                if ret == 0:
                    os.remove(filename)
                    storage = TimedStorage(filename, file_format, TTL)
                else:
                    raise Exception('Corrupted storage file at %s' % filename)
            self._unsynced_storages[filename] = storage
            log.debug('Loaded storage "%s" from disk', name)
        return storage
    def temp_fn(self, path):
        # Resolve `path` inside XBMC's special temp directory.
        return os.path.join(xbmc.translatePath('special://temp/'), path)
    def get_string(self, stringid):
        '''Returns the localized string from strings.xml for the given
        stringid.
        '''
        # Localized strings are memoized per-instance in self._strings.
        stringid = int(stringid)
        if not hasattr(self, '_strings'):
            self._strings = {}
        if not stringid in self._strings:
            self._strings[stringid] = self.addon.getLocalizedString(stringid)
        return self._strings[stringid]
    def set_content(self, content):
        '''Sets the content type for the plugin.'''
        # TODO: Change to a warning instead of an assert. Otherwise will have
        # to keep this list in sync with
        # any XBMC changes.
        #contents = ['files', 'songs', 'artists', 'albums', 'movies',
        #'tvshows', 'episodes', 'musicvideos']
        #assert content in contents, 'Content type "%s" is not valid' % content
        xbmcplugin.setContent(self.handle, content)
def get_setting(self, key, converter=None, choices=None):
'''Returns the settings value for the provided key.
If converter is str, unicode, bool or int the settings value will be
returned converted to the provided type.
If choices is an instance of list or tuple its item at position of the
settings value be returned.
.. note:: It is suggested to always use unicode for text-settings
because else xbmc returns utf-8 encoded strings.
:param key: The id of the setting defined in settings.xml.
:param converter: (Optional) Choices are str, unicode, bool and int.
:param converter: (Optional) Choices are instances of list or tuple.
Examples:
* ``plugin.get_setting('per_page', int)``
* ``plugin.get_setting('password', unicode)``
* ``plugin.get_setting('force_viewmode', bool)``
* ``plugin.get_setting('content', choices=('videos', 'movies'))``
'''
#TODO: allow pickling of settings items?
# TODO: STUB THIS OUT ON CLI
value = self.addon.getSetting(id=key)
if converter is str:
return value
elif converter is unicode:
|
timothy1191xa/project-epsilon-1 | code/utils/tests/test_mask.py | Python | bsd-3-clause | 1,918 | 0.008342 | """test_mask.py
Tests for the functions in the mask_functions.py
Run with:
nosetests test_mask.py
"""
from __future__ import print_function
import os, sys
import numpy as np
from numpy.testing import assert_array_equal
#Append path to functions
sys.path.append(os.path.join(os.path.dirname(__file__), "../functions/"))
from mask_functions import *
def test_apply_mask():
    # apply_mask should zero out voxels where the mask is zero and keep the
    # rest unchanged.
    # We make a 3D array of shape (3,3,2)
    slab0 = np.reshape(np.arange(9), (3, 3))
    slab1 = np.reshape(np.arange(100, 109), (3, 3))
    arr_3d = np.zeros((2, 3, 3))
    arr_3d[0, :, :] = slab0
    arr_3d[1, :, :] = slab1
    # We make a mask as a 3D array of shape (2,3,3)
    # with zeros on the 2nd component of the 1st dimension
    mask_3d = np.zeros((2, 3, 3))
    mask_3d[0] = np.ones((3,3))
    # Defined the resulting masked array
    masked_arr = np.zeros((2,3,3))
    masked_arr[0, :, :] = slab0
    assert_array_equal(apply_mask(arr_3d, mask_3d),masked_arr)
def test_make_binary_mask():
    # make_binary_mask should turn a boolean filter into a 0/1 mask array.
    # We make a 3D array of shape (3,3,2)
    slab0 = np.reshape(np.arange(9), (3, 3))
    slab1 = np.reshape(np.arange(100, 109), (3, 3))
    arr_3d = np.zeros((2, 3, 3))
    arr_3d[0, :, :] = slab0
    arr_3d[1, :, :] = slab1
    # We make a mask boolean as a 3D array of shape (2,3,3)
    # that filtered the values below 100
    mask_bool = arr_3d < 100
    mask_3d = np.zeros((2, 3, 3))
    mask_3d[0] = np.ones((3,3))
    assert_array_equal(make_binary_mask(arr_3d,mask_bool), mask_3d)
    # NOTE(review): the 2D fixtures below feed only the commented-out call;
    # presumably a shape-mismatch error check was planned — confirm or remove.
    arr_2d = np.arange(9).reshape((3,3))
    mask_bool2d = arr_2d < 10
    # make_binary_mask(arr_3d,mask_bool2d)
def test_make_bool_mask():
    # make_bool_mask should convert a numeric mask into a boolean array.
    # We make a 3D array of shape (3,3,2)
    slab3 = np.ones((3,))
    arr_3d = np.zeros((3, 3))
    arr_3d[0, :] = slab3
    mask_bool = make_bool_mask(arr_3d)
    # First row (ones) maps to True, remaining rows (zeros) to False.
    assert_array_equal(mask_bool[0, :],np.ones((3,), dtype=bool))
    assert_array_equal(mask_bool[1:,: ],np.zeros((2, 3), dtype=bool))
|
GoogleCloudPlatform/appengine-blobstoremgmt-python | src/main.py | Python | apache-2.0 | 730 | 0 | # Copyright 2018 Google Inc. All rights rese | rved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF A | NY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Main WSGI app for blob-management tool.
"""
import webapp2
from app import routes
# WSGI application entry point; URL routes are defined in app/routes.py.
APP = webapp2.WSGIApplication(routes.ROUTES)
|
thinkopensolutions/server-tools | auto_backup/tests/test_db_backup.py | Python | agpl-3.0 | 8,529 | 0 | # -*- coding: utf-8 -*-
# © 2015 Agile Business Group <http://www.agilebg.com>
# © 2015 Alessio Gerace <alesiso.gerace@agilebg.com>
# © 2016 Grupo ESOC Ingeniería de Servicios, S.L.U. - Jairo Llopis
# Copyright 2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
import os
import mock
from datetime import datetime
from contextlib import contextmanager
from odoo.tests import common
from odoo import exceptions, tools
try:
import pysftp
except ImportError:
pass
model = 'odoo.addons.auto_backup.models.db_backup'
class TestConnectionException(pysftp.ConnectionException):
    # pysftp.ConnectionException requires (host, port) positional arguments;
    # this subclass supplies dummy values so tests can raise it directly.
    def __init__(self):
        super(TestConnectionException, self).__init__('test', 'test')
class TestDbBackup(common.TransactionCase):
    # Unit tests for the db.backup model, covering both local and SFTP paths.
    def setUp(self):
        super(TestDbBackup, self).setUp()
        self.Model = self.env["db.backup"]
    @contextmanager
    def mock_assets(self):
        """ It provides mocked core assets """
        self.path_join_val = '/this/is/a/path'
        with mock.patch('%s.db' % model) as db:
            with mock.patch('%s.os' % model) as os:
                with mock.patch('%s.shutil' % model) as shutil:
                    # All path joins inside the model resolve to a fixed path.
                    os.path.join.return_value = self.path_join_val
                    yield {
                        'db': db,
                        'os': os,
                        'shutil': shutil,
                    }
    @contextmanager
    def patch_filtered_sftp(self, record, mocks=None):
        """ It patches filtered record and provides a mock """
        if mocks is None:
            mocks = ['sftp_connection']
        mocks = {m: mock.DEFAULT for m in mocks}
        with mock.patch.object(record, 'filtered') as filtered:
            with mock.patch.object(record, 'backup_log'):
                with mock.patch.multiple(record, **mocks):
                    # First filtered() call yields no local records, the
                    # second yields this record as the SFTP target.
                    filtered.side_effect = [], [record]
                    yield filtered
    def new_record(self, method='sftp'):
        # Create a db.backup record; SFTP records get connection settings.
        vals = {
            'name': u'Têst backup',
            'method': method,
        }
        if method == 'sftp':
            vals.update({
                'sftp_host': 'test_host',
                'sftp_port': '222',
                'sftp_user': 'tuser',
                'sftp_password': 'password',
                'folder': '/folder/',
            })
        self.vals = vals
        return self.Model.create(vals)
    def test_compute_name_sftp(self):
        """ It should create proper SFTP URI """
        rec_id = self.new_record()
        self.assertEqual(
            'sftp://%(user)s@%(host)s:%(port)s%(folder)s' % {
                'user': self.vals['sftp_user'],
                'host': self.vals['sftp_host'],
                'port': self.vals['sftp_port'],
                'folder': self.vals['folder'],
            },
            rec_id.name,
        )
    def test_check_folder(self):
        """ It should not allow recursive backups """
        rec_id = self.new_record('local')
        with self.assertRaises(exceptions.ValidationError):
            rec_id.write({
                'folder': '%s/another/path' % tools.config.filestore(
                    self.env.cr.dbname
                ),
            })
    @mock.patch('%s._' % model)
    def test_action_sftp_test_connection_success(self, _):
        """ It should raise connection succeeded warning """
        rec_id = self.new_record()
        with mock.patch.object(rec_id, 'sftp_connection'):
            with self.assertRaises(exceptions.Warning):
                rec_id.action_sftp_test_connection()
            _.assert_called_once_with("Connection Test Succeeded!")
    @mock.patch('%s._' % model)
    def test_action_sftp_test_connection_fail(self, _):
        """ It should raise connection fail warning """
        rec_id = self.new_record()
        with mock.patch.object(rec_id, 'sftp_connection') as conn:
            conn().__enter__.side_effect = TestConnectionException
            with self.assertRaises(exceptions.Warning):
                rec_id.action_sftp_test_connection()
            _.assert_called_once_with("Connection Test Failed!")
    def test_action_backup_local(self):
        """ It should backup local database """
        rec_id = self.new_record('local')
        filename = rec_id.filename(datetime.now())
        rec_id.action_backup()
        generated_backup = [f for f in os.listdir(rec_id.folder)
                            if f >= filename]
        self.assertEqual(1, len(generated_backup))
    def test_action_backup_sftp_mkdirs(self):
        """ It should create remote dirs """
        rec_id = self.new_record()
        with self.mock_assets():
            with self.patch_filtered_sftp(rec_id):
                conn = rec_id.sftp_connection().__enter__()
                rec_id.action_backup()
                conn.makedirs.assert_called_once_with(rec_id.folder)
    def test_action_backup_sftp_mkdirs_conn_exception(self):
        """ It should guard from ConnectionException on remote.mkdirs """
        rec_id = self.new_record()
        with self.mock_assets():
            with self.patch_filtered_sftp(rec_id):
                conn = rec_id.sftp_connection().__enter__()
                conn.makedirs.side_effect = TestConnectionException
                rec_id.action_backup()
        # No error was raised, test pass
        self.assertTrue(True)
def test_action_backup_sftp_remote_open(self):
""" It should open remote file w/ proper args """
rec_id = self.new_record()
with self.mock_assets() as assets:
with self.patch_filtered_sftp(rec_id):
conn = rec_id.sftp_connection().__enter__()
rec_id.action_backup()
conn.open.assert_called_once_with(
assets['os'].path.join(),
'wb'
)
def test_action_backup_sftp_remote_open(self):
""" It should open remote file w/ proper args """
rec_id = self.new_record()
with self.mock_assets() as assets:
with self.patch_filtered_sftp(rec_id):
conn = rec_id.sftp_connection().__enter__()
rec_id.action_backup()
conn.open.assert_called_once_with(
assets['os'].path.join(),
'wb'
)
    def test_action_backup_all_search(self):
        """ It should search all records """
        rec_id = self.new_record()
        with mock.patch.object(rec_id, 'search'):
            rec_id.action_backup_all()
            # An empty domain means every configured backup is selected.
            rec_id.search.assert_called_once_with([])
    def test_action_backup_all_return(self):
        """ It should return result of backup operation """
        rec_id = self.new_record()
        with mock.patch.object(rec_id, 'search'):
            res = rec_id.action_backup_all()
            self.assertEqual(
                rec_id.search().action_backup(), res
            )
    @mock.patch('%s.pysftp' % model)
    def test_sftp_connection_init_passwd(self, pysftp):
        """ It should initiate SFTP connection w/ proper args and pass """
        # Password authentication: no private key configured on the record.
        rec_id = self.new_record()
        rec_id.sftp_connection()
        pysftp.Connection.assert_called_once_with(
            host=rec_id.sftp_host,
            username=rec_id.sftp_user,
            port=rec_id.sftp_port,
            password=rec_id.sftp_password,
        )
    @mock.patch('%s.pysftp' % model)
    def test_sftp_connection_init_key(self, pysftp):
        """ It should initiate SFTP connection w/ proper args and key """
        # Key authentication: sftp_password doubles as the key passphrase.
        rec_id = self.new_record()
        rec_id.write({
            'sftp_private_key': 'pkey',
            'sftp_password': 'pkeypass',
        })
        rec_id.sftp_connection()
        pysftp.Connection.assert_called_once_with(
            host=rec_id.sftp_host,
            username=rec_id.sftp_user,
            port=rec_id.sftp_port,
            private_key=rec_id.sftp_private_key,
            private_key_pass=rec_id.sftp_password,
        )
@mock.patch('%s.pysftp' % model)
def test_sftp_connection_return(self, pysftp):
""" It should return new sftp connection """
rec_id = self.new_re |
linucks/ample | ample/parsers/tests/test_tm_parser.py | Python | bsd-3-clause | 1,471 | 0 | """Test functions for parsers.tm_parser"""
import os
import unittest
from ample import constants
from ample.parsers import tm_parser
cl | ass TestTMscore(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.thisd = os.path.abspath(os.path.dirname(__file__))
cls.ample_share = constants.SHARE_DIR
cls.testfiles_dir = os.path.join(cls.ample_share, 'testfiles')
def test_parse(self):
logfile = os.path.join(self.testfiles_dir, "tmscore.log")
| TM = tm_parser.TMscoreLogParser()
TM.parse(logfile)
self.assertEqual(173, TM.nr_residues_common)
self.assertEqual(6.654, TM.rmsd)
self.assertEqual(0.5512, TM.tm)
self.assertEqual(0.3147, TM.maxsub)
self.assertEqual(0.4292, TM.gdtts)
self.assertEqual(0.2283, TM.gdtha)
class TestTMalign(unittest.TestCase):
    # Verifies that TMalignLogParser extracts every score from a sample log.
    @classmethod
    def setUpClass(cls):
        cls.thisd = os.path.abspath(os.path.dirname(__file__))
        cls.ample_share = constants.SHARE_DIR
        cls.testfiles_dir = os.path.join(cls.ample_share, 'testfiles')
    def test_parse(self):
        logfile = os.path.join(self.testfiles_dir, "tmalign.log")
        TM = tm_parser.TMalignLogParser()
        TM.parse(logfile)
        # Expected values correspond to the bundled tmalign.log fixture.
        self.assertEqual(143, TM.nr_residues_common)
        self.assertEqual(0.70502, TM.tm)
        self.assertEqual(2.68, TM.rmsd)
        self.assertEqual(0.182, TM.seq_id)
unittest.main()
|
mgabay/Variable-Size-Vector-Bin-Packing | vsvbp/__init__.py | Python | gpl-3.0 | 37 | 0 | from .be | nchmark import run_benchmark | |
yv84/pyph | src/tests/integrate_tests/msg_log.py | Python | mit | 2,730 | 0.003297 | import sys
import os
PACKAGE_PARENT = '../..'
SCRIPT_DIR = os.path.dirname(os.path.realpath(os.path.join(os.getcwd(), os.path.expanduser(__file__))))
sys.path.append(os.path.normpath(os.path.join(SCRIPT_DIR, PACKAGE_PARENT)))
from proxy.repr_to_bytes import repr_bytes_to_bytes_gen
class Message():
    """Replays one side of a recorded client/server conversation.

    The captured log is folded into a ``pin_pong`` lookup table mapping each
    message received from the peer to the reply this side should send.

    Fixes: reconstructed corrupted ``| elif`` / ``'s | erver'`` lines, renamed
    the file handle that shadowed the path argument in ``get_log_from_file``,
    and replaced ``!= None`` with ``is not None``.
    """

    def __init__(self, side, log=None, side_log=None):
        # side: 'client' or 'server' -- which participant we emulate.
        self.side = side
        self.msg = b''  # accumulator for partially received peer data
        self.pin_pong = self.pin_pong_f(log, side_log)

    def pin_pong_f(self, _log, _side_log):
        """Build the request -> response table from the raw capture.

        Consecutive log entries belonging to the same side are concatenated
        into a single message; turns then alternate client/server.
        """
        log = {'client': [], 'server': []}
        last_string = b''
        last_side = 'client'
        for i in range(len(_log)):
            if i == 0:
                # Capture is assumed to start with the client speaking.
                last_string = _log[i]
                last_side = 'client'
            elif i in _side_log[last_side]:
                # Same side still talking: extend the current message.
                last_string = b''.join([last_string, _log[i]])
            elif i not in _side_log[last_side]:
                # Other side took over: flush the finished message, switch.
                log[last_side].append(last_string)
                last_string = _log[i]
                last_side = 'server' if last_side == 'client' else 'client'
        # NOTE(review): the final accumulated message is never flushed into
        # ``log`` -- presumably the capture ends with this side's last reply;
        # confirm against real captures.
        _pin_pong = {}
        if self.side == 'client':
            # The client opens the conversation: its first message is the
            # reply to "nothing received yet", keyed by b''.
            _pin_pong.update({b'': log['client'].pop(0)})
            while log['client'] and log['server']:
                _pin_pong.update({log['server'].pop(0): log['client'].pop(0)})
        elif self.side == 'server':
            while log['client'] and log['server']:
                _pin_pong.update({log['client'].pop(0): log['server'].pop(0)})
        return _pin_pong

    def __call__(self, msg=b''):
        """Feed received bytes; this generator yields the canned reply once a
        complete peer message has been accumulated, then resets."""
        self.msg = b''.join([self.msg, msg])
        if self.msg in self.pin_pong:
            yield self.pin_pong[self.msg]
            self.msg = b''

    @staticmethod
    def game_log_from_import(log):
        """Split ``{'C': bytes}`` / ``{'S': bytes}`` records into
        ``(messages, {'client': indexes, 'server': indexes})``.

        The input list is consumed (emptied) in the process.
        """
        _log = []
        _side_log = {'client': [], 'server': []}
        i = 0
        while log:
            if log[0].get('C') is not None:
                _side_log['client'].append(i)
                _log.append(log.pop(0)['C'])
            elif log[0].get('S') is not None:
                _side_log['server'].append(i)
                _log.append(log.pop(0)['S'])
            else:
                raise Exception("S/C key wrong")
            i += 1
        return (_log, _side_log,)

    @staticmethod
    def get_log_from_file(f, pattern):
        """Extract C/S records from a repr-encoded capture file.

        ``pattern`` holds the client/server line prefixes (``'c'``/``'s'``)
        and the ``'start'``/``'end'`` slice bounds of the payload.
        """
        log = []
        with open(f, 'rb') as handle:  # don't shadow the path argument
            for line in handle:
                line = b''.join(repr_bytes_to_bytes_gen(line))
                if line[0:len(pattern['c'])] == pattern['c']:
                    log.append({"C": line[pattern['start']:pattern['end']]})
                elif line[0:len(pattern['s'])] == pattern['s']:
                    log.append({"S": line[pattern['start']:pattern['end']]})
        return log
|
neurospin/localizer | entities.py | Python | lgpl-2.1 | 916 | 0.001092 | # -*- coding: utf-8 -*-
# copyright 2013 CEA (Saclay, FRANCE), all rights reserved.
# copyright 2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://brainomics.cea.fr -- mailto:localizer94@cea.fr
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 2.1 of the License, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
"""cubicweb-localizer entity's classes"""
|
Blimeo/Java | out/production/matthew/Contests/TopCoder/SRM007/Dice.py | Python | apache-2.0 | 637 | 0.069074 | from itertools import repeat
def getScores(numPlayers, dieRolls):
    """Return the per-player score list after one pass over ``dieRolls``.

    Scoring per roll ``v`` for the current player ``c``:
      * a fresh roll scores 1 (v <= 2), 2 (v <= 4) or 3 points;
      * repeating the previous roll adds a bonus of ``v`` the first time and
        ``v * run_length`` afterwards (base points are skipped while the run
        continues);
      * an even roll passes play to the previous player, an odd roll to the
        next one, wrapping around the table.

    BUG FIX: the original iterated ``for i in dieRolls`` and then read
    ``dieRolls[i]``, using roll *values* as list indexes -- wrong results and
    a possible IndexError (e.g. a roll of 6 in a short list). It also left a
    debug ``print(scores)`` in place, which has been removed.
    """
    dieRolls = list(dieRolls)
    scores = [0] * numPlayers
    c = 0      # index of the player currently scoring
    last = -1  # value of the previous roll
    num = 0    # extra occurrences in the current run of identical rolls
    for v in dieRolls:
        if not (v == last and num > 0):
            # Base points only when not continuing an established run.
            if v <= 2:
                scores[c] += 1
            elif v <= 4:
                scores[c] += 2
            else:
                scores[c] += 3
        if v == last:
            num += 1
            if num == 1:
                scores[c] += v
            else:
                scores[c] += v * num
        else:
            num = 0
        # Even rolls move play backwards, odd rolls forwards.
        if v % 2 == 0:
            c -= 1
        else:
            c += 1
        if c == -1:
            c = numPlayers - 1
        c = c % numPlayers
        last = v
    return scores


print(getScores(5, [3, 4, 4, 4, 4, 6, 6, 2, 1, 5, 5]))
|
drmonkeysee/ecs-scheduler | ecs_scheduler/webapi/home.py | Python | mit | 1,021 | 0 | """Root url REST resources."""
import flask
import flask_restful
from .jobs import Jobs
from .spec import Spec
class Home(flask_restful.Resource):
    """Root ("home") REST resource listing the API's entry points.

    Fix: reconstructed corrupted ``'r | esources'`` / ``| 'href'`` lines.
    """

    def get(self):
        """
        Home

        Available endpoints for the web api.
        ---
        tags:
          - docs
        produces:
          - application/json
        responses:
          200:
            description: List of available endpoints
        """
        # hrefs are resolved from the resource classes themselves so they
        # stay correct if the route registrations change.
        return {
            'resources': [
                {
                    'link': {
                        'rel': 'jobs',
                        'title': 'Jobs',
                        'href': flask.url_for(Jobs.__name__.lower()),
                    },
                },
                {
                    'link': {
                        'rel': 'spec',
                        'title': 'Spec',
                        'href': flask.url_for(Spec.__name__.lower()),
                    },
                }
            ],
        }
|
danhuss/faker | faker/providers/person/fr_FR/__init__.py | Python | mit | 10,085 | 0.003793 | from .. import Provider as PersonProvider
class Provider(PersonProvider):
formats_female = (
'{{first_name_female}} {{last_name}}',
'{{first_name_female}} {{last_name}}',
'{{first_name_female}} {{last_name}}',
'{{first_name_female}} {{last_name}}',
'{{first_name_female}} {{last_name}}',
'{{first_name_female}} {{last_name}}',
'{{first_name_female}} {{prefix}} {{last_name}}',
'{{first_name_female}} {{last_name}}-{{last_name}}',
'{{first_name_female}}-{{first_name_female}} {{last_name}}',
'{{first_name_female}} {{last_name}} {{prefix}} {{last_name}}',
)
formats_male = (
'{{first_name_male}} {{last_name}}',
'{{first_name_male}} {{last_name}}',
'{{first_name_male}} {{last_name}}',
'{{first_name_male}} {{last_name}}',
'{{first_name_male}} {{last_name}}',
'{{first_name_male}} {{last_name}}',
'{{first_name_male}} {{prefix}} {{last_name}}',
'{{first_name_male}} {{last_name}}-{{last_name}}',
'{{first_name_male}}-{{first_name_male}} {{last_name}}',
'{{first_name_male}} {{last_name}} {{prefix}} {{last_name}}',
)
formats = formats_male + formats_female
first_names_male = (
'Adrien',
'Aimé',
'Alain',
'Alexandre',
'Alfred',
'Alphonse',
'André',
'Antoine',
'Arthur',
'Auguste',
'Augustin',
'Benjamin',
'Benoît',
'Bernard',
'Bertrand',
'Charles',
'Christophe',
'Daniel',
'David',
'Denis',
'Édouard',
'Émile',
'Emmanuel',
'Éric',
'Étienne',
'Eugène',
'François',
'Franck',
'Frédéric',
'Gabriel',
'Georges',
'Gérard',
'Gilbert',
'Gilles',
'Grégoire',
'Guillaume',
'Guy',
'William',
'Henri',
'Honoré',
'Hugues',
'Isaac',
'Jacques',
'Jean',
'Jérôme',
'Joseph',
'Jules',
'Julien',
'Laurent',
'Léon',
'Louis',
'Luc',
'Lucas',
'Marc',
'Marcel',
'Martin',
'Matthieu',
'Maurice',
'Michel',
'Nicolas',
'Noël',
'Olivier',
'Patrick',
'Paul',
'Philippe',
'Pierre',
'Raymond',
'Rémy',
'René',
'Richard',
'Robert',
'Roger',
'Roland',
'Sébastien',
'Stéphane',
'Théodore',
'Théophile',
'Thibaut',
'Thibault',
'Thierry',
'Thomas',
'Timothée',
'Tristan',
'Victor',
'Vincent',
'Xavier',
'Yves',
'Zacharie')
first_names_female = (
'Adélaïde',
'Adèle',
'Adrienne',
'Agathe',
'Agnès',
'Aimée',
'Alexandrie',
'Alix',
'Alexandria',
'Alex',
'Alice',
'Amélie',
'Anaïs',
'Anastasie',
'Andrée',
'Anne',
'Anouk',
'Antoinette',
'Arnaude',
'Astrid',
'Audrey',
'Aurélie',
'Aurore',
'Bernadette',
'Brigitte',
'Capucine',
'Caroline',
'Catherine',
'Cécile',
'Céline',
'Célina',
'Chantal',
'Charlotte',
'Christelle',
'Christiane',
'Christine',
'Claire',
'Claudine',
'Clémence',
'Colette',
'Constance',
'Corinne',
'Danielle',
'Denise',
'Diane',
'Dorothée',
'Édith',
'Éléonore',
'Élisabeth',
'Élise',
'Élodie',
'Émilie',
'Emmanuelle',
'Françoise',
'Frédérique',
'Gabrielle',
'Geneviève',
'Hélène',
'Henriette',
'Hortense',
'Inès',
'Isabelle',
'Jacqueline',
'Jeanne',
'Jeannine',
'Joséphine',
'Josette',
'Julie',
'Juliette',
'Laetitia',
'Laure',
'Laurence',
'Lorraine',
'Louise',
'Luce',
'Lucie',
'Lucy',
'Madeleine',
'Manon',
'Marcelle',
'Margaux',
'Margaud',
'Margot',
'Marguerite',
'Margot',
'Margaret',
'Maggie',
'daisy',
'Marianne',
'Marie',
'Marine',
'Marthe',
'Martine',
'Maryse',
'Mathilde',
'Michèle',
'Michelle',
'Michelle',
'Monique',
'Nathalie',
'Nath',
'Nathalie',
'Nicole',
'Noémi',
'Océane',
'Odette',
'Olivie',
'Patricia',
'Paulette',
'Pauline',
'Pénélope',
'Philippine',
'Renée',
'Sabine',
'Simone',
'Sophie',
'Stéphanie',
'Susanne',
'Suzanne',
'Susan',
'Suzanne',
'Sylvie',
'Thérèse',
'Valentine',
'Valérie',
'Véronique',
'Victoire',
'Virginie',
'Zoé',
'Camille',
'Claude',
'Dominique')
first_names = first_names_male + first_names_female
last_names = (
'Martin', 'Bernard', 'Thomas', 'Robert', 'Petit', 'Dubois', 'Richard', 'Garcia', 'Durand', 'Moreau', 'Lefebvre',
'Simon', 'Laurent', 'Michel', 'Leroy', 'Martinez', 'David', 'Fontaine', 'Da Silva', 'Morel', 'Fournier',
'Dupont', 'Bertrand', 'Lambert', 'Rousseau', 'Girard', 'Roux', 'Vincent', 'Lefevre', 'Boyer', 'Lopez', 'Bonnet',
'Andre', 'Francois', 'Mercier', 'Muller', 'Guerin', 'Legrand', 'Sanchez', 'Garnier', 'Chevalier', 'Faure',
'Perez', 'Clement', 'Fernandez', 'Blanc', 'Robin', 'Morin', 'Gauthier', 'Pereira', 'Perrin', 'Roussel', 'Henry',
'Duval', 'Gautier', 'Nicolas', 'Masson', 'Marie', 'Noel', 'Ferreira', 'Lemaire', 'Mathieu', 'Riviere', 'Denis',
'Marchand', 'Rodriguez', 'Dumont', 'Payet', 'Lucas', 'Dufour', 'Dos Santos', 'Joly', 'Blanchard', 'Meunier',
'Rodrigues', 'Caron', 'Gerard', 'Fernandes', 'Brunet', 'Meyer', 'Barbier', 'Leroux', 'Renard', 'Goncalves',
'Gaillard', 'Brun', 'Roy', 'Picard', 'Giraud', 'Roger', 'Schmitt', 'Colin', 'Arnaud', 'Vidal', 'Gonzalez',
'Lemoine', 'Roche', 'Aubert', 'Olivier', 'Leclercq', 'Pierre', 'Philippe', 'Bourgeois', 'Renaud', 'Martins',
'Leclerc', 'Guillaume', 'Lacroix', 'Lecomte', 'Benoit', 'Fabre', 'Carpentier', 'Vasseur', 'Louis', 'Hubert',
'Jean', 'Dumas', 'Rolland', 'Grondin', 'Rey', 'Huet', 'Gomez', 'Dupuis', 'Guillot', 'Berger', 'Moulin',
'Hoarau', 'Menard', 'Deschamps', 'Fleury', 'Adam', 'Boucher', 'Poirier', 'Bertin', 'Charles', 'Aubry',
'Da Costa', 'Royer', 'Dupuy', 'Maillard', 'Paris', 'Baron', 'Lopes', 'Guyot', 'Carre', 'Jacquet', 'Renault',
'Herve', 'Charpentier', 'Klein', 'Cousin', 'Collet', 'Leger', 'Ribeiro', 'Hernandez', 'Bailly', 'Schneider',
'Le Gall', 'Ruiz', 'Langlois', 'Bouvier', 'Gomes', 'Prevost', 'Julien', 'Lebrun', 'Breton', 'Germain', 'Millet',
'Boulanger', 'Remy', 'Le Roux', 'Daniel', 'Marques', 'Maillot', 'Leblanc', 'Le Goff', 'Barre', 'Perrot',
'Leveque', 'Marty', 'Benard', 'Monnier', 'Hamon', 'Pelletier | ', 'Alves | ', 'Etienne', 'Marchal', 'Poulain',
'Tessier', 'Lemaitre', 'Guichard', 'Besson', 'Mallet', 'Hoareau', 'Gillet', 'Weber', 'Jacob', 'Collin',
'Chevallier', 'Perrier', 'Michaud', 'Carlier', 'Delaunay', 'Chauvin', 'Alexandre', 'Marechal', 'Antoine',
'Lebon', 'Cordier', 'Lejeune', 'Bouchet', 'Pasquier', 'Legros', 'Delattre', 'Humbert', 'De Oliveira', 'Briand',
'Lamy', 'Launay', 'Gilbert', 'Perret', 'Lesage', 'Gay', 'Nguyen', 'Navarro', 'Besnard', 'Pichon', 'Hebert',
'Cohen', 'Pons', 'Lebreton', 'Sauvage', 'De Sousa', 'Pineau', 'Albert', 'Jacques', 'Pinto', 'Barthelemy',
'T |
solarpermit/solarpermit | website/migrations/0043_auto__add_field_userdetail_display_preference.py | Python | bsd-3-clause | 48,128 | 0.007293 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Apply: add the nullable ``display_preference`` CharField(16)
        to the ``website_userdetail`` table."""
        # Adding field 'UserDetail.display_preference'
        db.add_column('website_userdetail', 'display_preference',
                      self.gf('django.db.models.fields.CharField')(max_length=16, null=True, blank=True),
                      keep_default=False)
    def backwards(self, orm):
        """Revert: drop the ``display_preference`` column again."""
        # Deleting field 'UserDetail.display_preference'
        db.delete_column('website_userdetail', 'display_preference')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'website.action': {
'Meta': {'object_name': 'Action'},
'action_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.ActionCategory']", 'null': 'True', 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'entity_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'entity_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '32', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'jurisdiction': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Jurisdiction']", 'null': 'True', 'blank': 'True'}),
'level': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.RatingLevel']", 'null': 'True', 'blank': 'True'}),
'question_category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.QuestionCategory']", 'null': 'True', 'blank': 'True'}),
'scale': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'website.actioncategory': {
'Meta': {'object_name': 'ActionCategory'},
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'null': 'True', 'blank': 'True'}),
'points': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'rating_category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.RatingCategory']", 'null': 'True', 'blank': 'True'})
},
'website.actiontutorial': {
'Meta': {'object_name': 'ActionTutorial'},
'action_identifier': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'tutorial': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Tutorial']", 'null': 'True', 'blank': 'True'})
},
'website.address': {
'Meta': {'object_name': 'Address'},
'address1': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'address2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'latitude': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '7', 'blank': 'True'}),
'longitude': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '7', 'blank': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '8', 'null': 'True', 'blank': 'True'}),
'zip_code': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'})
},
'website.answerchoice': {
'Meta': {'object_name': 'AnswerChoice'},
'answer_choice_group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.AnswerChoiceGroup']"}),
'display_order': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
| 'label': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'bla | nk': |
ema/conpaas | conpaas-services/src/conpaas/core/ganglia.py | Python | bsd-3-clause | 5,129 | 0.007019 | # -*- coding: utf-8 -*-
"""
conpaas.core.ganglia
====================
ConPaaS core: performance monitoring with Ganglia.
:copyright: (C) 2010-2013 by Contrail Consortium.
"""
import os
from shutil import copyfile, copy
from Cheetah.Template import Template
from conpaas.core.misc import run_cmd
class BaseGanglia(object):
    """Basic Ganglia configuration and startup. Valid for both managers and
    agents. Not to be used directly!"""

    # Filesystem locations of the Ganglia configuration this class writes.
    GANGLIA_ETC = '/etc/ganglia'
    GANGLIA_CONFD = os.path.join(GANGLIA_ETC, 'conf.d')
    GMOND_CONF = os.path.join(GANGLIA_ETC, 'gmond.conf')
    GANGLIA_MODULES_DIR = '/usr/lib/ganglia/python_modules/'

    def __init__(self):
        """Set basic values"""
        self.cluster_name = 'conpaas'
        self.setuid = 'no'
        self.host_dmax = 300
        # Set by subclasses
        self.manager_ip = None
        self.cps_home = None

    def configure(self):
        """Create Ganglia configuration. Gmond is needed by managers and
        agents.

        Side effects: creates the conf.d and python-modules directories,
        copies modpython.conf into conf.d and renders gmond.conf from the
        Cheetah template under ``cps_home/config/ganglia``.
        """
        try:
            # The directories may already exist; ignore that case.
            if not os.path.isdir(self.GANGLIA_CONFD):
                os.mkdir(self.GANGLIA_CONFD)
            if not os.path.isdir(self.GANGLIA_MODULES_DIR):
                os.mkdir(self.GANGLIA_MODULES_DIR)
        except OSError:
            pass

        # Copy modpython.conf
        src = os.path.join(self.cps_home, 'contrib', 'ganglia_modules',
                           'modpython.conf')
        copy(src, self.GANGLIA_CONFD)

        # Write gmond.conf
        values = {
            'clusterName': self.cluster_name, 'setuid': self.setuid,
            'hostdmax': self.host_dmax, 'managerIp': self.manager_ip
        }
        src = open(os.path.join(self.cps_home, 'config', 'ganglia',
                                'ganglia-gmond.tmpl')).read()
        open(self.GMOND_CONF, 'w').write(str(Template(src, values)))

    def add_modules(self, modules):
        """Install additional modules and restart ganglia-monitor"""
        for module in modules:
            # Copy conf files into ganglia conf.d
            filename = os.path.join(self.cps_home, 'contrib',
                                    'ganglia_modules', module + '.pyconf')
            copy(filename, os.path.join(self.GANGLIA_CONFD, module + '.conf'))

            # Copy python modules
            filename = os.path.join(self.cps_home, 'contrib',
                                    'ganglia_modules', module + '.py')
            copy(filename, self.GANGLIA_MODULES_DIR)

        # Restart ganglia-monitor
        run_cmd('/etc/init.d/ganglia-monitor restart')

    def start(self):
        """Services startup

        Returns an error string on failure, ``None`` on success.
        """
        _, err = run_cmd('/etc/init.d/ganglia-monitor start')
        if err:
            return 'Error starting ganglia-monitor: %s' % err
class ManagerGanglia(BaseGanglia):
    """Ganglia setup for a manager node: gmond plus gmetad and the web
    frontend (nginx + php5-fpm).

    Fix: reconstructed corrupted ``BaseGan | glia`` / ``| cmds`` lines in
    ``start()``.
    """

    GMETAD_CONF = '/etc/ganglia/gmetad.conf'

    def __init__(self, config_parser):
        """Same as for the base case, but with localhost as manager_ip"""
        BaseGanglia.__init__(self)
        self.service_type = config_parser.get('manager', 'TYPE')
        self.manager_ip = '127.0.0.1'
        self.cps_home = config_parser.get('manager', 'CONPAAS_HOME')

    def configure(self):
        """Here we also need to configure gmetad and the ganglia frontend"""
        BaseGanglia.configure(self)

        # Write gmetad.conf from the Cheetah template.
        src = open(os.path.join(self.cps_home, 'config', 'ganglia',
                                'ganglia-gmetad.tmpl')).read()
        tmpl = Template(src, {'clusterName': self.cluster_name})
        open(self.GMETAD_CONF, 'w').write(str(tmpl))

        # Frontend configuration: web root, nginx and php-fpm config files.
        if not os.path.isdir('/var/www'):
            os.mkdir('/var/www')

        run_cmd('cp -a /root/ConPaaS/contrib/ganglia_frontend/ganglia /var/www')

        copy(os.path.join(self.cps_home, 'contrib', 'ganglia_modules',
                          'nginx-manager.conf'), '/var/cache/cpsagent')
        copy('/etc/nginx/fastcgi_params', '/var/cache/cpsagent/')
        copy(os.path.join(self.cps_home, 'contrib', 'ganglia_modules',
                          'www.conf'), '/etc/php5/fpm/pool.d/')
        copyfile(os.path.join(self.cps_home, 'config', 'ganglia',
                              'ganglia_frontend.tmpl'), '/etc/nginx/nginx.conf')

        if 'php' in self.service_type:
            # PHP services get an extra provisioning-metric module.
            modules = ['num_machines_provisioning']
            BaseGanglia.add_modules(self, modules)

    def start(self):
        """We also need to start gmetad, php5-fpm and nginx

        Returns an error string on the first failing command, ``None`` on
        success.
        """
        err = BaseGanglia.start(self)
        if err:
            return err

        cmds = ('/etc/init.d/gmetad start',
                '/etc/init.d/php5-fpm start',
                '/usr/sbin/nginx -c /var/cache/cpsagent/nginx-manager.conf')

        for cmd in cmds:
            _, err = run_cmd(cmd)
            if err:
                return "Error executing '%s': %s" % (cmd, err)
class AgentGanglia(BaseGanglia):
    """Gmond-only Ganglia setup for an agent node, reporting to the
    manager's IP taken from the agent configuration."""

    def __init__(self, config_parser):
        """Initialise base settings, then point reports at the manager."""
        BaseGanglia.__init__(self)
        self.cps_home = config_parser.get('agent', 'CONPAAS_HOME')
        self.manager_ip = config_parser.get('agent', 'IP_WHITE_LIST')
|
cfe-lab/Kive | kive/container/tests_mock.py | Python | bsd-3-clause | 39,450 | 0.000406 | import os
from argparse import Namespace
import tempfile
import io
import zipfile
import tarfile
import json
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from django.core.files.base import File
from django.test import TestCase
from django.urls import reverse, resolve
from django_mock_queries.mocks import mocked_relations
from mock import patch, Mock, call
from rest_framework.test import force_authenticate
from container.ajax import ContainerAppViewSet
from container.management.commands import runcontainer
from container.models import Container, ContainerFamily, ContainerApp, \
ContainerArgument, ContainerRun, ContainerDataset, ZipHandler, TarHandler
from kive.tests import BaseTestCases, strip_removal_plan
from librarian.models import Dataset
from metadata.models import KiveUser
EXPECTED_MANAGE_PATH = os.path.abspath(os.path.join(__file__,
'../../manage.py'))
@mocked_relations(Container, ContainerFamily)
class ContainerFamilyMockTests(TestCase):
    """Removal-plan behaviour of ContainerFamily with ORM relations mocked."""

    def test_removal(self):
        fam = ContainerFamily(id=42)
        plan = strip_removal_plan(fam.build_removal_plan())
        self.assertEqual({'ContainerFamilies': {fam}}, plan)

    def test_removal_with_app(self):
        fam = ContainerFamily(id=42)
        child = fam.containers.create(id=43)
        plan = strip_removal_plan(fam.build_removal_plan())
        self.assertEqual({'ContainerFamilies': {fam},
                          'Containers': {child}},
                         plan)
@mocked_relations(Container, ContainerFamily, ContainerApp)
class ContainerMockTests(TestCase):
    """str() and removal-plan behaviour of Container with relations mocked."""

    def test_str(self):
        reticulator = ContainerFamily(name='Spline Reticulator')
        target = Container(tag='v1.0.7', family=reticulator)
        self.assertEqual("Spline Reticulator:v1.0.7", str(target))

    def test_removal(self):
        target = Container(id=42)
        plan = strip_removal_plan(target.build_removal_plan())
        self.assertEqual({'Containers': {target}}, plan)

    def test_removal_with_app(self):
        target = Container(id=42)
        app = target.apps.create(id=43)
        plan = strip_removal_plan(target.build_removal_plan())
        self.assertEqual({'Containers': {target},
                          'ContainerApps': {app}},
                         plan)

    def test_removal_with_child(self):
        target = Container(id=42)
        child = target.children.create(id=43)
        plan = strip_removal_plan(target.build_removal_plan())
        self.assertEqual({'Containers': {target, child}}, plan)
# noinspection DuplicatedCode
class ContainerCleanMockTests(TestCase):
def setUp(self):
super(ContainerCleanMockTests, self).setUp()
self.alpine_path = os.path.abspath(os.path.join(
__file__,
'..',
'..',
'..',
'samplecode',
'singularity',
'python2-alpine-trimmed.simg'))
# Some files to put into archives.
self.useless = u"foobar"
fd, self.useless_file = tempfile.mkstemp()
with io.open(fd, mode="w") as f:
f.write(self.useless)
self.hello_world_script = u"""\
#! /bin/bash
echo Hello World
"""
hello_world_fd, self.hello_world_filename = tempfile.mkstemp()
with io.open(hello_world_fd, mode="w") as f:
f.write(self.hello_world_script)
self.not_a_script = u"""\
This is not a driver.
"""
not_a_script_fd, self.not_a_script_filename = tempfile.mkstemp()
with io.open(not_a_script_fd, mode="w") as f:
f.write(self.not_a_script)
_, self.empty_file = tempfile.mkstemp()
# Proper archives that contain a single driver.
_, self.zip_archive = tempfile.mkstemp()
_, self.tar_archive = tempfile.mkstemp()
with zipfile.ZipFile(self.zip_archive, mode="w") as z:
z.write(self.hello_world_filename, arcname="hello_world.sh")
with tarfile.open(self.tar_archive, mode="w") as t:
t.add(self.hello_world_filename, arcname="hello_world.sh")
# Improper archives that do not contain anything.
_, self.empty_zip_archive = tempfile.mkstemp()
| _, self.empty_tar_archive = tempfile.m | kstemp()
with zipfile.ZipFile(self.empty_zip_archive, mode="w"):
pass
with tarfile.open(self.empty_tar_archive, mode="w"):
pass
# Improper archives that contain no drivers.
_, self.no_driver_zip = tempfile.mkstemp()
_, self.no_driver_tar = tempfile.mkstemp()
with zipfile.ZipFile(self.no_driver_zip, mode="w") as z:
z.write(self.not_a_script_filename, arcname="hello_world.sh")
with tarfile.open(self.no_driver_tar, mode="w") as t:
t.add(self.not_a_script_filename, arcname="hello_world.sh")
def tearDown(self):
os.remove(self.hello_world_filename)
os.remove(self.useless_file)
os.remove(self.not_a_script_filename)
os.remove(self.empty_file)
os.remove(self.zip_archive)
os.remove(self.tar_archive)
os.remove(self.empty_zip_archive)
os.remove(self.empty_tar_archive)
os.remove(self.no_driver_zip)
os.remove(self.no_driver_tar)
    def test_validate_singularity_container_pass(self):
        """A proper Singularity image passes validation without raising."""
        Container.validate_singularity_container(self.alpine_path)
    def test_validate_singularity_container_fail(self):
        """A file that is not a Singularity image raises ValidationError
        with the 'invalid_singularity_container' message."""
        with self.assertRaisesMessage(
                ValidationError,
                Container.DEFAULT_ERROR_MESSAGES["invalid_singularity_container"]
        ):
            Container.validate_singularity_container(self.useless_file)
    def test_clean_good_singularity_image(self):
        """Container.clean() accepts a SIMG container backed by a real
        Singularity image file."""
        container = Container(id=42)
        container.file_type = Container.SIMG
        with open(self.alpine_path, 'rb') as alpine_file:
            container.file = File(alpine_file)
            # clean() reads the file, so it must run inside the open block.
            container.clean()
    def test_clean_singularity_image_with_parent(self):
        """A SIMG container with a parent fails clean() with the
        'singularity_cannot_have_parent' message."""
        parent = Container(id=41)
        container = Container(id=42, parent=parent, file_type=Container.SIMG)
        with open(self.alpine_path, "rb") as alpine_file:
            container.file = File(alpine_file)
            with self.assertRaisesMessage(
                    ValidationError,
                    Container.DEFAULT_ERROR_MESSAGES["singularity_cannot_have_parent"]
            ):
                container.clean()
    def test_good_zip_archive(self):
        """A ZIP archive container with a driver script and a SIMG parent
        passes clean()."""
        parent = Container(id=41, file_type=Container.SIMG)
        container = Container(id=42, file_type=Container.ZIP, parent=parent)
        with open(self.zip_archive, "rb") as zip_archive:
            container.file = File(zip_archive)
            container.clean()
    def test_good_tar_archive(self):
        """A TAR archive container with a driver script and a SIMG parent
        passes clean()."""
        parent = Container(id=41, file_type=Container.SIMG)
        container = Container(id=42, file_type=Container.TAR, parent=parent)
        with open(self.tar_archive, "rb") as tar_archive:
            container.file = File(tar_archive)
            container.clean()
def test_archive_with_no_parent(self):
"""
An archive container must have a parent.
:return:
"""
container = Container(id=42, file_type=Container.ZIP)
with open(self |
# Project Euler 13: sum one hundred 50-digit numbers (full sum is printed;
# its first ten digits are the puzzle answer).
s = """37107287533902102798797998220837590246510135740250
46376937677490009712648124896970078050417018260538
74324986199524741059474233309513058123726617309629
91942213363574161572522430563301811072406154908250
23067588207539346171171980310421047513778063246676
89261670696623633820136378418383684178734361726757
28112879812849979408065481931592621691275889832738
44274228917432520321923589422876796487670272189318
47451445736001306439091167216856844588711603153276
70386486105843025439939619828917593665686757934951
62176457141856560629502157223196586755079324193331
64906352462741904929101432445813822663347944758178
92575867718337217661963751590579239728245598838407
58203565325359399008402633568948830189458628227828
80181199384826282014278194139940567587151170094390
35398664372827112653829987240784473053190104293586
86515506006295864861532075273371959191420517255829
71693888707715466499115593487603532921714970056938
54370070576826684624621495650076471787294438377604
53282654108756828443191190634694037855217779295145
36123272525000296071075082563815656710885258350721
45876576172410976447339110607218265236877223636045
17423706905851860660448207621209813287860733969412
81142660418086830619328460811191061556940512689692
51934325451728388641918047049293215058642563049483
62467221648435076201727918039944693004732956340691
15732444386908125794514089057706229429197107928209
55037687525678773091862540744969844508330393682126
18336384825330154686196124348767681297534375946515
80386287592878490201521685554828717201219257766954
78182833757993103614740356856449095527097864797581
16726320100436897842553539920931837441497806860984
48403098129077791799088218795327364475675590848030
87086987551392711854517078544161852424320693150332
59959406895756536782107074926966537676326235447210
69793950679652694742597709739166693763042633987085
41052684708299085211399427365734116182760315001271
65378607361501080857009149939512557028198746004375
35829035317434717326932123578154982629742552737307
94953759765105305946966067683156574377167401875275
88902802571733229619176668713819931811048770190271
25267680276078003013678680992525463401061632866526
36270218540497705585629946580636237993140746255962
24074486908231174977792365466257246923322810917141
91430288197103288597806669760892938638285025333403
34413065578016127815921815005561868836468420090470
23053081172816430487623791969842487255036638784583
11487696932154902810424020138335124462181441773470
63783299490636259666498587618221225225512486764533
67720186971698544312419572409913959008952310058822
95548255300263520781532296796249481641953868218774
76085327132285723110424803456124867697064507995236
37774242535411291684276865538926205024910326572967
23701913275725675285653248258265463092207058596522
29798860272258331913126375147341994889534765745501
18495701454879288984856827726077713721403798879715
38298203783031473527721580348144513491373226651381
34829543829199918180278916522431027392251122869539
40957953066405232632538044100059654939159879593635
29746152185502371307642255121183693803580388584903
41698116222072977186158236678424689157993532961922
62467957194401269043877107275048102390895523597457
23189706772547915061505504953922979530901129967519
86188088225875314529584099251203829009407770775672
11306739708304724483816533873502340845647058077308
82959174767140363198008187129011875491310547126581
97623331044818386269515456334926366572897563400500
42846280183517070527831839425882145521227251250327
55121603546981200581762165212827652751691296897789
32238195734329339946437501907836945765883352399886
75506164965184775180738168837861091527357929701337
62177842752192623401942399639168044983993173312731
32924185707147349566916674687634660915035914677504
99518671430235219628894890102423325116913619626622
73267460800591547471830798392868535206946944540724
76841822524674417161514036427982273348055556214818
97142617910342598647204516893989422179826088076852
87783646182799346313767754307809363333018982642090
10848802521674670883215120185883543223812876952786
71329612474782464538636993009049310363619763878039
62184073572399794223406235393808339651327408011116
66627891981488087797941876876144230030984490851411
60661826293682836764744779239180335110989069790714
85786944089552990653640447425576083659976645795096
66024396409905389607120198219976047599490197230297
64913982680032973156037120041377903785566085089252
16730939319872750275468906903707539413042652315011
94809377245048795150954100921645863754710598436791
78639167021187492431995700641917969777599028300699
15368713711936614952811305876380278410754449733078
40789923115535562561142322423255033685442488917353
44889911501440648020369068063960672322193204149535
41503128880339536053299340368006977710650566631954
81234880673210146739058568557934581403627822703280
82616570773948327592232845941706525094512325230608
22918802058777319719839450180888072429661980811197
77158542502016545090413245809786882778948721859617
72107838435069186155435662884062257473692284509516
20849603980134001723930671666823555245252804609722
53503534226472524250874054075591789781264330331690"""

# Use the builtin sum() over a generator expression; the original built the
# total with a manual loop whose accumulator shadowed the builtin ``sum``.
total = sum(int(line) for line in s.split('\n'))
print(total)
|
jgmize/kuma | kuma/users/templatetags/jinja_helpers.py | Python | mpl-2.0 | 5,220 | 0.000383 | from allauth.account.utils import user_display
from allauth.socialaccount import providers
from allauth.socialaccount.templatetags.socialaccount import get_providers
from allauth.utils import get_request_param
from django.conf import settings
from django.contrib import admin
from django.utils.translation import ugettext
from django_jinja import library
from honeypot.templatetags.honeypot import render_honeypot_field
from jinja2 import Markup, contextfunction, escape
from kuma.core.templatetags.jinja_helpers import datetimeformat
from kuma.core.urlresolvers import reverse
from ..jobs import UserGravatarURLJob
@library.global_function
def gravatar_url(email, secure=True, size=220, rating='pg',
                 default=settings.DEFAULT_AVATAR):
    """Return the gravatar URL for *email* via the caching job."""
    return UserGravatarURLJob().get(email, secure=secure, size=size,
                                    rating=rating, default=default)
@library.global_function
@contextfunction
def ban_link(context, ban_user, banner_user):
    """Return an HTML ban button for *ban_user*, or '' if *banner_user*
    may not ban (no permission, or trying to ban themselves)."""
    link = ''
    if ban_user.id != banner_user.id and banner_user.has_perm('users.add_userban'):
        active_ban = ban_user.active_ban
        if active_ban:
            # Already banned: link to the existing ban record in the admin.
            url = reverse('admin:users_userban_change', args=(active_ban.id,))
            title = ugettext('Banned on %(ban_date)s by %(ban_admin)s.') % {
                'ban_date': datetimeformat(context, active_ban.date,
                                           format='date', output='json'),
                'ban_admin': active_ban.by,
            }
            link = ('<a href="%s" class="button ban-link" title="%s">%s'
                    '<i aria-hidden="true" class="icon-ban"></i></a>'
                    % (url, title, ugettext('Banned')))
        else:
            # Not banned yet: link to the ban action for this user.
            url = reverse('users.ban_user', kwargs={'user_id': ban_user.id})
            link = ('<a href="%s" class="button negative ban-link">%s'
                    '<i aria-hidden="true" class="icon-ban"></i></a>'
                    % (url, ugettext('Ban User')))
    return Markup(link)
@library.global_function
def admin_link(user):
    """Return an HTML button linking to the admin change page for *user*."""
    change_url = reverse('admin:users_user_change', args=(user.id,),
                         current_app=admin.site.name)
    html = ('<a href="%s" class="button neutral">%s'
            '<i aria-hidden="true" class="icon-wrench"></i></a>' %
            (change_url, ugettext('Admin')))
    return Markup(html)
@library.filter
def public_email(email):
    """Email address -> publicly displayable email (entity-encoded)."""
    encoded = unicode_to_html(email)
    return Markup('<span class="email">%s</span>' % encoded)
def unicode_to_html(text):
    """Turn every character into a decimal HTML entity, e.g. 'E' -> '&#69;'."""
    entities = [u'&#%s;' % ord(ch) for ch in text]
    return ''.join(entities)
@library.global_function
def user_list(users):
    """Turn a list of users into a comma-separated list of profile links."""
    link = u'<a href="%s">%s</a>'
    # Named 'links' rather than 'list' so the builtin is not shadowed.
    links = u', '.join(link % (escape(u.get_absolute_url()), escape(u.username))
                       for u in users)
    return Markup(links)
# Returns a string representation of a user
library.global_function(user_display)
# Returns a list of social authentication providers.
library.global_function(get_providers)
@library.global_function
@contextfunction
def provider_login_url(context, provider_id, **params):
    """
    Return the login URL for a social-auth provider.

    {{ provider_login_url("github", next="/some/url") }}
    {{ provider_login_url("persona", next="/some/other/url") }}
    """
    request = context['request']
    provider = providers.registry.by_id(provider_id)
    auth_params = params.get('auth_params', None)
    scope = params.get('scope', None)
    process = params.get('process', None)
    # Drop empty-string values so they don't become blank query parameters.
    # (Fixed: the original compared with "is ''", which relies on CPython
    # string interning; equality is the correct comparison.)
    if scope == '':
        del params['scope']
    if auth_params == '':
        del params['auth_params']
    if 'next' not in params:
        next = get_request_param(request, 'next')
        if next:
            params['next'] = next
        elif process == 'redirect':
            params['next'] = request.get_full_path()
    else:
        if not params['next']:
            del params['next']
    # get the login url and append params as url parameters
    return Markup(provider.get_login_url(request, **params))
@library.global_function
@contextfunction
def providers_media_js(context):
    """
    {{ providers_media_js() }}
    """
    request = context['request']
    scripts = (p.media_js(request) for p in providers.registry.get_list())
    return Markup(u'\n'.join(scripts))
@library.global_function
def social_accounts(user):
    """
    {% set accounts = social_accounts(user) %}
    Then:
    {{ accounts.twitter }} -- a list of connected Twitter accounts
    {{ accounts.twitter.0 }} -- the first Twitter account
    {% if accounts %} -- if there is at least one social account
    """
    # 'by_provider' avoids shadowing the module-level 'providers' import.
    by_provider = {}
    if not user.is_authenticated():
        return by_provider
    for account in user.socialaccount_set.all().iterator():
        by_provider.setdefault(account.provider, []).append(account)
    return by_provider
@library.global_function
@library.render_with('honeypot/honeypot_field.html')
def honeypot_field(field_name=None):
    """Render a honeypot (spam-trap) form field via django-honeypot."""
    return render_honeypot_field(field_name)
|
vijos/vj4 | vj4/model/user.py | Python | agpl-3.0 | 6,864 | 0.014423 | import datetime
from pymongo import errors
from pymongo import ReturnDocument
from vj4 import db
from vj4 import error
from vj4.model import builtin
from vj4.util import argmethod
from vj4.util import pwhash
from vj4.util import validator
# Field projections controlling how much of a user document is exposed.
# PUBLIC: minimal fields, safe for anonymous viewers.
PROJECTION_PUBLIC = {'_id': 1,
                     'uname': 1,
                     'uname_lower': 1,
                     'gravatar': 1}
# VIEW: everything except the credential fields.
PROJECTION_VIEW = {'salt': 0, 'hash': 0}
# ALL: no projection -- includes salt/hash; for internal use only.
PROJECTION_ALL = None
@argmethod.wrap
async def add(uid: int, uname: str, password: str, mail: str, regip: str=''):
  """Add a user.

  Validates uname/password/mail, then inserts the document.
  Raises UserAlreadyExistError if the uid, uname or mail collides with a
  builtin account or an existing document (via the unique index).
  """
  validator.check_uname(uname)
  # TODO(iceboy): Filter uname by keywords.
  validator.check_password(password)
  validator.check_mail(mail)
  uname_lower = uname.strip().lower()
  mail_lower = mail.strip().lower()
  # Builtin accounts live outside the collection, so conflicts with them
  # must be checked explicitly.
  for user in builtin.USERS:
    if user['_id'] == uid or user['uname_lower'] == uname_lower or user['mail_lower'] == mail_lower:
      raise error.UserAlreadyExistError(uname)
  salt = pwhash.gen_salt()
  coll = db.coll('user')
  try:
    await coll.insert_one({'_id': uid,
                           'uname': uname,
                           'uname_lower': uname_lower,
                           'mail': mail,
                           'mail_lower': mail_lower,
                           'salt': salt,
                           'hash': pwhash.hash_vj4(password, salt),
                           'regat': datetime.datetime.utcnow(),
                           'regip': regip,
                           'priv': builtin.DEFAULT_PRIV,
                           'loginat': datetime.datetime.utcnow(),
                           'loginip': regip,
                           'gravatar': mail})
  except errors.DuplicateKeyError:
    # Map the unique-index violation onto the same domain error.
    raise error.UserAlreadyExistError(uid, uname, mail) from None
@argmethod.wrap
async def get_by_uid(uid: int, fields=PROJECTION_VIEW):
  """Get a user by uid; builtin accounts take precedence over the db."""
  builtin_match = next((u for u in builtin.USERS if u['_id'] == uid), None)
  if builtin_match is not None:
    return builtin_match
  return await db.coll('user').find_one({'_id': uid}, fields)
@argmethod.wrap
async def get_by_uname(uname: str, fields=PROJECTION_VIEW):
  """Get a user by uname (case-insensitive); builtin accounts first."""
  uname_lower = uname.strip().lower()
  for user in builtin.USERS:
    if user['uname_lower'] == uname_lower:
      return user
  coll = db.coll('user')
  return await coll.find_one({'uname_lower': uname_lower}, fields)
@argmethod.wrap
async def get_by_mail(mail: str, fields=PROJECTION_VIEW):
  """Get a user by mail (case-insensitive); builtin accounts first."""
  mail_lower = mail.strip().lower()
  builtin_match = next(
      (u for u in builtin.USERS if u['mail_lower'] == mail_lower), None)
  if builtin_match is not None:
    return builtin_match
  return await db.coll('user').find_one({'mail_lower': mail_lower}, fields)
def get_multi(*, fields=PROJECTION_VIEW, **kwargs):
  """Get multiple users.

  Returns a cursor over documents matching the keyword filter; builtin
  accounts are not included (they live outside the collection).
  """
  coll = db.coll('user')
  return coll.find(kwargs, fields)
async def get_dict(uids, *, fields=PROJECTION_VIEW):
  """Map each uid in *uids* to its user doc; unknown uids are absent.

  Builtin accounts are resolved in-process; remaining uids are fetched
  from the database with a single $in query.
  """
  uid_set = set(uids)
  result = dict()
  for doc in builtin.USERS:
    if doc['_id'] in uid_set:
      result[doc['_id']] = doc
      uid_set.remove(doc['_id'])
  async for doc in get_multi(_id={'$in': list(uid_set)}, fields=fields):
    result[doc['_id']] = doc
  return result
@argmethod.wrap
async def check_password_by_uid(uid: int, password: str):
  """Check password. Returns doc or None."""
  doc = await get_by_uid(uid, PROJECTION_ALL)
  if not doc:
    return None
  if pwhash.check(password, doc['salt'], doc['hash']):
    return doc
  return None
@argmethod.wrap
async def check_password_by_uname(uname: str, password: str, auto_upgrade: bool=False):
  """Check password. Returns doc on success, None on wrong password.

  Unlike check_password_by_uid, a missing user raises UserNotFoundError.
  With auto_upgrade, an outdated stored hash is transparently re-hashed
  using the current scheme after a successful check.
  """
  doc = await get_by_uname(uname, PROJECTION_ALL)
  if not doc:
    raise error.UserNotFoundError(uname)
  if pwhash.check(password, doc['salt'], doc['hash']):
    if auto_upgrade and pwhash.need_upgrade(doc['hash']) \
        and validator.is_password(password):
      await set_password(doc['_id'], password)
    return doc
@argmethod.wrap
async def set_password(uid: int, password: str):
  """Set password. Returns doc or None."""
  validator.check_password(password)
  # A fresh salt is generated on every password change.
  salt = pwhash.gen_salt()
  coll = db.coll('user')
  doc = await coll.find_one_and_update(filter={'_id': uid},
                                       update={'$set': {'salt': salt,
                                                        'hash': pwhash.hash_vj4(password, salt)}},
                                       return_document=ReturnDocument.AFTER)
  return doc
@argmethod.wrap
async def set_mail(uid: int, mail: str):
  """Set mail (and its normalized lowercase form). Returns doc or None."""
  validator.check_mail(mail)
  return await set_by_uid(uid, mail=mail, mail_lower=mail.strip().lower())
@argmethod.wrap
async def change_password(uid: int, current_password: str, password: str):
  """Change password after verifying the current one. Returns doc or None."""
  doc = await check_password_by_uid(uid, current_password)
  if not doc:
    return None
  validator.check_password(password)
  salt = pwhash.gen_salt()
  coll = db.coll('user')
  # Filtering on the old salt/hash makes this a compare-and-swap: a
  # concurrent password change aborts this update instead of being clobbered.
  doc = await coll.find_one_and_update(filter={'_id': doc['_id'],
                                               'salt': doc['salt'],
                                               'hash': doc['hash']},
                                       update={'$set': {'salt': salt,
                                                        'hash': pwhash.hash_vj4(password, salt)}},
                                       return_document=ReturnDocument.AFTER)
  return doc
async def set_by_uid(uid, **kwargs):
  """Set arbitrary fields on the user document. Returns doc or None."""
  coll = db.coll('user')
  updated = await coll.find_one_and_update(
      filter={'_id': uid},
      update={'$set': kwargs},
      return_document=ReturnDocument.AFTER)
  return updated
@argmethod.wrap
async def set_priv(uid: int, priv: int):
  """Set privilege. Returns doc or None."""
  # Thin wrapper over set_by_uid for the 'priv' field.
  return await set_priv_impl(uid, priv) if False else await set_by_uid(uid, priv=priv)
@argmethod.wrap
async def set_superadmin(uid: int):
  """Grant all privileges to the user. Returns doc or None."""
  return await set_priv(uid, builtin.PRIV_ALL)
# NOTE(review): rebinding the name to None disables the command right after
# it is registered -- presumably intentional (too dangerous to expose via
# the CLI wrapper); confirm before removing this line.
set_superadmin = None
@argmethod.wrap
async def set_judge(uid: int):
  """Set the user's privilege to builtin.JUDGE_PRIV. Returns doc or None."""
  return await set_priv(uid, builtin.JUDGE_PRIV)
@argmethod.wrap
async def set_default(uid: int):
  """Reset the user's privilege to builtin.DEFAULT_PRIV. Returns doc or None."""
  return await set_priv(uid, builtin.DEFAULT_PRIV)
@argmethod.wrap
async def get_prefix_list(prefix: str, fields=PROJECTION_VIEW, limit: int=50):
  """Case-insensitively list users whose uname starts with *prefix*."""
  prefix = prefix.lower()
  # \Q...\E is PCRE literal-quoting evaluated by MongoDB (not Python's re);
  # the replace() keeps an embedded \E from terminating the quoted region.
  regex = r'\A\Q{0}\E'.format(prefix.replace(r'\E', r'\E\\E\Q'))
  coll = db.coll('user')
  # NOTE(review): motor's to_list() normally requires a length argument --
  # verify this call against the driver version in use.
  udocs = await coll.find({'uname_lower': {'$regex': regex}}, projection=fields) \
      .limit(limit) \
      .to_list()
  # Builtin accounts are appended afterwards, outside the limit.
  for udoc in builtin.USERS:
    if udoc['uname_lower'].startswith(prefix):
      udocs.append(udoc)
  return udocs
@argmethod.wrap
async def count(**kwargs):
  """Count users matching the keyword filter. Returns an int."""
  coll = db.coll('user')
  # The cursor's count() is asynchronous on this driver; without awaiting
  # it, callers of `await count(...)` would receive a future, not an int.
  return await coll.find(kwargs).count()
@argmethod.wrap
async def ensure_indexes():
  """Create the indexes the lookups in this module rely on."""
  coll = db.coll('user')
  # uname_lower must be unique; mail_lower is sparse (documents without the
  # field are skipped by the index).
  await coll.create_index('uname_lower', unique=True)
  await coll.create_index('mail_lower', sparse=True)
if __name__ == '__main__':
  # Expose the @argmethod.wrap functions above as a command-line interface.
  argmethod.invoke_by_args()
|
EricMuller/mynotes-backend | requirements/twisted/Twisted-17.1.0/src/twisted/mail/test/test_bounce.py | Python | mit | 1,029 | 0.002915 | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""Test cases for bounce message generation
"""
from twisted.trial import unittest
from twisted.mail import bounce
import cStringIO
import email.message
import email.parser
class BounceTests(unittest.TestCase):
    """
    testcases for bounce message generation
    """
    def testBounceFormat(self):
        # The bounce must go back to the original sender, from the
        # postmaster of the unreachable domain.
        from_, to, s = bounce.generateBounce(cStringIO.StringIO('''\
From: Moshe Zadka <moshez@example.com>
To: nonexistent@example.org
Subject: test
'''), 'moshez@example.com', 'nonexistent@example.org')
        self.assertEqual(from_, '')
        self.assertEqual(to, 'moshez@example.com')
        emailParser = email.parser.Parser()
        mess = emailParser.parse(cStringIO.StringIO(s))
        self.assertEqual(mess['To'], 'moshez@example.com')
        self.assertEqual(mess['From'], 'postmaster@example.org')
        self.assertEqual(mess['subject'], 'Returned Mail: see transcript for details')
    def testBounceMIME(self):
        # TODO: MIME bounce generation is not covered yet.
        pass
|
michaelpacer/dynd-python | dynd/tests/test_range_linspace.py | Python | bsd-2-clause | 5,966 | 0.003185 | import sys
import unittest
from dynd import nd, ndt
class TestArange(unittest.TestCase):
    """Tests for nd.range construction, dtype inference and error cases."""
    def test_simple(self):
        # nd.range mirrors Python's range() semantics for start/stop/step.
        self.assertEqual(nd.as_py(nd.range(10)), list(range(10)))
        self.assertEqual(nd.as_py(nd.range(5, 10)), list(range(5, 10)))
        self.assertEqual(nd.as_py(nd.range(5, 10, 3)), list(range(5, 10, 3)))
        self.assertEqual(nd.as_py(nd.range(10, 5, -1)), list(range(10, 5, -1)))
        self.assertEqual(nd.as_py(nd.range(stop=10, step=2)), list(range(0, 10, 2)))
    def test_default_dtype(self):
        # Defaults to int32 when given ints
        self.assertEqual(nd.dtype_of(nd.range(10)), ndt.int32)
        # Except if the input numbers don't fit, then returns int64
        self.assertEqual(nd.dtype_of(nd.range(2**32, 2**32+10)), ndt.int64)
        self.assertEqual(nd.dtype_of(nd.range(-2**32, -2**32+10)), ndt.int64)
        # Gives float64 when given floats
        self.assertEqual(nd.dtype_of(nd.range(10.0)), ndt.float64)
    def test_specified_dtype(self):
        # Must return the requested type
        self.assertRaises(OverflowError, nd.range, 10, dtype=ndt.bool)
        self.assertEqual(nd.dtype_of(nd.range(10, dtype=ndt.int8)), ndt.int8)
        self.assertEqual(nd.dtype_of(nd.range(10, dtype=ndt.int16)), ndt.int16)
        self.assertEqual(nd.dtype_of(nd.range(10, dtype=ndt.int32)), ndt.int32)
        self.assertEqual(nd.dtype_of(nd.range(10, dtype=ndt.int64)), ndt.int64)
        self.assertEqual(nd.dtype_of(nd.range(10, dtype=ndt.uint8)), ndt.uint8)
        self.assertEqual(nd.dtype_of(nd.range(10, dtype=ndt.uint16)), ndt.uint16)
        self.assertEqual(nd.dtype_of(nd.range(10, dtype=ndt.uint32)), ndt.uint32)
        self.assertEqual(nd.dtype_of(nd.range(10, dtype=ndt.uint64)), ndt.uint64)
        self.assertEqual(nd.dtype_of(nd.range(10, dtype=ndt.float32)), ndt.float32)
        self.assertEqual(nd.dtype_of(nd.range(10, dtype=ndt.float64)), ndt.float64)
        # Maybe in the future add complex support when start.imag == stop.imag
        # and step.imag == 0?
        self.assertRaises(TypeError, nd.range, 10, dtype=ndt.complex_float32)
        self.assertRaises(TypeError, nd.range, 10, dtype=ndt.complex_float64)
        # Float/complex should convert when the dtype is specified
        self.assertEqual(nd.dtype_of(nd.range(10.0, dtype=ndt.uint16)), ndt.uint16)
        self.assertEqual(nd.dtype_of(nd.range(1.0, step=0.5+0j, dtype=ndt.float32)), ndt.float32)
    def test_float_step(self):
        # Should produce the correct count for 1.0/int steps
        for i in range(1, 32):
            a = nd.range(1.0, step=1.0/i)
            self.assertEqual(len(a), i)
            self.assertEqual(nd.as_py(a[0]), 0)
        # For powers of two, should be getting exact answers
        for i in range(5):
            a = nd.range(1.0, step=1.0/2**i)
            self.assertEqual(nd.as_py(a), [float(x)/2**i for x in range(2**i)])
    def test_cast_errors(self):
        # If a dtype is specified, the inputs must be convertible
        self.assertRaises(RuntimeError, nd.range, 1.5, dtype=ndt.int32)
        self.assertRaises(RuntimeError, nd.range, 1j, 10, 1, dtype=ndt.int32)
        self.assertRaises(RuntimeError, nd.range, 0, 1j, 1, dtype=ndt.int32)
        self.assertRaises(RuntimeError, nd.range, 0, 10, 1j, dtype=ndt.int32)
class TestLinspace(unittest.TestCase):
    """Tests for nd.linspace construction, dtype inference and error cases."""
    def test_simple(self):
        # Default is a count of 50. For these simple cases of integers,
        # the result should be exact
        self.assertEqual(nd.as_py(nd.linspace(0, 49)), list(range(50)))
        self.assertEqual(nd.as_py(nd.linspace(49, 0)), list(range(49, -1, -1)))
        self.assertEqual(nd.as_py(nd.linspace(0, 10, count=11)), list(range(11)))
        self.assertEqual(nd.as_py(nd.linspace(1, -1, count=2)), [1, -1])
        self.assertEqual(nd.as_py(nd.linspace(1j, 50j)), [i*1j for i in range(1, 51)])
    def test_default_dtype(self):
        # Defaults to float64 when given ints
        self.assertEqual(nd.dtype_of(nd.linspace(0, 1)), ndt.float64)
        # Gives float64 when given floats
        self.assertEqual(nd.dtype_of(nd.linspace(0, 1.0)), ndt.float64)
        self.assertEqual(nd.dtype_of(nd.linspace(0.0, 1)), ndt.float64)
        # Gives complex[float64] when given complex
        self.assertEqual(nd.dtype_of(nd.linspace(1.0, 1.0j)), ndt.complex_float64)
        self.assertEqual(nd.dtype_of(nd.linspace(0.0j, 1.0)), ndt.complex_float64)
    def test_specified_dtype(self):
        # Linspace only supports real-valued outputs
        self.assertRaises(RuntimeError, nd.linspace, 0, 1, dtype=ndt.bool)
        self.assertRaises(RuntimeError, nd.linspace, 0, 1, dtype=ndt.int8)
        self.assertRaises(RuntimeError, nd.linspace, 0, 1, dtype=ndt.int16)
        self.assertRaises(RuntimeError, nd.linspace, 0, 1, dtype=ndt.int32)
        self.assertRaises(RuntimeError, nd.linspace, 0, 1, dtype=ndt.int64)
        self.assertRaises(RuntimeError, nd.linspace, 0, 1, dtype=ndt.uint8)
        self.assertRaises(RuntimeError, nd.linspace, 0, 1, dtype=ndt.uint16)
        self.assertRaises(RuntimeError, nd.linspace, 0, 1, dtype=ndt.uint32)
        self.assertRaises(RuntimeError, nd.linspace, 0, 1, dtype=ndt.uint64)
        # Should obey the float/complex type requests
        self.assertEqual(nd.dtype_of(nd.linspace(0, 1, dtype=ndt.float32)), ndt.float32)
        self.assertEqual(nd.dtype_of(nd.linspace(0, 1, dtype=ndt.float64)), ndt.float64)
        self.assertEqual(nd.dtype_of(nd.linspace(0, 1, dtype=ndt.complex_float32)), ndt.complex_float32)
        self.assertEqual(nd.dtype_of(nd.linspace(0, 1, dtype=ndt.complex_float64)), ndt.complex_float64)
    def test_cast_errors(self):
        # If a dtype is specified, the inputs must be convertible
        self.assertRaises(RuntimeError, nd.linspace, 0j, 1j, dtype=ndt.float32)
        self.assertRaises(RuntimeError, nd.linspace, 0j, 1j, dtype=ndt.float64)
if __name__ == '__main__':
    # Run the test suite when executed directly.
    unittest.main()
|
Eagles2F/sync-engine | inbox/auth/_outlook.py | Python | agpl-3.0 | 4,066 | 0.001968 | import datetime
import sqlalchemy.orm.exc
from nylas.logging import get_logger
log = get_logger()
from inbox.auth.oauth import OAuthAuthHandler
from inbox.basicauth import OAuthError
from inbox.models import Namespace
from inbox.config import config
from inbox.models.backends.outlook import OutlookAccount
from inbox.models.backends.oauth import token_manager
from inbox.util.url import url_concat
# Provider registration names used by the auth framework.
PROVIDER = '_outlook'
AUTH_HANDLER_CLS = '_OutlookAuthHandler'
# Outlook OAuth app credentials
OAUTH_CLIENT_ID = config.get_required('MS_LIVE_OAUTH_CLIENT_ID')
OAUTH_CLIENT_SECRET = config.get_required('MS_LIVE_OAUTH_CLIENT_SECRET')
OAUTH_REDIRECT_URI = config.get_required('MS_LIVE_OAUTH_REDIRECT_URI')
# Microsoft Live OAuth2 endpoints.
OAUTH_AUTHENTICATE_URL = 'https://login.live.com/oauth20_authorize.srf'
OAUTH_ACCESS_TOKEN_URL = 'https://login.live.com/oauth20_token.srf'
OAUTH_USER_INFO_URL = 'https://apis.live.net/v5.0/me'
OAUTH_BASE_URL = 'https://apis.live.net/v5.0/'
OAUTH_SCOPE = ' '.join([
    'wl.basic', # Read access for basic profile info + contacts
    'wl.offline_access', # ability to read / update user's info at any time
    'wl.emails', # Read access to user's email addresses
    'wl.imap']) # R/W access to user's email using IMAP / SMTP
class _OutlookAuthHandler(OAuthAuthHandler):
    """OAuth handler for Outlook / Microsoft Live accounts.

    (Python 2 module: note the print statements and raw_input below.)
    """
    # Bind the module-level endpoints/credentials onto the handler class.
    OAUTH_CLIENT_ID = OAUTH_CLIENT_ID
    OAUTH_CLIENT_SECRET = OAUTH_CLIENT_SECRET
    OAUTH_REDIRECT_URI = OAUTH_REDIRECT_URI
    OAUTH_AUTHENTICATE_URL = OAUTH_AUTHENTICATE_URL
    OAUTH_ACCESS_TOKEN_URL = OAUTH_ACCESS_TOKEN_URL
    OAUTH_USER_INFO_URL = OAUTH_USER_INFO_URL
    OAUTH_BASE_URL = OAUTH_BASE_URL
    OAUTH_SCOPE = OAUTH_SCOPE
    def create_account(self, db_session, email_address, response):
        """Create or update an OutlookAccount from an OAuth token response."""
        # The authoritative address comes from the token response, not the
        # caller-supplied argument.
        email_address = response.get('emails')['account']
        try:
            account = db_session.query(OutlookAccount).filter_by(
                email_address=email_address).one()
        except sqlalchemy.orm.exc.NoResultFound:
            namespace = Namespace()
            account = OutlookAccount(namespace=namespace)
        account.refresh_token = response['refresh_token']
        account.date = datetime.datetime.utcnow()
        tok = response.get('access_token')
        expires_in = response.get('expires_in')
        token_manager.cache_token(account, tok, expires_in)
        account.scope = response.get('scope')
        account.email_address = email_address
        account.o_id_token = response.get('user_id')
        account.o_id = response.get('id')
        account.name = response.get('name')
        account.gender = response.get('gender')
        account.link = response.get('link')
        account.locale = response.get('locale')
        # Unlike Gmail, Outlook doesn't return the client_id and secret here
        account.client_id = OAUTH_CLIENT_ID
        account.client_secret = OAUTH_CLIENT_SECRET
        # Ensure account has sync enabled.
        account.enable_sync()
        return account
    def validate_token(self, access_token):
        """Validate the token by fetching the user info it grants access to."""
        return self._get_user_info(access_token)
    def interactive_auth(self, email_address=None):
        """Command-line OAuth flow: print the consent URL, then prompt for
        the authorization code until one is accepted."""
        url_args = {'redirect_uri': self.OAUTH_REDIRECT_URI,
                    'client_id': self.OAUTH_CLIENT_ID,
                    'response_type': 'code',
                    'scope': self.OAUTH_SCOPE,
                    'access_type': 'offline'}
        url = url_concat(self.OAUTH_AUTHENTICATE_URL, url_args)
        print ('Please visit the following url to allow access to this '
               'application. The response will provide '
               'code=[AUTHORIZATION_CODE]&lc=XXXX in the location. Paste the'
               ' AUTHORIZATION_CODE here:')
        print '\n{}'.format(url)
        while True:
            auth_code = raw_input('Enter authorization code: ').strip()
            try:
                auth_response = self._get_authenticated_user(auth_code)
                return auth_response
            except OAuthError:
                print '\nInvalid authorization code, try again...\n'
                auth_code = None
|
Huyuwei/tvm | tests/python/relay/test_pass_alter_op_layout.py | Python | apache-2.0 | 22,068 | 0.001178 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Test alter op layout pass"""
import tvm
from tvm import relay
from tvm.relay.op import register_alter_op_layout
from tvm.relay import transform, analysis
def run_opt_pass(expr, passes):
    """Run the given pass (or list of passes) on *expr* and return the result."""
    pass_list = passes if isinstance(passes, list) else [passes]
    mod = relay.Module.from_expr(expr)
    seq = transform.Sequential(pass_list)
    with transform.PassContext(opt_level=3):
        mod = seq(mod)
    entry = mod["main"]
    if isinstance(expr, relay.Function):
        return entry
    return entry.body
def test_alter_op():
    """Test directly replacing an operator with a new one"""
    def before():
        # conv2d -> relu graph whose conv2d will be rewritten by the hook.
        x = relay.var("x", shape=(1, 64, 56, 56))
        weight = relay.var('weight', shape=(64, 64, 3, 3))
        y = relay.nn.conv2d(x, weight,
                            channels=64,
                            kernel_size=(3, 3),
                            padding=(1, 1))
        y = relay.nn.relu(y)
        y = relay.Function([x, weight], y)
        return y
    @register_alter_op_layout("nn.conv2d", level=100)
    def alter_conv2d(attrs, inputs, tinfos):
        # Replace the conv2d with one whose weight is doubled.
        data, weight = inputs
        weight = relay.multiply(weight, relay.const(2.0, "float32"))
        return relay.nn.conv2d(data, weight, **attrs)
    def expected():
        x = relay.var("x", shape=(1, 64, 56, 56))
        weight = relay.var('weight', shape=(64, 64, 3, 3))
        y = relay.nn.conv2d(x, relay.multiply(weight, relay.const(2.0, "float32")),
                            channels=64,
                            kernel_size=(3, 3),
                            padding=(1, 1))
        y = relay.nn.relu(y)
        y = relay.Function([x, weight], y)
        return y
    a = before()
    a = run_opt_pass(a, transform.AlterOpLayout())
    b = run_opt_pass(expected(), transform.InferType())
    # Alpha-equivalence ignores naming differences between the two graphs.
    assert analysis.alpha_equal(a, b), "Actual = \n" + str(a)
def test_alter_return_none():
    """Test doing nothing by returning 'None' """
    def before():
        x = relay.var("x", shape=(1, 64, 56, 56))
        y = relay.nn.global_max_pool2d(x)
        y = relay.Function([x], y)
        return y
    called = [False]
    @register_alter_op_layout("nn.global_max_pool2d", level=101)
    def alter_conv2d(attrs, inputs, tinfos):
        # Record that the hook ran, but leave the op untouched.
        called[0] = True
        return None
    a = before()
    a = run_opt_pass(a, transform.AlterOpLayout())
    b = before()
    b = run_opt_pass(b, transform.InferType())
    # Graph must be unchanged, and the hook must have been invoked.
    assert analysis.alpha_equal(a, b), "Actual = \n" + str(a)
    assert(called[0])
def test_alter_layout():
    """Test alternating the layout of a conv2d.
    The layout of broadcast operators and the weight should be changed accordingly.
    """
    def before():
        x = relay.var("x", shape=(1, 64, 56, 56))
        bias = relay.var("bias")
        weight = relay.var("weight")
        y = relay.nn.conv2d(x, weight, channels=64, kernel_size=(3, 3), padding=(1, 1))
        y = relay.nn.bias_add(y, bias)
        # a useless tuple, which will be eliminated
        y = relay.Tuple([y])[0]
        y = relay.nn.relu(y)
        y = relay.nn.max_pool2d(y, pool_size=(2, 2))
        y = relay.cast(y, 'int32')
        y = relay.nn.batch_flatten(y)
        y = relay.Function(analysis.free_vars(y), y)
        return y
    @register_alter_op_layout("nn.conv2d", level=102)
    def alter_conv2d(attrs, inputs, tinfos):
        # Rewrite the conv2d to tiled NCHW16c/OIHW16i layouts.
        data, weight = inputs
        new_attrs = dict(attrs)
        new_attrs['data_layout'] = 'NCHW16c'
        new_attrs['kernel_layout'] = 'OIHW16i'
        return relay.nn.conv2d(data, weight, **new_attrs)
    def expected():
        # Same graph with explicit layout_transform ops inserted around the
        # conv2d, the bias broadcast, and the final batch_flatten.
        x = relay.var("x", shape=(1, 64, 56, 56))
        bias = relay.var("bias", shape=(64,))
        weight = relay.var("weight", shape=(64, 64, 3, 3))
        y = relay.layout_transform(x, "NCHW", "NCHW16c")
        w = relay.layout_transform(weight, "OIHW", "OIHW16i")
        y = relay.nn.conv2d(y, w,
                            channels=64,
                            kernel_size=(3, 3),
                            padding=(1, 1),
                            kernel_layout="OIHW16i",
                            data_layout="NCHW16c")
        b = relay.expand_dims(bias, axis=1, num_newaxis=2)
        b = relay.layout_transform(b, "CHW", "CHW16c")
        y = relay.add(y, b)
        y = relay.nn.relu(y)
        y = relay.nn.max_pool2d(y, pool_size=(2, 2), layout="NCHW16c")
        y = relay.cast(y, 'int32')
        y = relay.layout_transform(y, "NCHW16c", "NCHW")
        y = relay.nn.batch_flatten(y)
        y = relay.Function(analysis.free_vars(y), y)
        return y
    a = before()
    a = run_opt_pass(a, [transform.CanonicalizeOps(),
                         transform.AlterOpLayout()])
    b = expected()
    b = run_opt_pass(b, transform.InferType())
    assert analysis.alpha_equal(a, b), "Actual = \n" + str(a)
def test_alter_layout_dual_path():
    """
    Test alternating the layout with two outputs.
    One path continues to use the new layout while one path fall backs to old layout.
    """
    def before():
        x = relay.var("x", shape=(1, 64, 56, 56))
        weight1 = relay.var('weight1')
        weight2 = relay.var('weight2')
        y = relay.nn.conv2d(x, weight1,
                            channels=32,
                            kernel_size=(3, 3),
                            padding=(1, 1))
        y = relay.nn.relu(y)
        y1 = relay.nn.conv2d(y, weight2,
                             channels=32,
                             kernel_size=(3, 3),
                             padding=(1, 1))
        y1 = relay.nn.relu(y1)
        y2 = relay.nn.batch_flatten(y)
        ret = relay.Tuple([y1, y2])
        y = relay.Function(analysis.free_vars(ret), ret)
        return y
    @register_alter_op_layout("nn.conv2d", level=103)
    def alter_conv2d(attrs, inputs, tinfos):
        # Rewrite the data layout only (kernel layout left unchanged).
        data, weight = inputs
        new_attrs = dict(attrs)
        new_attrs['data_layout'] = 'NCHW16c'
        return relay.nn.conv2d(data, weight, **new_attrs)
    def expected():
        # Both convs stay in NCHW16c; each output transforms back to NCHW
        # where it leaves the tiled region.
        x = relay.var("x", shape=(1, 64, 56, 56))
        weight1 = relay.var('weight1')
        weight2 = relay.var('weight2')
        y = relay.layout_transform(x, "NCHW", "NCHW16c")
        y = relay.nn.conv2d(y, weight1,
                            channels=32,
                            kernel_size=(3, 3),
                            padding=(1, 1),
                            data_layout="NCHW16c")
        y = relay.nn.relu(y)
        y1 = relay.nn.conv2d(y, weight2,
                             channels=32,
                             kernel_size=(3, 3),
                             padding=(1, 1),
                             data_layout='NCHW16c')
        y1 = relay.nn.relu(y1)
        y1 = relay.layout_transform(y1, "NCHW16c", "NCHW")
        y2 = relay.layout_transform(y, "NCHW16c", "NCHW")
        y2 = relay.nn.batch_flatten(y2)
        ret = relay.Tuple([y1, y2])
        y = relay.Function(analysis.free_vars(ret), ret)
        return y
    a = before()
    a = run_opt_pass(a, transform.AlterOpLayout())
    b = expected()
    b = run_opt_pass(b, transform.InferType())
    assert analysis.alpha_equal(a, b), "Actual = \n" + str(a)
def test_alter_layout_resnet():
"""Test alternating the layout of a residual block
This also tests the elimination of duplicated transformation.
If a same transformation applies to a same node twice, only one transformation will be create |
zappyk-github/zappyk-python | src/src_zappyk/findNameSend/setup.py | Python | gpl-2.0 | 2,984 | 0.029155 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'zappyk'
import os, sys
from lib_zappyk import _setup
project = 'FindNameSend'
name = __import__(project).get_project()
description = __import__(project).get_description()
version = __import__(project).get_version()
author = _setup.AUTHOR
author_email = _setup.AUTHOR_EMAIL
license = _setup.LICENSE
url = _setup.URL
keywords = _setup._keywords([author])
path_execute = [name]
name_execute = 'main.py'
path_img_ico = ['images']
name_img_ico = 'gear.ico'
file_execute = os.path.join(os.path.join(*path_execute), name_execute)
file_img_ico = os.path.join(os.path.join(*path_img_ico), name_img_ico)
pkgs_exclude = ['tkinter', 'PyQt4']
#pkgs_include= _setup._find_packages('.', pkgs_exclude)
pkgs_include = _setup._find_packages(exclude=pkgs_exclude)
file_include = ['%s-config.ini' % project
,'%s-logger.ini' % project
,'%s-launch.bat' % project
]
build_exe = None
build_exe = _setup._build_exe(None, name, version)
###############################################################################
(base, exte) = _setup._setup_Executable_base_exte()
executables = _setup._setup_Executable(file_execute
,base=base
,icon=file_img_ico
,appendScriptToExe=False
,appendScriptToLibrary=False
,copyDependentFiles=True
,targetName=name+exte
)
'''
options = {
'build_exe': {
'create_shared_zip': False,
'compressed': True,
'packages': pkgs_include,
'excludes': pkgs_exclude,
'include_files': file_include,
'includes': [
'testfreeze_1',
'testfreeze_2'
],
'path': sys.path + ['modules']
}
}
'''
buildOptions = dict(create_shared_zip=False
,compressed=True
,packages=pkgs_include
,excludes=pkgs_exclude
,include_files=file_include
# ,namespace_packages=[name]
# ,path=sys.path+['/path/more/modules']
,build_exe=build_exe
)
setupOptions = dict(name=name
,version=version
,url=url
,author=author
,author_email=author_email
,description=descripti | on
,license=license
,keywords=keywords
,executables=[executables]
,options=dict(build_exe=buildOptions)
# ,packages=pkgs_include
# ,include_package_data=True
# ,scripts=[file_execute]
# ,zip_safe=True
)
_setup._setup(**setupOptions)
############################################################# | ##################
sys.exit(0)
|
luboslenco/lue | tools/io_export_arm.py | Python | lgpl-3.0 | 29,689 | 0.003166 | # Armory Mesh Exporter
# https://armory3d.org/
#
# Based on Open Game Engine Exchange
# https://opengex.org/
# Export plugin for Blender by Eric Lengyel
# Copyright 2015, Terathon Software LLC
#
# This software is licensed under the Creative Commons
# Attribution-ShareAlike 3.0 Unported License:
# http://creativecommons.org/licenses/by-sa/3.0/deed.en_US
# Blender add-on registration metadata (shown in Blender's add-on list).
bl_info = {
    "name": "Armory Mesh Exporter",
    "category": "Import-Export",
    "location": "File -> Export",
    "description": "Armory mesh data",
    "author": "Armory3D.org",
    "version": (2019, 12, 0),
    "blender": (2, 81, 0),
    "wiki_url": "https://github.com/armory3d/iron/wiki",
    "tracker_url": "https://github.com/armory3d/iron/issues"
}
import math
import os
import time
import numpy as np
import bpy
from bpy_extras.io_utils import ExportHelper
from mathutils import *
# Object-type codes used when serializing the scene graph.
NodeTypeBone = 1
NodeTypeMesh = 2
# Struct names indexed by the codes above (0 = plain object).
structIdentifier = ["object", "bone_object", "mesh_object"]
class ArmoryExporter(bpy.types.Operator, ExportHelper):
'''Export to Armory format'''
bl_idname = "export_scene.arm"
bl_label = "Export Armory"
filename_ext = ".arm"
    def execute(self, context):
        """Export the current scene to the target .arm file.

        Builds lookup tables over the scene's objects, serializes objects and
        mesh data into self.output, then writes the packed result to disk.
        """
        profile_time = time.time()
        # Remember the frame so it can be restored after export.
        current_frame = context.scene.frame_current
        current_subframe = context.scene.frame_subframe
        self.scene = context.scene
        self.output = {}
        self.bobjectArray = {}
        self.bobjectBoneArray = {}
        self.meshArray = {}
        self.boneParentArray = {}
        self.bone_tracks = []
        self.depsgraph = context.evaluated_depsgraph_get()
        scene_objects = self.scene.collection.all_objects
        # First pass: register objects/bones starting from the roots.
        for bobject in scene_objects:
            if not bobject.parent:
                self.process_bobject(bobject)
        self.process_skinned_meshes()
        self.output['name'] = self.scene.name
        self.output['objects'] = []
        for bo in scene_objects:
            if not bo.parent:
                self.export_object(bo, self.scene)
        self.output['mesh_datas'] = []
        for o in self.meshArray.items():
            self.export_mesh(o, self.scene)
        self.write_arm(self.filepath, self.output)
        # Restore the frame changed during export.
        self.scene.frame_set(current_frame, subframe=current_subframe)
        print('Scene exported in ' + str(time.time() - profile_time))
        return {'FINISHED'}
    def write_arm(self, filepath, output):
        """Pack *output* and write it to *filepath* as binary.

        NOTE(review): packb is expected to be defined elsewhere in this
        module (a msgpack-style encoder, presumably) -- confirm.
        """
        with open(filepath, 'wb') as f:
            f.write(packb(output))
def write_matrix(self, matrix):
return [matrix[0][0], matrix[0][1], matrix[0][2], matrix[0][3],
matrix[1][0], matrix[1][1], matrix[1][2], matrix[1][3],
matrix[2][0], matrix[2][1], matrix[2][2], matrix[2][3],
matrix[3][0], matrix[3][1], matrix[3][2], matrix[3][3]]
def find_bone(self, name):
for bobject_ref in self.bobjectBoneArray.items():
if bobject_ref[0].name == name:
return bobject_ref
return None
def collect_bone_animation(self, armature, name):
path = "pose.bones[\"" + name + "\"]."
curve_array = []
if armature.animation_data:
action = armature.animation_data.action
if action:
for fcurve in action.fcurves:
if fcurve.data_path.startswith(path):
curve_array.append(fcurve)
return curve_array
def export_bone(self, armature, bone, scene, o, action):
bobjectRef = self.bobjectBoneArray.get(bone)
if bobjectRef:
o['type'] = structIdentifier[bobjectRef["objectType"]]
o['name'] = bobjectRef["structName"]
self.export_bone_transform(armature, bone, scene, o, action)
o['children'] = []
for subbobject in bone.children:
so = {}
self.export_bone(armature, subbobject, scene, so, action)
o['children'].append(so)
def export_pose_markers(self, oanim, action):
if action.pose_markers == None or len(action.pose_markers) == 0:
return
oanim['marker_frames'] = []
oanim['marker_names'] = []
for m in action.pose_markers:
oanim['marker_frames'].append(int(m.frame))
oanim['marker_names'].append(m.name)
def process_bone(self, bone):
self.bobjectBoneArray[bone] = {"objectType" : NodeTypeBone, "structName" : bone.name}
for subbobject in bone.children:
self.process_bone(subbobject)
def process_bobject(self, bobject):
if bobject.type != "MESH" and bobject.type != "ARMATURE":
return
btype = NodeTypeMesh if bobject.type == "MESH" else 0
self.bobjectArray[bobject] = {"objectType" : btype, "structName" : bobject.name}
if bobject.type == "ARMATURE":
skeleton = bobject.data
if skeleton:
for bone in skeleton.bones:
if not bone.parent:
self.process_bone(bone)
for subbobject in bobject.children:
self.process_bobject(subbobject)
def process_skinned_meshes(self):
for bobjectRef in self.bobjectArray.items():
if bobjectRef[1]["objectType"] == NodeTypeMesh:
armature = bobjectRef[0].find_armature()
if armature:
for bone in armature.data.bones:
boneRef = self.find_bone(bone.name)
if boneRef:
boneRef[1]["objectType"] = NodeTypeBone
    def export_bone_transform(self, armature, bone, scene, o, action):
        """Write the bone's rest transform (and an animation track stub) to *o*."""
        pose_bone = armature.pose.bones.get(bone.name)
        # matrix_local is in armature space; convert to parent-relative space
        # when the bone has a parent.
        transform = bone.matrix_local.copy()
        if bone.parent is not None:
            transform = (bone.parent.matrix_local.inverted_safe() @ transform)
        o['transform'] = {}
        o['transform']['values'] = self.write_matrix(transform)
        curve_array = self.collect_bone_animation(armature, bone.name)
        animation = len(curve_array) != 0
        if animation and pose_bone:
            begin_frame = int(action.frame_range[0])
            end_frame = int(action.frame_range[1])
            tracko = {}
            o['anim'] = {}
            o['anim']['tracks'] = [tracko]
            tracko['target'] = "transform"
            tracko['frames'] = []
            # Frame indices are rebased so the track starts at 0.
            for i in range(begin_frame, end_frame + 1):
                tracko['frames'].append(i - begin_frame)
            # Values are filled in later by write_bone_matrices(); only the
            # (shared) list reference is registered here.
            tracko['values'] = []
            self.bone_tracks.append((tracko['values'], pose_bone))
    def write_bone_matrices(self, scene, action):
        """Sample the action frame by frame, filling the registered bone tracks."""
        begin_frame = int(action.frame_range[0])
        end_frame = int(action.frame_range[1])
        if len(self.bone_tracks) > 0:
            for i in range(begin_frame, end_frame + 1):
                # Side effect on the whole scene; callers are expected to
                # restore the original frame afterwards (see execute()).
                scene.frame_set(i)
                for track in self.bone_tracks:
                    values, pose_bone = track[0], track[1]
                    # Append the pose matrix, parent-relative when possible,
                    # to the value list shared with the exported track.
                    parent = pose_bone.parent
                    if parent:
                        values += self.write_matrix((parent.matrix.inverted_safe() @ pose_bone.matrix))
                    else:
                        values += self.write_matrix(pose_bone.matrix)
def export_object(self, bobject, scene, parento=None):
bobjectRef = self.bobjectArray.get(bobject)
if bobjectRef:
o = {}
o['type'] = structIdentifier[bobjectRef["objectType"]]
o['name'] = bobjectRef["structName"]
if bobject.parent_type == "BONE":
o['parent_bone'] = bobject.parent_bone
if bobjectRef["objectType"] == NodeTypeMesh:
objref = bobject.data
if not objref in self.meshArray:
self.meshArray[objref] = {"structName" : objref.name, "objectTable" : [bobject]}
else:
self.meshArray[objref]["objectTable"].append(bobject)
oid = self.meshArray[objref]["structName"]
o['data_ref'] = oid
o['dimensions'] = self.calc_aabb(bobject)
o['transform'] = {}
o['transform']['values'] = self.writ |
ScienceWorldCA/domelights | backend/artnet-bridge/artnet-server.py | Python | apache-2.0 | 234 | 0.012821 | from artnet import *
import SocketServer
import time, os, random, datetime, sys
import argparse
import socket
import struct
from subprocess import Popen, PIPE, STDOUT
import glob
# Module configuration: target Art-Net node address and port.
DEBUG = False
UDP_IP = "2.0.0.61"
UDP_PORT = 6454  # standard Art-Net UDP port
CBien/django-alert | test_project/manage.py | Python | mit | 698 | 0.004298 | #!/usr/bin/env python
try:
    import settings # Assumed to be in the same directory.
except ImportError:
    import sys
    sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
    sys.exit(1)

if __name__ == "__main__":
    try:
        # Django >= 1.4 style entry point.
        from django.core.management import execute_from_command_line
        execute_from_command_line()
    except ImportError:
        # Fall back to the legacy (pre-1.4) runner.
        from django.core.management import execute_manager
        execute_manager(settings)
shadowoneau/skylines | skylines/commands/tracking/stats.py | Python | agpl-3.0 | 4,007 | 0.001248 | from flask_script import Command, Option
from datetime import timedelta
from itertools import chain
from skylines.database import db
from skylines.model import TrackingFix, User
class Stats(Command):
""" Analyse live tracks and output statistics """
option_list = (
Option('user', type=int, help='a user ID'),
Option('--json', action='store_true', help='enable JSON output'),
)
def run(self, user, json):
stats = self.gather_statistics(user)
if json:
from flask import json
print json.dumps(stats)
else:
self.print_statistics(stats)
def get_pilot(self, user_id):
pilot = User.get(user_id)
return dict(name=pilot.name, id=pilot.id)
def get_base_query(self, user_id):
return db.session.query(TrackingFix) \
.filter_by(pilot_id=user_id) \
.order_by(TrackingFix.time)
def gather_sessions_statistics(self, user_id):
sessions = []
session = None
last_fix = None
for fix in chain(self.get_base_query(user_id), [None]):
is_start = (last_fix is None)
is_end = (fix is None)
# check if this is a new live tracking session (dt > 3 hours)
| dt = (fix.time - last_fix.time) if (fix and last_fix) else None
is_new_session = dt an | d dt > timedelta(hours=3)
# update current session
if not (is_start or is_new_session or is_end):
session['num_fixes'] += 1
dt_secs = dt.total_seconds()
session['min_dt'] = min(dt_secs, session.get('min_dt', 999999))
session['max_dt'] = max(dt_secs, session.get('max_dt', 0))
# save last_fix in session and append it to the session list
if last_fix and (is_end or is_new_session):
session['end'] = last_fix.time
duration = (session.get('end') - session.get('start')).total_seconds()
if session.get('num_fixes') > 1 and duration > 0:
session['avg_dt'] = duration / (session.get('num_fixes') - 1)
session['quality'] = session.get('min_dt') / session.get('avg_dt')
sessions.append(session)
# start a new session
if fix and (is_start or is_new_session):
session = dict()
session['start'] = fix.time
session['num_fixes'] = 1
last_fix = fix
return sessions
def gather_statistics(self, user):
stats = dict()
stats['pilot'] = self.get_pilot(user)
stats['num_fixes'] = self.get_base_query(user).count()
stats['sessions'] = self.gather_sessions_statistics(user)
return stats
def print_statistics(self, stats):
pilot = stats.get('pilot')
sessions = stats.get('sessions')
print 'Live tracking statistics for user: {} (ID: {})'.format(pilot.get('name'), pilot.get('id'))
print
print 'Number of sessions: {}'.format(len(sessions))
print 'Number of received fixes: {}'.format(stats.get('num_fixes'))
if sessions:
print
print 'Sessions:'
for session in sessions:
self.print_session(session)
def print_session(self, session):
start = session.get('start')
end = session.get('end')
duration = end - start
duration -= timedelta(microseconds=duration.microseconds)
print '{date} - {start}-{end} - {duration} - Q {quality:04.2%} - {num_fixes} fixes (dt: {min_dt:.1f}, avg {avg_dt:.1f})'.format(
date=start.strftime('%d.%m.%Y'),
start=start.strftime('%H:%M'),
end=end.strftime('%H:%M'),
duration=duration,
quality=session.get('quality', 1),
num_fixes=session.get('num_fixes'),
min_dt=session.get('min_dt', 0),
avg_dt=session.get('avg_dt', 0))
|
ThomasYeoLab/CBIG | stable_projects/fMRI_dynamics/Kong2021_pMFM/part2_pMFM_control_analysis/Individual_analysis/scripts/CBIG_pMFM_step5_validation_IndividualGrad.py | Python | mit | 3,061 | 0.000653 | # /usr/bin/env python
'''
Written by Kong Xiaolu and CBIG under MIT license:
https://github.com/ThomasYeoLab/CBIG/blob/master/LICENSE.md
'''
import os
import numpy as np
import torch
import CBIG_pMFM_basic_functions as fc
def CBIG_mfm_validation_desikan_main(gpu_index=0, subject=1):
    '''
    This function is to validate the estimated parameters of mean field model.
    The objective function is the summation of FC correlation cost and FCD KS statistics cost.

    Args:
        gpu_index: index of gpu used for optimization
        subject:   index of the subject whose trained parameters are validated
    Returns:
        None
    '''

    # Setting GPU
    torch.cuda.set_device(gpu_index)

    # Create output folder
    subject_path = '../../../input/Desikan_input/individual_input/subject' + str(
        subject) + '/'
    input_path = '../output/subject' + str(subject) + '/gradient/training/'
    output_path = '../output/subject' + str(subject) + '/gradient/validation/'
    if not os.path.isdir(output_path):
        os.makedirs(output_path)

    # Loading myelin and gradient data
    highest_order = 1
    gradient_data = fc.csv_matrix_read(
        '../../../input/Desikan_input/rsfc_gradient.csv')
    gradient_data = gradient_data[:, 0]
    n_node = gradient_data.shape[0]
    # Design matrix of gradient powers: column i holds gradient**i.
    amatrix = np.zeros((n_node, highest_order + 1))
    for i in range(highest_order + 1):
        amatrix[:, i] = gradient_data**(i)
    template_mat = amatrix

    # Setting hyper-parameters
    n_trial = 2
    vali_dup = 20

    for i in range(1, 8):
        random_seed_cuda = i + 100
        torch.cuda.manual_seed(random_seed_cuda)
        load_file = ['random_initialization_', str(i), '.csv']
        load_path = [input_path] + load_file
        xmin = fc.csv_matrix_read(''.join(load_path))
        # Drop all-zero (unused) candidate columns; the last three rows hold
        # the training costs, the rest are the polynomial coefficients.
        x_nonzero = xmin[:, xmin[0, :] != 0]
        x_mass = x_nonzero[0:-3, :]
        result_save = np.zeros((6 + 3 * n_node + 1, x_nonzero.shape[1]))
        result_save[0:3, :] = x_nonzero[-3:, :]
        # Expand coefficient rows into per-node parameters via the design
        # matrix: recurrent weight w, external input I, and noise sigma.
        para_w = template_mat @ x_mass[0:highest_order + 1, :]
        para_I = template_mat @ x_mass[highest_order + 1:2 *
                                       (highest_order + 1), :]
        para_sigma = template_mat @ x_mass[2 * (highest_order + 1) +
                                           1:x_mass.shape[0], :]
        arx_mass = np.concatenate(
            (para_w, para_I,
             x_mass[2 * (highest_order + 1):2 * (highest_order + 1) + 1, :],
             para_sigma), 0)
        result_save[6:, :] = arx_mass

        # Validate in batches of 50 candidates per trial.
        for k in range(n_trial):
            in_para = arx_mass[:, 50 * k:50 * (k + 1)]
            vali_total, vali_ks, vali_corr = fc.CBIG_combined_cost_validation(
                in_para, vali_dup, subject_path)
            result_save[3, 50 * k:50 * (k + 1)] = vali_corr
            result_save[4, 50 * k:50 * (k + 1)] = vali_ks
            result_save[5, 50 * k:50 * (k + 1)] = vali_total

        save_path = [output_path] + load_file
        np.savetxt(''.join(save_path), result_save, delimiter=',')
if __name__ == '__main__':
    # Script entry point: validate subject 1 on GPU 0.
    CBIG_mfm_validation_desikan_main(subject=1, gpu_index=0)
|
kahuang/jarvis | jarvis/modules/coinbase/watcher.py | Python | mit | 8,706 | 0.004824 | import cPickle
import logging
import numpy
import os
import time
from collections import deque
from copy import deepcopy
from datetime import datetime
from pytz import timezone
from threading import Event, Thread
from coinbase.wallet.client import Client
from jarvis.utils.messaging.client import TwilioMessenger
from jarvis.modules.base import JarvisThreadedModule
def configure_debug_logging():
    # Route all records at DEBUG level and above to the root logger.
    logging.basicConfig(level=logging.DEBUG)
def load_coinbase_config():
    """Read Coinbase API credentials from the environment.

    Returns a ``(key, secret)`` tuple; raises when either variable is unset.
    """
    key = os.environ.get('COINBASE_KEY')
    secret = os.environ.get('COINBASE_SECRET')
    if not (key and secret):
        raise Exception('Coinbase config not configured properly')
    return (key, secret)
def load_from_file(path):
    """Unpickle and return the object stored at *path*, or None if absent."""
    if not os.path.exists(path):
        return None
    with open(path, 'r') as f:
        return cPickle.loads(f.read())
def store_to_file(path, obj):
    """Pickle *obj* and write it to *path*, overwriting any existing file."""
    serialized = cPickle.dumps(obj)
    with open(path, 'w') as f:
        f.write(serialized)
class CoinbaseClient(object):
    """Thin wrapper around the Coinbase API client.

    Credentials are pulled from the environment via load_coinbase_config().
    """

    def __init__(self):
        self.api_key, self.api_secret = load_coinbase_config()
        self.client = Client(self.api_key, self.api_secret)

    def do(self, func, *args, **kwargs):
        """Invoke the named client method with the given arguments."""
        method = getattr(self.client, func)
        return method(*args, **kwargs)
class TickerTimeseries(object):
    """Bounded price series with naive anomaly / large-movement detection.

    Two deques are kept in lockstep: ``timeseries`` (anomaly detection over
    a sliding window) and ``large_movement_timeseries`` (reset every time a
    movement is reported, so each alert covers a fresh interval).
    """

    def __init__(self, max_length, recent_cutoff,
                 load_path=None, poll_period=30, name=None):
        # Resume a previously persisted series when one is available.
        self.timeseries = load_from_file(load_path)
        if not self.timeseries:
            self.timeseries = deque(maxlen=max_length)
        self.large_movement_timeseries = deepcopy(self.timeseries)
        self.recent_cutoff = recent_cutoff
        self.max_length = max_length
        self.poll_period = poll_period  # seconds between appended samples
        self.name = name

    def append(self, val):
        """Record a new price sample in both series."""
        self.timeseries.append(val)
        self.large_movement_timeseries.append(val)

    @property
    def head(self):
        """Most recent sample."""
        return self.timeseries[-1]

    @property
    def tail(self):
        """Oldest retained sample."""
        return self.timeseries[0]

    @property
    def mean(self):
        return numpy.mean(self.timeseries)

    @property
    def length(self):
        return len(self.timeseries)

    @classmethod
    def anomaly(cls, series, recent_cutoff):
        '''
        Naive anomaly detection. Given a series it computes
        the standard deviation and returns True if any of the values
        in the last :recent_cutoff points are more than
        3 standard deviations above the mean

        :series sequence of timeseries data (list or deque)
        :recent_cutoff only consider anomalies on the most recent points
        '''
        std_dev = numpy.std(series)
        mean = numpy.mean(series)
        # Materialize as a list before slicing: deques do not support slice
        # syntax, and is_anomalous() passes the deque straight in.
        for point in list(series)[-recent_cutoff:]:
            abs_diff = abs(point - mean)
            if abs_diff >= std_dev * 3 and abs_diff >= 3:
                return True
        return False

    def is_anomalous(self):
        # If we don't have enough data, don't do anything
        if len(self.timeseries) < self.recent_cutoff:
            return False
        return self.anomaly(self.timeseries, self.recent_cutoff)

    @classmethod
    def large_movement(cls, series):
        """Return True when the first->last change exceeds 3% of the start."""
        if float(abs(series[0] - series[-1])) / series[0] > 0.03:
            return True
        return False

    def is_large_movement(self):
        """Return a notification string for a >3% move, else None.

        Reporting resets the movement series to only the latest sample so
        the next alert measures from here.
        """
        if self.large_movement(self.large_movement_timeseries):
            msg = MOVEMENT_NOTIFICATION % \
                (self.name,
                 len(self.large_movement_timeseries) * self.poll_period / 60,
                 self.large_movement_timeseries[0],
                 self.large_movement_timeseries[-1])
            self.large_movement_timeseries = deque(
                [self.large_movement_timeseries[-1]],
                maxlen=self.max_length)
            return msg
        return None
ANOMALY_NOTIFICATION = \
'''Anomalous bitcoin price activity detected. Mean price over the
past %d minutes is %.2f, current price is %.2f'''
MOVEMENT_NOTIFICATION = \
'''Large %s movement detected. Price %d minutes ago was %.2f,
current price is %.2f'''
class CoinbaseWatcher(object):
    """Polls Coinbase prices and texts alerts on large BTC/ETH movements.

    Ticker state and sent messages are persisted under /tmp so a restart
    resumes where the previous run stopped.
    """
    POLL_PERIOD = 30          # seconds between price polls
    RECENT_DATA = 60 * 5      # window (seconds) considered "recent"
    MAX_LENGTH_MULTIPLE = 12 * 24
    COOLDOWN_TICKS = 10       # polls to wait between two notifications
    BTCTICKERPATH = "/tmp/bitccointicker"
    ETHTICKERPATH = "/tmp/ethticker"
    MSGPATH = "/tmp/bitcoinmsgs"

    def __init__(self, stop):
        # Integer number of samples in the "recent" window (// keeps the
        # deque maxlen an int on both Python 2 and 3).
        recent_points = self.RECENT_DATA // self.POLL_PERIOD
        self.twilio_client = TwilioMessenger()
        self.coinbase_client = CoinbaseClient()
        self.btc_timeseries = TickerTimeseries(
            max_length=recent_points*self.MAX_LENGTH_MULTIPLE,
            recent_cutoff=recent_points,
            load_path=self.BTCTICKERPATH,
            poll_period=self.POLL_PERIOD,
            name='Bitcoin')
        self.eth_timeseries = TickerTimeseries(
            max_length=recent_points*self.MAX_LENGTH_MULTIPLE,
            recent_cutoff=recent_points,
            load_path=self.ETHTICKERPATH,
            poll_period=self.POLL_PERIOD,
            name='Ethereum')
        self.cooldown = 0
        self.stop = stop
        self.sent_messages = load_from_file(self.MSGPATH)
        if not self.sent_messages:
            # Keep only the three most recent notifications.
            self.sent_messages = deque(maxlen=3)

    @property
    def raw_btc_timeseries(self):
        return self.btc_timeseries.timeseries

    @property
    def raw_eth_timeseries(self):
        return self.eth_timeseries.timeseries

    @property
    def in_cooldown(self):
        # NOTE: reading this property ticks the cooldown counter down by one.
        self.cooldown = max(0, self.cooldown - 1)
        if self.cooldown <= 0:
            return False
        return True

    def initiate_cooldown(self):
        self.cooldown = self.COOLDOWN_TICKS

    def start(self):
        """Poll prices until the stop event is set, then persist state."""
        while not self.stop.is_set():
            try:
                spot_price = self.coinbase_client.do(
                    'get_spot_price', currency_pair='BTC-USD')
                self.btc_timeseries.append(float(spot_price['amount']))
                # coinbase client doesn't actually support currency_pair
                rates = self.coinbase_client.do('get_exchange_rates')
                self.eth_timeseries.append(1 / float(rates['rates']['ETH']))
                if not self.in_cooldown:
                    msg = self.btc_timeseries.is_large_movement()
                    if msg:
                        self.twilio_client.send_message(msg)
                        self.sent_messages.append((msg, time.time()))
                        self.initiate_cooldown()
                    msg = self.eth_timeseries.is_large_movement()
                    if msg:
                        self.twilio_client.send_message(msg)
                        self.sent_messages.append((msg, time.time()))
                        self.initiate_cooldown()
            except Exception:
                # Keep the watcher alive across transient API failures.
                logging.exception("Exception in main loop")
            time.sleep(self.POLL_PERIOD)

        # Persist state so the next run resumes from here.
        store_to_file(self.MSGPATH, self.sent_messages)
        store_to_file(self.BTCTICKERPATH, self.btc_timeseries.timeseries)
        store_to_file(self.ETHTICKERPATH, self.eth_timeseries.timeseries)
class CoinbaseWatcherModule(JarvisThreadedModule):
    def init_module(self, event):
        # Build the watcher (event is its stop signal) and hand the
        # unstarted polling thread back to the module framework.
        self.coinbase_watcher = CoinbaseWatcher(event)
        return Thread(target=self.coinbase_watcher.start)
    def get_recent_messages(self):
        # Newest-first list of (message, human-readable Pacific time) pairs.
        return [
            (msg, self.convert_timestamp(timestamp)) \
            for (msg,timestamp) in \
            reversed(self.coinbase_watcher.sent_messages)
        ]
    @classmethod
    def convert_timestamp(cls, timestamp):
        # Render a Unix timestamp as US Pacific local time.
        # NOTE(review): "US/Pacific-New" is a legacy tz alias -- confirm it
        # still resolves in the deployed pytz version.
        pacific = timezone("US/Pacific-New")
        utc = timezone("UTC")
        return utc.localize(datetime.utcfromtimestamp(
            timestamp)).astimezone(pacific).strftime('%Y-%m-%d %H:%M:%S')
def get_bitcoin_ticker_timeseries(self):
seconds_per_point = self.coinbase_watcher.POLL_PERIOD
now = time.time()
return [
{
'date' : self.convert_timestamp(now-seconds_per_point*i),
'value' : val
} for i,val in enumerate(reversed(
self.coinbase_watcher.ra |
gogobook/Spirit | spirit/comment/urls.py | Python | mit | 1,185 | 0.003376 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf.urls import url, include
import spirit.comment.bookmark.urls
import spirit.comment.flag.urls
import spirit.comment.history.urls
import spirit.comment.like.urls
from . import views
# Comment endpoints plus the nested bookmark/flag/history/like URL trees.
urlpatterns = [
    url(r'^(?P<topic_id>\d+)/publish/$', views.publish, name='publish'),
    url(r'^(?P<topic_id>\d+)/publish/(?P<pk>\d+)/quote/$', views.publish, name='publish'),
    url(r'^(?P<pk>\d+)/update/$', views.update, name='update'),
    url(r'^(?P<pk>\d+)/find/$', views.find, name='find'),
    url(r'^(?P<topic_id>\d+)/move/$', views.move, name='move'),
    url(r'^(?P<pk>\d+)/delete/$', views.delete, name='delete'),
    url(r'^(?P<pk>\d+)/undelete/$', views.delete, kwargs={'remove': False, }, name='undelete'),
    url(r'^upload/$', views.image_upload_ajax, name='image-upload-ajax'),
    url(r'^bookmark/', include(spirit.comment.bookmark.urls, namespace='bookmark')),
    url(r'^flag/', include(spirit.comment.flag.urls, namespace='flag')),
    url(r'^history/', include(spirit.comment.history.urls, namespace='history')),
    url(r'^like/', include(spirit.comment.like.urls, namespace='like')),
]
class TimestampAntiStealingLinkConfig(dict):
    """Bucket configuration for timestamp-based anti-stealing (signed) links.

    Behaves as a dict keyed with the JSON wire names ('enabled',
    'primaryKey', 'secondaryKey'), exposed through snake_case properties.
    """

    def __init__(self, json):
        if json is None:
            # GalaxyFDSClientException is provided by the surrounding SDK.
            raise GalaxyFDSClientException("Json data cannot be None")
        # Missing keys default to None, matching the original per-field
        # presence checks.
        self.enabled = json.get('enabled')
        self.primary_key = json.get('primaryKey')
        self.secondary_key = json.get('secondaryKey')

    @property
    def enabled(self):
        return self['enabled']

    @enabled.setter
    def enabled(self, enabled):
        self['enabled'] = enabled

    @property
    def primary_key(self):
        return self['primaryKey']

    @primary_key.setter
    def primary_key(self, primary_key):
        self['primaryKey'] = primary_key

    @property
    def secondary_key(self):
        return self['secondaryKey']

    @secondary_key.setter
    def secondary_key(self, secondary_key):
        self['secondaryKey'] = secondary_key
jonludlam/xen | tools/python/xen/util/oshelp.py | Python | gpl-2.0 | 961 | 0.014568 | import fcntl
import os
def close_fds(pass_fds=()):
    """Close every file descriptor from 3 up to the soft fd limit.

    Descriptors listed in *pass_fds* (and stdin/stdout/stderr) stay open.
    """
    try:
        MAXFD = os.sysconf('SC_OPEN_MAX')
    except (AttributeError, ValueError, OSError):
        # sysconf may be missing (non-POSIX) or not know the key; fall back
        # to a conservative default instead of swallowing everything.
        MAXFD = 256
    for i in range(3, MAXFD):
        if i in pass_fds:
            continue
        try:
            os.close(i)
        except OSError:
            # Most fds in the range are simply not open.
            pass
def fcntl_setfd_cloexec(file, bool):
    """Set (bool truthy) or clear the close-on-exec flag on *file*'s fd."""
    # Parameter names shadow builtins but are part of the public interface.
    flags = fcntl.fcntl(file, fcntl.F_GETFD)
    if bool:
        flags |= fcntl.FD_CLOEXEC
    else:
        flags &= ~fcntl.FD_CLOEXEC
    fcntl.fcntl(file, fcntl.F_SETFD, flags)
def waitstatus_description(st):
    """Return a human-readable description of an os.wait() status word."""
    if os.WIFEXITED(st):
        code = os.WEXITSTATUS(st)
        return "exited with nonzero status %i" % code if code else "exited"
    if os.WIFSIGNALED(st):
        desc = "died due to signal %i" % os.WTERMSIG(st)
        return desc + " (core dumped)" if os.WCOREDUMP(st) else desc
    return "failed with unexpected wait status %i" % st
ProfessorX/Config | .PyCharm30/system/python_stubs/-1247972723/PyKDE4/kio/KDirWatch.py | Python | gpl-2.0 | 2,320 | 0.011207 | # encoding: utf-8
# module PyKDE4.kio
# from /usr/lib/python2.7/dist-packages/PyKDE4/kio.so
# by generator 1.135
# no doc
# imports
import PyKDE4.kdeui as __PyKDE4_kdeui
import PyQt4.QtCore as __PyQt4_QtCore
import PyQt4.QtGui as __PyQt4_QtGui
class KDirWatch(__PyQt4_QtCore.QObject):
    # no doc -- auto-generated SIP stub: the generator could not recover the
    # C++ signatures, so every method below is a (*args, **kwargs) placeholder.
    def addDir(self, *args, **kwargs): # real signature unknown
        pass

    def addFile(self, *args, **kwargs): # real signature unknown
        pass

    def contains(self, *args, **kwargs): # real signature unknown
        pass

    def created(self, *args, **kwargs): # real signature unknown
        pass

    def ctime(self, *args, **kwargs): # real signature unknown
        pass

    def deleted(self, *args, **kwargs): # real signature unknown
        pass

    def dirty(self, *args, **kwargs): # real signature unknown
        pass

    def exists(self, *args, **kwargs): # real signature unknown
        pass

    def internalMethod(self, *args, **kwargs): # real signature unknown
        pass

    def isStopped(self, *args, **kwargs): # real signature unknown
        pass

    def removeDir(self, *args, **kwargs): # real signature unknown
        pass

    def removeFile(self, *args, **kwargs): # real signature unknown
        pass

    def restartDirScan(self, *args, **kwargs): # real signature unknown
        pass

    def self(self, *args, **kwargs): # real signature unknown
        pass

    def setCreated(self, *args, **kwargs): # real signature unknown
        pass

    def setDeleted(self, *args, **kwargs): # real signature unknown
        pass

    def setDirty(self, *args, **kwargs): # real signature unknown
        pass

    def startScan(self, *args, **kwargs): # real signature unknown
        pass

    def statistics(self, *args, **kwargs): # real signature unknown
        pass

    def stopDirScan(self, *args, **kwargs): # real signature unknown
        pass

    def stopScan(self, *args, **kwargs): # real signature unknown
        pass

    def __init__(self, *args, **kwargs): # real signature unknown
        pass

    # Enum-style constants mirrored from the C++ class.
    DNotify = 2
    FAM = 0
    INotify = 1
    Method = None # (!) real value is ''
    Stat = 3
    WatchDirOnly = 0
    WatchFiles = 1
    WatchMode = None # (!) real value is ''
    WatchModes = None # (!) real value is ''
    WatchSubDirs = 2
|
mishbahr/django-connected | connected_accounts/providers/google.py | Python | bsd-3-clause | 1,465 | 0 | import logging
from django.utils.translation import ugettext_lazy as _
from connected_accounts.conf import settings
from connected_accounts.provider_pool import providers
from .base import OAuth2Provider, ProviderAccount
logger = logging.getLogger('connected_accounts')
class GoogleAccount(ProviderAccount):
    """Connected-account wrapper exposing Google profile fields."""

    def get_profile_url(self):
        return self.account.extra_data.get('link')

    def get_avatar_url(self):
        return self.account.extra_data.get('picture')

    def to_str(self):
        fallback = super(GoogleAccount, self).to_str()
        return self.account.extra_data.get('name', fallback)

    def extract_common_fields(self):
        data = self.account.extra_data
        return {
            'email': data.get('email'),
            'last_name': data.get('family_name'),
            'first_name': data.get('given_name'),
        }
class GoogleProvider(OAuth2Provider):
    """OAuth2 provider description for Google+ sign-in."""
    id = 'google'
    name = _('Google+')
    account_class = GoogleAccount

    authorization_url = 'https://accounts.google.com/o/oauth2/auth'
    access_token_url = 'https://accounts.google.com/o/oauth2/token'
    profile_url = 'https://www.googleapis.com/oauth2/v1/userinfo'

    consumer_key = settings.CONNECTED_ACCOUNTS_GOOGLE_CONSUMER_KEY
    consumer_secret = settings.CONNECTED_ACCOUNTS_GOOGLE_CONSUMER_SECRET
    scope = settings.CONNECTED_ACCOUNTS_GOOGLE_SCOPE
    auth_params = settings.CONNECTED_ACCOUNTS_GOOGLE_AUTH_PARAMS
# Make the provider discoverable through the global provider pool.
providers.register(GoogleProvider)
|
qiyeboy/SpiderBook | ch11/APISpider/SpiderMan.py | Python | mit | 1,040 | 0.007692 | #coding:utf-8
from APISpider.SpiderDataOutput import SpiderDataOutput
from APISpider.SpiderDownloader import SpiderDownloader
from APISpider.SpiderParser import SpiderParser
class SpiderMan(object):
def __init__(self):
self.downloader = SpiderDownloader()
self.parser = SpiderParser()
self.output = SpiderDataOutput()
def crawl(self,root_url):
content = self.downloader.download(root_ | url)
for info in self.parser.get_kw_cat(content):
print info
cat_name = info['cat_name']
detail_url = 'http://ts.kuwo.cn/service/getlist.v31.php?act=detail&id=%s'%info['id']
content = self.downloader.download(detail_url)
details = self.parser.get_kw_detail(content)
print detail_url
self.output.output_html | (self.output.filepath,details)
self.output.ouput_end(self.output.filepath)
if __name__ =="__main__":
    # Entry point: crawl the top-level category listing (id=50).
    spider = SpiderMan()
    spider.crawl('http://ts.kuwo.cn/service/getlist.v31.php?act=cat&id=50')
|
rpetersburg/fiber_properties | fiber_properties/calibrated_image.py | Python | mit | 11,258 | 0.001244 | """calibrated_image.py was written by Ryan Petersburg for use with fiber
characterization on the EXtreme PREcision Spectrograph
"""
import numpy as np
from .base_image import BaseImage
from .numpy_array_handler import filter_image, subframe_image
class CalibratedImage(BaseImage):
"""Fiber face image analysis class
Class that contains calibration images and executes corrections based on
those images
Attributes
----------
dark : str, array_like, or None
The input used to set the dark image. See
BaseImage.convert_image_to_array() for details
ambient : str, array_like, or None
The input used to set the ambient image. See
BaseImage.convert_image_to_array() for details
flat : str, array_like, or None
The input used to set the flat image. See
BaseImage.convert_image_to_array() for details
kernel_size : int (odd)
The kernel side length used when filtering the image. This value may
need to be tweaked, especially with few co-added images, due to random
noise. The filtered image is used for the centering algorithms, so for
a "true test" use kernel_size=1, but be careful, because this may
lead to needing a fairly high threshold for the noise.
new_calibration : bool
Whether or not self.calibration has been set with new images
Args
----
image_input : str, array_like, or None, optional
See BaseImage class for details
dark : str, array_like, or None, optional
Image input to instantiate BaseImage for dark image
ambient : str, array_like, or None, optional
Image input to instantiate BaseImage for ambient image
flat : str, array_like, or None, optional
Image input to instantiate BaseImage for flat image
kernel_size : int (odd), optional
Set the kernel size for filtering
**kwargs : keworded arguments
Passed into the BaseImage superclass
"""
    def __init__(self, image_input, dark=None, ambient=None, flat=None,
                 kernel_size=9, **kwargs):
        # Calibration inputs; see the class docstring for accepted forms.
        self.dark = dark
        self.ambient = ambient
        self.flat = flat
        self.kernel_size = kernel_size
        # Marks that corrections must be recomputed rather than loaded
        # from a previously saved image file (see get_image()).
        self.new_calibration = True
        super(CalibratedImage, self).__init__(image_input, **kwargs)
#=========================================================================#
#==== Primary Image Getters ==============================================#
#=========================================================================#
def get_uncorrected_image(self):
"""Return the raw image without corrections or filtering.
Returns
-------
uncorrected_image : 2D numpy array
Raw image or average of images (depending on image_input)
"""
return self.convert_i | mage_to_array(self.image_input)
    def get_image(self):
        """Return the corrected image

        This method must be called to get access to the corrected 2D numpy
        array being analyzed. Attempts to access a previously saved image
        under self.image_file or otherwise applies corrections to the raw
        images pulled from their respective files

        Returns
        -------
        image : 2D numpy array
            Image corrected by calibration images
        """
        # Reuse the saved image only while the calibration set is unchanged.
        if self.image_file is not None and not self.new_calibration:
            return self.image_from_file(self.image_file)
        return self.execute_error_corrections(self.get_uncorrected_image())
def get_uncorrected_filtered_image(self, kernel_size=None, **kwargs):
"""Return a median filtered image
Args
----
kernel_size : {None, int (odd)}, optional
The side length of the kernel used to median filter the image. Uses
self.kernel_size if None.
Returns
-------
filtered_image : 2D numpy array
The stored image median filtered with the given kernel_size
"""
image = self.get_uncorrected_image()
if image is None:
return None
if kernel_size is None:
kernel_size = self.kernel_size
return filter_image(image, kernel_size, **kwargs)
def get_filtered_image(self, kernel_size=None, **kwargs):
"""Return an error corrected and median filtered image
Returns
-------
filtered_image : 2D numpy array
The stored image median filtered with the given kernel_size and
error corrected using the given method
"""
image = self.get_image()
if image is None:
return None
if kernel_size is None:
kernel_size = self.kernel_size
return filter_image(image, kernel_size, **kwargs)
#=========================================================================#
#==== Calibration Image Getters ==========================================#
#=========================================================================#
    def get_dark_image(self):
        """Returns the dark image.

        Returns
        -------
        dark_image : 2D numpy array
            The dark image (loaded as-is, no calibration applied)
        """
        return BaseImage(self.dark).get_image()
    def get_ambient_image(self):
        """Returns the ambient image, corrected using the dark image.

        Returns
        -------
        ambient_image : 2D numpy array
            The ambient image after dark correction
        """
        return CalibratedImage(self.ambient, dark=self.dark).get_image()
    def get_flat_image(self):
        """Returns the flat image, corrected using the dark image.

        Returns
        -------
        flat_image : 2D numpy array
            The flat field image after dark correction
        """
        return CalibratedImage(self.flat, dark=self.dark).get_image()
    def set_dark(self, dark):
        """Sets the dark calibration image."""
        self.dark = dark
        # Force corrections to be recomputed on the next get_image().
        self.new_calibration = True
    def set_ambient(self, ambient):
        """Sets the ambient calibration image."""
        self.ambient = ambient
        # Force corrections to be recomputed on the next get_image().
        self.new_calibration = True
    def set_flat(self, flat):
        """Sets the flat calibration images."""
        self.flat = flat
        # Force corrections to be recomputed on the next get_image().
        self.new_calibration = True
#=========================================================================#
#==== Image Calibration Algorithm ========================================#
#=========================================================================#
def execute_error_corrections(self, image):
"""Applies corrective images to image
Applies dark image to the flat field and ambient images. Then applies
flat field and ambient image correction to the primary image
Args
----
image : 2D numpy array
Image to be corrected
Returns
-------
corrected_image : 2D numpy array
Corrected image
"""
if image is None:
return None
corrected_image = image
dark_image = self.get_dark_image()
if dark_image is not None and dark_image.shape != corrected_image.shape:
dark_image = subframe_image(dark_image, self.subframe_x,
self.subframe_y, self.width,
|
adviti/melange | thirdparty/google_appengine/lib/django_1_2/tests/modeltests/model_inheritance/tests.py | Python | apache-2.0 | 10,909 | 0.002933 | from operator import attrgetter
from django.conf import settings
from django.core.exceptions import FieldError
from django.db import connection
from django.test import TestCase
from models import (Chef, CommonInfo, ItalianRestaurant, ParkingLot, Place,
Post, Restaurant, Student, StudentWorker, Supplier, Worker, MixinModel)
class ModelInheritanceTests(TestCase):
def test_abstract(self):
    """Fields/methods from an abstract base are shared by children,
    but the abstract base itself is never a queryable model."""
    # The Student and Worker models both have 'name' and 'age' fields on
    # them and inherit the __unicode__() method, just as with normal Python
    # subclassing. This is useful if you want to factor out common
    # information for programming purposes, but still completely
    # independent separate models at the database level.
    w1 = Worker.objects.create(name="Fred", age=35, job="Quarry worker")
    # NOTE: fixed dataset-extraction corruption ("cre | ate") on this line.
    w2 = Worker.objects.create(name="Barney", age=34, job="Quarry worker")
    s = Student.objects.create(name="Pebbles", age=5, school_class="1B")
    self.assertEqual(unicode(w1), "Worker Fred")
    self.assertEqual(unicode(s), "Student Pebbles")
    # The children inherit the Meta class of their parents (if they don't
    # specify their own).
    self.assertQuerysetEqual(
        Worker.objects.values("name"), [
            {"name": "Barney"},
            {"name": "Fred"},
        ],
        lambda o: o
    )
    # Since Student does not subclass CommonInfo's Meta, it has the effect
    # of completely overriding it. So ordering by name doesn't take place
    # for Students.
    self.assertEqual(Student._meta.ordering, [])
    # However, the CommonInfo class cannot be used as a normal model (it
    # doesn't exist as a model).
    self.assertRaises(AttributeError, lambda: CommonInfo.objects.all())
    # A StudentWorker which does not exist is both a Student and Worker
    # which does not exist.
    self.assertRaises(Student.DoesNotExist,
        StudentWorker.objects.get, pk=12321321
    )
    self.assertRaises(Worker.DoesNotExist,
        StudentWorker.objects.get, pk=12321321
    )
    # MultipleObjectsReturned is also inherited.
    # This is written out "long form", rather than using __init__/create()
    # because of a bug with diamond inheritance (#10808)
    sw1 = StudentWorker()
    sw1.name = "Wilma"
    sw1.age = 35
    sw1.save()
    sw2 = StudentWorker()
    sw2.name = "Betty"
    sw2.age = 24
    sw2.save()
    self.assertRaises(Student.MultipleObjectsReturned,
        StudentWorker.objects.get, pk__lt=sw2.pk + 100
    )
    self.assertRaises(Worker.MultipleObjectsReturned,
        StudentWorker.objects.get, pk__lt=sw2.pk + 100
    )
def test_multiple_table(self):
post = Post.objects.create(title="Lorem Ipsum")
# The Post model has distinct accessors for the Comment and Link models.
post.attached_comment_set.create(content="Save $ on V1agr@", is_spam=True)
post.attached_link_set.create(
content="The Web framework for perfections with deadlines.",
url="http://www.djangoproject.com/"
)
# The Post model doesn't have an attribute called
# 'attached_%(class)s_set'.
self.assertRaises(AttributeError,
getattr, post, "attached_%(class)s_set"
)
# The Place/Restaurant/ItalianRestaurant models all exist as
# independent models. However, the subclasses also have transparent
# access to the fields of their ancestors.
# Create a couple of Places.
p1 = Place.objects.create(name="Master Shakes", address="666 W. Jersey")
p2 = Place.objects.create(name="Ace Harware", address="1013 N. Ashland")
# Test constructor for Restaurant.
r = Restaurant.objects.create(
name="Demon Dogs",
address="944 W. Fullerton",
serves_hot_dogs=True,
serves_pizza=False,
rating=2
)
# Test the constructor for ItalianRestaurant.
c = Chef.objects.create(name="Albert")
ir = ItalianRestaurant.objects.create(
name="Ristorante Miron",
address="1234 W. Ash",
serves_hot_dogs=False,
serves_pizza=False,
serves_gnocchi=True,
rating=4,
chef=c
)
self.assertQuerysetEqual(
ItalianRestaurant.objects.filter(address="1234 W. Ash"), [
"Ristorante Miron",
],
attrgetter("name")
)
ir.address = "1234 W. Elm"
ir.save()
self.assertQuerysetEqual(
ItalianRestaurant.objects.filter(address="1234 W. Elm"), [
"Ristorante Miron",
],
attrgetter("name")
)
# Make sure Restaurant and ItalianRestaurant have the right fields in
# the right order.
self.assertEqual(
[f.name for f in Restaurant._meta.fields],
["id", "name", "address", "place_ptr", "rating", "serves_hot_dogs", "serves_pizza", "chef"]
)
self.assertEqual(
[f.name for f in ItalianRestaurant._meta.fields],
["id", "name", "address", "place_ptr", "rating", "serves_hot_dogs", "serves_pizza", "chef", "restaurant_ptr", "serves_gnocchi"],
)
self.assertEqual(Restaurant._meta.ordering, ["-rating"])
# Even though p.supplier for a Place 'p' (a parent of a Supplier), a
# Restaurant object cannot access that reverse relation, since it's not
# part of the Place-Supplier Hierarchy.
self.assertQuerysetEqual(Place.objects.filter(supplier__name="foo"), [])
self.assertRaises(FieldError,
Restaurant.objects.filter, supplier__name="foo"
)
# Parent fields can be used directly in filters on the child model.
self.assertQuerysetEqual(
Restaurant.objects.filter(name="Demon Dogs"), [
"Demon Dogs",
],
attrgetter("name")
)
self.assertQuerysetEqual(
ItalianRestaurant.objects.filter(address="1234 W. Elm"), [
"Ristorante Miron",
],
attrgetter("name")
)
# Filters against the parent model return objects of the parent's type.
p = Place.objects.get(name="Demon Dogs")
self.assertTrue(type(p) is Place)
# Since the parent and child are linked by an automatically created
# OneToOneField, you can get from the parent to the child by using the
# child's name.
self.assertEqual(
p.restaurant, Restaurant.objects.get(name="Demon Dogs")
)
self.assertEqual(
Place.objects.get(name="Ristorante Miron").restaurant.italianrestaurant,
ItalianRestaurant.objects.get(name="Ristorante Miron")
)
self.assertEqual(
Restaurant.objects.get(name="Ristorante Miron").italianrestaurant,
ItalianRestaurant.objects.get(name="Ristorante Miron")
)
# This won't work because the Demon Dogs restaurant is not an Italian
# restaurant.
self.assertRaises(ItalianRestaurant.DoesNotExist,
lambda: p.restaurant.italianrestaurant
)
# An ItalianRestaurant which does not exist is also a Place which does
# not exist.
self.assertRaises(Place.DoesNotExist,
ItalianRestaurant.objects.get, name="The Noodle Void"
)
# MultipleObjectsReturned is also inherited.
self.assertRaises(Place.MultipleObjectsReturned,
Restaurant.objects.get, id__lt=12321
)
# Related objects work just as they normally do.
s1 = Supplier.objects.create(name="Joe's Chickens", address="123 Sesame St")
s1.customers = [r, ir]
s2 = Supplier.objects.create(name="Luigi's Pasta", address="456 Sesame St")
s2.customers = [ir]
# This won't work because the Place we select is not a Restaurant (it's
# a Supplier).
|
flatangle/flatlib | flatlib/tools/planetarytime.py | Python | mit | 4,632 | 0.004751 | """
This file is part of flatlib - (C) FlatAngle
Author: João Ventura (flatangleweb@gmail.com)
This module provides useful functions for handling
planetary times.
The most import element is the HourTable class
which handles all queries to the planetary rulers
and hour rulers, including the start and ending
datetimes of each hour ruler.
"""
from flatlib import const
from flatlib.ephem import ephem
from flatlib.datetime import Datetime
# Planetary rulers starting at Sunday.
# Indexed by day of week: 0 = Sunday ... 6 = Saturday.
DAY_RULERS = [
    const.SUN,
    const.MOON,
    const.MARS,
    const.MERCURY,
    const.JUPITER,
    const.VENUS,
    const.SATURN
]

# Night rulers, also indexed by day of week starting at Sunday.
NIGHT_RULERS = [
    const.JUPITER,
    const.VENUS,
    const.SATURN,
    const.SUN,
    const.MOON,
    const.MARS,
    const.MERCURY
]

# Planetary hours round list starting
# at Sunday's sunrise.
# nthRuler() cycles through this list modulo 7 to assign each
# successive planetary hour its ruler.
ROUND_LIST = [
    const.SUN,
    const.VENUS,
    const.MERCURY,
    const.MOON,
    const.SATURN,
    const.JUPITER,
    const.MARS
]
# === Private functions === #
def nthRuler(n, dow):
    """ Returns the ruler of the n-th hour since the last sunrise
    for the given day of week. Both arguments are zero based.
    """
    return ROUND_LIST[(dow * 24 + n) % 7]
def hourTable(date, pos):
    """ Builds the planetary hour table for a date and position.

    The result covers both the diurnal and the nocturnal hour
    sequences: 24 entries (12 + 12), each of the form
    [startJD, endJD, ruler].
    """
    sunrise = ephem.lastSunrise(date, pos)
    sunset = ephem.nextSunset(sunrise, pos)
    sunriseNext = ephem.nextSunrise(date, pos)
    dow = sunrise.date.dayofweek()

    entries = []
    # Diurnal half first (hour offset 0), then nocturnal (offset 12);
    # each half is split into 12 equal planetary hours.
    segments = [
        (sunrise.jd, sunset.jd, 0),
        (sunset.jd, sunriseNext.jd, 12),
    ]
    for (begin, finish, offset) in segments:
        span = (finish - begin) / 12.0
        for i in range(12):
            start = begin + i * span
            end = start + span
            entries.append([start, end, nthRuler(i + offset, dow)])
    return entries
def getHourTable(date, pos):
    """ Builds and returns an HourTable for the given date/position. """
    return HourTable(hourTable(date, pos), date)
# ------------------- #
# HourTable Class #
# ------------------- #
class HourTable:
    """ This class represents a Planetary Hour Table
    and includes methods to access its properties.

    `table` is a list of 24 entries (12 diurnal + 12 nocturnal),
    each of the form [startJD, endJD, ruler].
    """

    def __init__(self, table, date):
        self.table = table
        self.date = date
        self.currIndex = self.index(date)

    def index(self, date):
        """ Returns the index of a date in the table, or None
        if the date falls outside every hour interval. """
        for (i, (start, end, ruler)) in enumerate(self.table):
            if start <= date.jd <= end:
                return i
        return None

    # === Properties === #

    def dayRuler(self):
        """ Returns the current day ruler. """
        return self.table[0][2]

    def nightRuler(self):
        """ Returns the current night ruler. """
        return self.table[12][2]

    def currRuler(self):
        """ Returns the current day or night
        ruler considering if it's day or night.
        """
        if self.currIndex < 12:
            return self.dayRuler()
        else:
            return self.nightRuler()

    def hourRuler(self):
        """ Returns the current hour ruler. """
        # Fixed dataset-extraction corruption ("| [2]") on this line.
        return self.table[self.currIndex][2]

    def currInfo(self):
        """ Returns information about the current
        planetary time.
        """
        return self.indexInfo(self.currIndex)

    def indexInfo(self, index):
        """ Returns information about a specific
        planetary time (mode, rulers, hour number and
        start/end datetimes).
        """
        entry = self.table[index]
        info = {
            # Default is diurnal
            'mode': 'Day',
            'ruler': self.dayRuler(),
            'dayRuler': self.dayRuler(),
            'nightRuler': self.nightRuler(),
            'hourRuler': entry[2],
            'hourNumber': index + 1,
            'tableIndex': index,
            'start': Datetime.fromJD(entry[0], self.date.utcoffset),
            'end': Datetime.fromJD(entry[1], self.date.utcoffset)
        }
        if index >= 12:
            # Set information as nocturnal
            info.update({
                'mode': 'Night',
                'ruler': info['nightRuler'],
                'hourNumber': index + 1 - 12
            })
        return info
|
clebergnu/autotest | client/common_lib/profiler_manager.py | Python | gpl-2.0 | 3,234 | 0.00402 | import os, sys
import common
from autotest_lib.client.common_lib import error, utils, packages
class ProfilerNotPresentError(error.JobError):
    """ Raised when a profiler requested by name cannot be found. """

    # Fixed dataset-extraction corruption ("__init_ | _") in the method name.
    def __init__(self, name, *args, **dargs):
        msg = "%s not present" % name
        error.JobError.__init__(self, msg, *args, **dargs)
class profiler_manager(object):
    """ Manages the set of profilers attached to a job.

    Concrete subclasses must implement load_profiler() to locate and
    instantiate a profiler module by name.
    """

    def __init__(self, job):
        self.job = job
        self.list = []                  # currently enabled profiler instances
        self.tmpdir = job.tmpdir
        self.profile_run_only = False   # run the job only with profiling on
        self.active_flag = False        # True between start() and stop()
        self.created_dirs = []          # iteration dirs created by report()

    def load_profiler(self, profiler, args, dargs):
        """ Given a name and args, loads a profiler, initializes it
        with the required arguments, and returns an instance of it. Raises
        a ProfilerNotPresentError if the module isn't found. """
        # Fixed dataset-extraction corruption ("ar | gs") in the signature.
        raise NotImplementedError("load_profiler not implemented")

    def add(self, profiler, *args, **dargs):
        """ Add a profiler """
        new_profiler = self.load_profiler(profiler, args, dargs)
        self.list.append(new_profiler)

    def delete(self, profiler):
        """ Remove a profiler by name """
        self.list = [p for p in self.list if p.name != profiler]

    def current_profilers(self):
        """ Returns a set of the currently enabled profiler names """
        return set(p.name for p in self.list)

    def present(self):
        """ Indicates if any profilers are enabled """
        return len(self.list) > 0

    def only(self):
        """ Returns True if job is supposed to be run only with profiling
        turned on, False otherwise """
        return self.profile_run_only

    def set_only(self, value):
        """ Changes the flag which determines whether or not the job is to be
        run without profilers at all """
        self.profile_run_only = value

    def before_start(self, test):
        """
        Override to do any setup needed before actually starting the profilers
        (this function is called before calling test.before_run_once() and
        profilers.start() in a profiled run).
        """
        pass

    def start(self, test):
        """ Start all enabled profilers """
        for p in self.list:
            p.start(test)
        self.active_flag = True

    def stop(self, test):
        """ Stop all enabled profilers """
        for p in self.list:
            p.stop(test)
        self.active_flag = False

    def active(self):
        """ Returns True if profilers are present and started, False
        otherwise """
        return self.present() and self.active_flag

    def report(self, test):
        """ Report on all enabled profilers; if the test tracks iterations,
        move the per-test profiling output into an iteration-specific
        subdirectory. """
        for p in self.list:
            p.report(test)
        if getattr(test, 'iteration', None):
            name = 'iteration.%s' % test.iteration
            iter_path = os.path.join(test.profdir, name)
            # Create the directory in-process instead of shelling out
            # to 'mkdir -p'.
            if not os.path.isdir(iter_path):
                os.makedirs(iter_path)
            self.created_dirs.append(name)
            for fname in os.listdir(test.profdir):
                # Skip the iteration directories themselves.
                if fname in self.created_dirs:
                    continue
                file_path = os.path.join(test.profdir, fname)
                iter_path_file = os.path.join(iter_path, fname)
                os.rename(file_path, iter_path_file)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.