repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
wxgeo/geophar | wxgeometrie/sympy/series/tests/test_approximants.py | Python | gpl-2.0 | 962 | 0.007277 | from sympy.core.compatibility import range
from sympy.series import approximants
from sympy import lucas, fibonacci, symbols, binomial
def test_approximants():
x, t = symbols("x,t")
g = [lucas(k) for k in range(16)]
assert [e for e in approximants(g)] == (
[2, -4/(x - 2), (5*x - 2)/(3*x - 1), (x - 2)/(x**2 + x - 1)] )
g = [lucas(k)+fibonacci(k+2) for k in range(16)]
assert [e for e in approximants(g)] == (
[3, -3 | /(x - 1), (3*x - 3)/(2*x - 1), -3/(x**2 + x - 1)] )
g = [lucas(k)**2 for k in range(16)]
assert [e for e in approximants(g)] == (
[4, -16/(x - 4), (35*x - 4)/(9*x - 1), (37*x - 28)/(13*x**2 + 11*x - 7),
(50*x**2 + 63*x - 52)/(37*x**2 + 19*x - 13),
(-x**2 - 7*x + 4)/(x**3 - 2*x**2 - 2*x + 1)] )
p = [sum(binomial(k,i)*x**i | for i in range(k+1)) for k in range(16)]
y = approximants(p, t, simplify=True)
assert next(y) == 1
assert next(y) == -1/(t*(x + 1) - 1)
|
edx/ecommerce | ecommerce/extensions/order/management/commands/tests/test_update_order_lines_partner.py | Python | agpl-3.0 | 2,921 | 0.001369 |
import ddt
from django.core.management import call_command
from django.core.management.base import CommandError
from mock import patch
from oscar.core.loading import get_model
from ecommerce.extensions.test.factories import create_order
from ecommerce.tests.factories import PartnerFactory
from ecommerce.tests.testcases import TestCase
LOGGER_NAME = 'ecommerce.extensions.order.management.commands.update_order_lines_partner'
OrderLine = get_model('order', 'Line')
@ddt.ddt
class UpdateOrderLinePartnerTests(TestCase):
"""Tests for update_order_lines_partner management command."""
PARTNER_CODE = 'testX'
YES_NO_PATCH_LOCATION = 'ecommerce.extensions.order.management.commands.update_order_lines_partner.query_yes_no'
def assert_error_log(self, error_msg, *args):
"""Helper to call command and assert error log."""
with self.assertRaisesRegex(CommandError, error_msg):
call_command('update_order_lines_partner', *args)
def test_partner_required(self):
"""Test that command raises partner required error."""
err_msg = 'Error: the following arguments are required: --partner'
self.assert_error_log(
err_msg,
'sku12345'
)
def test_partner_does_not_exist(self):
"""Test | that command raises partner does not exist error."""
self.assert_error_log(
'No Partner exists for code {}.'.format(self.PARTNER_CODE),
'sku12345',
'--partner={}'.format(self.PARTNER_CODE)
)
def test_one_or_more_sku_required(self):
"""Test that command raises one or more SKUs required | error."""
self.assert_error_log(
'update_order_lines_partner requires one or more <SKU>s.',
'--partner={}'.format(self.PARTNER_CODE)
)
@ddt.data(True, False)
def test_update_order_lines_partner(self, yes_no_value):
"""Test that command works as expected."""
new_partner = PartnerFactory(short_code=self.PARTNER_CODE)
order = create_order()
order_line = order.lines.first()
self.assertNotEqual(order_line.partner, new_partner)
with patch(self.YES_NO_PATCH_LOCATION) as mocked_yes_no:
mocked_yes_no.return_value = yes_no_value
call_command('update_order_lines_partner', order_line.partner_sku, '--partner={}'.format(self.PARTNER_CODE))
order_line = OrderLine.objects.get(partner_sku=order_line.partner_sku)
if yes_no_value:
# Verify that partner is updated
self.assertEqual(order_line.partner, new_partner)
self.assertEqual(order_line.partner_name, new_partner.name)
else:
# Verify that partner is not updated
self.assertNotEqual(order_line.partner, new_partner)
self.assertNotEqual(order_line.partner_name, new_partner.name)
|
jedie/PyHardlinkBackup | pyhardlinkbackup/backup_app/management/commands/add.py | Python | gpl-3.0 | 422 | 0 | from django.core.management.base import BaseCommand
# https://github.com/jedie/PyHardLinkBackup
from pyhardlinkbackup.phlb.add import add_all_backups
class Command(BaseCommand):
help = 'Scan all existing backup and add missing ones to database.'
def handle(self, *args | , **options):
self.stdout.write(f'\n\n{self.help}\n')
add_all_backups()
self.stdout.write(self.style.SUCCESS('done') | )
|
redfern314/amazedFirmware | bootloader/software/hellousb.py | Python | mit | 1,389 | 0.00504 |
import usb.core
class hellousb(object):
def __init__(self):
self.HELLO = 0
self.SET_VALS = 1
self.GET_VALS = 2
self.PRINT_VALS = 3
self.dev = usb.core.find(idVendor = 0x6666, idProduct = 0x0003)
if self.dev is None:
raise ValueError('no USB device found matching idVendor = 0x6666 and idProduct = 0x0003')
self.dev.set_configuration()
def close(self):
self.dev = None
def hello(self):
try:
self.dev.ctrl_transfer(0x40, self.HELLO)
except usb.core.USBError:
print "Could not send HELLO vendor reque | st."
def set_vals(self, val1, val2):
try:
self.dev.ctrl_transfer(0x40, self.SET_VALS, int(val1), int(val2))
except usb.core.USBError:
prin | t "Could not send SET_VALS vendor request."
def get_vals(self):
try:
ret = self.dev.ctrl_transfer(0xC0, self.GET_VALS, 0, 0, 4)
except usb.core.USBError:
print "Could not send GET_VALS vendor request."
else:
return [int(ret[0])+int(ret[1])*256, int(ret[2])+int(ret[3])*256]
def print_vals(self):
try:
self.dev.ctrl_transfer(0x40, self.PRINT_VALS)
except usb.core.USBError:
print "Could not send PRINT_VALS vendor request."
|
nadege/food-organizer | backend/apps/project/views.py | Python | gpl-3.0 | 1,258 | 0 | import logging
import os
from django.shortcuts import render
f | rom django.views.generic import View
from django.http import HttpResponse
from django.conf import settings
class FrontendAppView(View):
"""
Serves the compiled frontend entry point (only works i | f you have run `make
build-frontend`).
"""
def get(self, request):
try:
with open(
os.path.join(settings.FRONTEND_DIR, 'build', 'index.html'),
) as frontend:
return HttpResponse(frontend.read())
except FileNotFoundError:
logging.exception('Production build of app not found')
return HttpResponse(
"""
This URL is only used when you have built the production
version of the app. Visit http://localhost:3000/ instead, or
run `make build-frontend` to test the production version.
""",
status=501,
)
def index(request):
context = {
'component': 'App',
'props': {
'env': 'Django',
'user': {
'username': request.user.username,
},
},
}
return render(request, 'index.html', context)
|
Balannen/LSMASOMM | atom3/Formalisms/LSMASOMM/canNotCommunicateWith.py | Python | gpl-3.0 | 3,261 | 0.028519 | """
__canNotCommunicateWith.py_____________________________________________________
Automatically generated AToM3 syntactic obj | ect (DO NOT MODIFY DIRECTLY)
Author: bogdan
Modified: Fri Jul 1 01:29:28 2016
_______________________________________________________________________________
"""
from ASGNode import *
from ATOM3Type import *
from graph_canNotCommunicateWith import *
class canNotCommunicateWith(ASGNode, ATOM | 3Type):
def __init__(self, parent = None):
ASGNode.__init__(self)
ATOM3Type.__init__(self)
self.graphClass_ = graph_canNotCommunicateWith
self.isGraphObjectVisual = True
if(hasattr(self, '_setHierarchicalLink')):
self._setHierarchicalLink(False)
if(hasattr(self, '_setHierarchicalNode')):
self._setHierarchicalNode(False)
self.parent = parent
self.generatedAttributes = { }
self.realOrder = []
self.directEditing = []
def clone(self):
cloneObject = canNotCommunicateWith( self.parent )
for atr in self.realOrder:
cloneObject.setAttrValue(atr, self.getAttrValue(atr).clone() )
ASGNode.cloneActions(self, cloneObject)
return cloneObject
def copy(self, other):
ATOM3Type.copy(self, other)
for atr in self.realOrder:
self.setAttrValue(atr, other.getAttrValue(atr) )
ASGNode.copy(self, other)
def preCondition (self, actionID, * params):
if self.graphObject_:
return self.graphObject_.preCondition(actionID, params)
else: return None
def postCondition (self, actionID, * params):
if self.graphObject_:
return self.graphObject_.postCondition(actionID, params)
else: return None
def preAction (self, actionID, * params):
if self.graphObject_:
return self.graphObject_.preAction(actionID, params)
else: return None
def postAction (self, actionID, * params):
if self.graphObject_:
return self.graphObject_.postAction(actionID, params)
else: return None
def QOCA(self, params):
"""
QOCA Constraint Template
NOTE: DO NOT select a POST/PRE action trigger
Constraints will be added/removed in a logical manner by other mechanisms.
"""
return # <--- Remove this if you want to use QOCA
# Get the high level constraint helper and solver
from Qoca.atom3constraints.OffsetConstraints import OffsetConstraints
oc = OffsetConstraints(self.parent.qocaSolver)
# Constraint only makes sense if there exists 2 objects connected to this link
if(not (self.in_connections_ and self.out_connections_)): return
# Get the graphical objects (subclass of graphEntity/graphLink)
graphicalObjectLink = self.graphObject_
graphicalObjectSource = self.in_connections_[0].graphObject_
graphicalObjectTarget = self.out_connections_[0].graphObject_
objTuple = (graphicalObjectSource, graphicalObjectTarget, graphicalObjectLink)
"""
Example constraint, see Kernel/QOCA/atom3constraints/OffsetConstraints.py
For more types of constraints
"""
oc.LeftExactDistance(objTuple, 20)
oc.resolve() # Resolve immediately after creating entity & constraint
|
awhittle3/pyBattleship | playAnchors.py | Python | apache-2.0 | 325 | 0.009231 | from playSounds import play
BEAT = 500
HALF = int(BEAT/2)
QUART = int(BEAT/4)
def playSong():
play('C4' ,BEAT*2)
play('E4', BEAT)
play('G4', BEAT)
play('A4', BEAT + 3*QUART)
play('E4', QUART)
play('A4', 2*BEAT)
play | ('C5', BEAT*2)
play('D5', BEAT)
play | ('G4', BEAT)
play('C5', BEAT*4)
|
lmazuel/azure-sdk-for-python | azure-mgmt-billing/azure/mgmt/billing/operations/invoices_operations.py | Python | mit | 11,810 | 0.002202 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from .. import models
class InvoicesOperations(object):
"""InvoicesOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
:ivar api_version: Version of the API to be used with the client request. The current version is 2018-03-01-preview. Constant value: "2018-03-01-preview".
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.api_version = "2018-03-01-preview"
self.config = config
def list(
self, expand=None, filter=None, skiptoken=None, top=None, custom_headers=None, raw=False, **operation_config):
"""Lists the available invoices for a subscription in reverse
chronological order beginning with the most recent invoice. In preview,
invoices are available via this API only for invoice periods which end
December 1, 2016 or later. This is only supported for Azure Web-Direct
subscriptions. Other subscription types which were not purchased
directly through the Azure web portal are not supported through this
preview API.
:param expand: May be used to expand the downloadUrl property within a
list of invoices. This enables download links to be generated for
multiple invoices at once. By default, downloadURLs are not included
when listing invoices.
:type expand: str
:param filter: May be used to filter invoices by invoicePeriodEndDate.
The filter supports 'eq', 'lt', 'gt', 'le', 'ge', and 'and'. It does
not currently support 'ne', 'or', or 'not'.
:type filter: str
:param skiptoken: Skiptoken is only used if a previous operation
returned a partial result. If a previous response contains a nextLink
element, the value of the nextLink element will include a skiptoken
parameter that specifies a starting point to use for subsequent calls.
:type skiptoken: str
:param top: May be used to limit the number of results to the most
recent N invoices.
:type top: int
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of Invoice
:rtype:
~azure.mgmt.billing.models.InvoicePaged[~azure.mgmt.billing.models.Invoice]
:raises:
:class:`ErrorResponseException<azure.mgmt.billing.models.ErrorResponseException>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = self.list.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
if filter is not None:
query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
| if skiptoken is not None:
query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')
if top is not None:
query_parameters['$top'] = self._serialize.query("top", top, 'int', maximum=100, minimum=1)
else:
url = next_link
query_parameters = {}
# Construct headers
| header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(
request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.ErrorResponseException(self._deserialize, response)
return response
# Deserialize response
deserialized = models.InvoicePaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.InvoicePaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Billing/invoices'}
def get(
self, invoice_name, custom_headers=None, raw=False, **operation_config):
"""Gets a named invoice resource. When getting a single invoice, the
downloadUrl property is expanded automatically. This is only supported
for Azure Web-Direct subscriptions. Other subscription types which were
not purchased directly through the Azure web portal are not supported
through this preview API.
:param invoice_name: The name of an invoice resource.
:type invoice_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: Invoice or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.billing.models.Invoice or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`ErrorResponseException<azure.mgmt.billing.models.ErrorResponseException>`
"""
# Construct URL
url = self.get.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
'invoiceName': self._serialize.url("invoice_name", invoice_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_langu |
jasonwee/asus-rt-n14uhp-mrtg | src/lesson_file_system/codecs_bom_create_file.py | Python | apache-2.0 | 796 | 0 | import codecs
from codecs_to_hex import to_hex
# Pick the nonnative version of UTF-16 encoding
if codecs.BOM_UTF16 == codecs.BOM_UTF16_BE:
bom = codecs.BOM_UTF16 | _LE
encoding = 'utf_16_le'
else:
bom = codecs.BOM_UTF16_BE
encoding = 'utf_16_be'
print('Native order :', to_hex(codecs.BOM_UTF16, 2))
print('Selected order:', to_hex(bom, 2))
# Encode the text.
encoded_text = 'français'.encode(encoding)
print('{:14}: {}'.format(encoding, to_hex(encoded_text, 2)))
with open('nonnative-encoded.txt', mode='wb') as f:
# Write the selected byte-order marker. | It is not included
# in the encoded text because the byte order was given
# explicitly when selecting the encoding.
f.write(bom)
# Write the byte string for the encoded text.
f.write(encoded_text)
|
NicolasDorier/bitcoin | qa/rpc-tests/wallet.py | Python | mit | 18,545 | 0.00615 | #!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the wallet."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
class WalletTest (BitcoinTestFramework):
def check_fee_amount(self, curr_balance, balance_with_fee, fee_per_byte, tx_size):
"""Return curr_balance after asserting the fee was in range"""
fee = balance_with_fee - curr_balance
assert_fee_amount(fee, tx_size, fee_per_byte * 1000)
return curr_balance
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 4
self.extra_args = [['-usehd={:d}'.format(i%2==0)] for i in range(4)]
def setup_network(self, split=False):
self.nodes = start_nodes(3, self.options.tmpdir, self.extra_args[:3])
connect_nodes_bi(self.nodes,0,1)
connect_nodes_bi(self.nodes,1,2)
connect_nodes_bi(self.nodes,0,2)
self.is_network_split=False
self.sync_all()
def run_test (self):
# Check that there's no UTXO on none of the nodes
assert_equal(len(self.nodes[0].listunspent()), 0)
assert_equal(len(self.nodes[1].listunspent()), 0)
assert_equal(len(self.nodes[2].listunspent()), 0)
self.log.info("Mining blocks...")
self.nodes[0].generate(1)
walletinfo = self.nodes[0].getwalletinfo()
assert_equal(walletinfo['immature_balance'], 50)
assert_equal(walletinfo['balance'], 0)
self.sync_all()
self.nodes[1].generate(101)
self.sync_all()
assert_equal(self.nodes[0].getbalance(), 50)
assert_equal(self.nodes[1].getbalance(), 50)
assert_equal(self.nodes[2].getbalance(), 0)
# Check that only first and second nodes have UTXOs
assert_equal(len(self.nodes[0].listunspent()), 1)
assert_equal(len(self.nodes[1].listunspent()), 1)
assert_equal(len(self.nodes[2].listunspent()), 0)
# Send 21 BTC from 0 to 2 using sendtoaddress call.
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 10)
walletinfo = self.nodes[0].getwalletinfo()
assert_equal(walletinfo['immature_balance'], 0)
# Have node0 mine a block, thus it will collect its own fee.
self.nodes[0].generate(1)
self.sync_all()
# Exercise locking of unspent outputs
unspent_0 = self.nodes[2].listunspent()[0]
unspent_0 = {"txid": unspent_0["txid"], "vout": unspent_0["vout"]}
self.nodes[2].lockunspent(False, [unspent_0])
assert_raises_message(JSONRPCException, "Insufficient funds", self.nodes[2].sendtoaddress, self.nodes[2].getnewaddress(), 20)
assert_equal([unspent_0], self.nodes[2].listlockunspent())
self.nodes[2].lockunspent(True, [unspent_0])
assert_equal(len(self.nodes[2].listlockunspent()), 0)
# Have node1 generate 100 blocks (so node0 can recover the fee)
self.nodes[1].generate(100)
self.sync_all()
# node0 should end up with 100 btc in block rewards plus fees, but
# minus the 21 plus fees sent to node2
assert_equal(self.nodes[0].getbalance(), 100-21)
assert_equal(self.nodes[2].getbalance(), 21)
# Node0 should have two unspent outputs.
# Create a couple of transactions to send them to node2, submit them through
# node1, and make sure both node0 and node2 pick them up properly:
node0utxos = self.nodes[0].listunspent(1)
assert_equal(len(node0utxos), 2)
# create both transactions
txns_to_send = []
for utxo in node0utxos:
inputs = []
outputs = {}
inputs.append({ "txid" : utxo["txid"], "vout" : utxo["vout"]})
outputs[self.nodes[2].getnewaddress("from1")] = utxo["amount"] - 3
raw_tx = self.nodes[0].createrawtransaction(inputs, outputs)
txns_to_send.append(self.nodes[0].signrawtransaction(raw_tx))
# Have node 1 (miner) send the transactions
self.nodes[1].sendrawtransaction(txns_to_send[0]["hex"], True)
self.nodes[1].sendrawtransaction(txns_to_send[1]["hex"], True)
# Have node1 mine a block to confirm transactions:
self.nodes[1].generate(1)
self.sync_all()
assert_equal(self.nodes[0].getbalance(), 0)
assert_equal(self.nodes[2].getbalance(), 94)
assert_equal(self.nodes[2].getbalance("from1"), 94-21)
# Send 10 BTC normal
address = self.nodes[0].getnewaddress("test")
fee_per_byte = Decimal('0.001') / 1000
self.nodes[2].s | ettxfee(fee_per_byte * 1000)
txid = self.nodes[2].sendtoaddress(address, 10, "", "", False)
self.nodes[2].generate(1)
self.sync_all()
node_2_bal = self.check_fee_amount(self.nodes[2].getbalance(), Decimal('84'), fee_per_byte, count_bytes(self.nodes[2].getrawtransaction(txid) | ))
assert_equal(self.nodes[0].getbalance(), Decimal('10'))
# Send 10 BTC with subtract fee from amount
txid = self.nodes[2].sendtoaddress(address, 10, "", "", True)
self.nodes[2].generate(1)
self.sync_all()
node_2_bal -= Decimal('10')
assert_equal(self.nodes[2].getbalance(), node_2_bal)
node_0_bal = self.check_fee_amount(self.nodes[0].getbalance(), Decimal('20'), fee_per_byte, count_bytes(self.nodes[2].getrawtransaction(txid)))
# Sendmany 10 BTC
txid = self.nodes[2].sendmany('from1', {address: 10}, 0, "", [])
self.nodes[2].generate(1)
self.sync_all()
node_0_bal += Decimal('10')
node_2_bal = self.check_fee_amount(self.nodes[2].getbalance(), node_2_bal - Decimal('10'), fee_per_byte, count_bytes(self.nodes[2].getrawtransaction(txid)))
assert_equal(self.nodes[0].getbalance(), node_0_bal)
# Sendmany 10 BTC with subtract fee from amount
txid = self.nodes[2].sendmany('from1', {address: 10}, 0, "", [address])
self.nodes[2].generate(1)
self.sync_all()
node_2_bal -= Decimal('10')
assert_equal(self.nodes[2].getbalance(), node_2_bal)
node_0_bal = self.check_fee_amount(self.nodes[0].getbalance(), node_0_bal + Decimal('10'), fee_per_byte, count_bytes(self.nodes[2].getrawtransaction(txid)))
# Test ResendWalletTransactions:
# Create a couple of transactions, then start up a fourth
# node (nodes[3]) and ask nodes[0] to rebroadcast.
# EXPECT: nodes[3] should have those transactions in its mempool.
txid1 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1)
txid2 = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1)
sync_mempools(self.nodes)
self.nodes.append(start_node(3, self.options.tmpdir, self.extra_args[3]))
connect_nodes_bi(self.nodes, 0, 3)
sync_blocks(self.nodes)
relayed = self.nodes[0].resendwallettransactions()
assert_equal(set(relayed), {txid1, txid2})
sync_mempools(self.nodes)
assert(txid1 in self.nodes[3].getrawmempool())
# Exercise balance rpcs
assert_equal(self.nodes[0].getwalletinfo()["unconfirmed_balance"], 1)
assert_equal(self.nodes[0].getunconfirmedbalance(), 1)
#check if we can list zero value tx as available coins
#1. create rawtx
#2. hex-changed one output to 0.0
#3. sign and send
#4. check if recipient (node0) can list the zero value tx
usp = self.nodes[1].listunspent()
inputs = [{"txid":usp[0]['txid'], "vout":usp[0]['vout']}]
outputs = {self.nodes[1].getnewaddress(): 49.998, self.nodes[0].getnewaddress(): 11.11}
rawTx = self.nodes[1].createrawtransaction(inputs, outputs).replace("c0833842", "00000000") #replace 11.11 with 0.0 (int32)
decRawTx = self.nodes[1].decoderawtransaction(rawTx)
signedRawTx = self.nodes[1].signrawtr |
impallari/Impallari-Fontlab-Macros | IMP Testing/Save EPS from Glyph.py | Python | apache-2.0 | 436 | 0.009174 | #FLM: Save EPS from selected Glyph
# Description:
# Generate a EPS file for the Selected Glyph
# Credits:
# Pablo Impallari
# http://www.impallari.com
import os.path
from FL import *
from robofab.world import CurrentFont
# Get current Glyphs
f = CurrentFont()
g = fl.glyph
# Get path and file name
path = f.path
dir, fileName = os.path.split(path)
filename = dir + '/' + g.n | ame + '.eps'
# Generate EP | S file
g.SaveEPS( filename )
|
micha-shepher/oervoer-wizard | oervoer-django/oervoer/src/testimport.py | Python | gpl-3.0 | 490 | 0.004082 | import pprint
from wizard.importoervoer import ImportOrders, Credentials, ImportProds, ImportSmaak, ImportVlees
imp = ImportOrders(Credentials.user, Credentials.pw)
if imp.connect():
savecur = imp.get_cur()
imp.importtable()
imp = ImportProds(Credentials.user, Credentials.pw, savecur)
pprint.pprint(imp.importtable())
imp = ImportSmaak(Credentials.user, Credentials.pw, savecur)
imp.importtable()
imp = ImportVlees(Credential | s.user, Credentials.pw, savecur)
imp.importtab | le()
|
asifpy/django-crudbuilder | crudbuilder/views.py | Python | apache-2.0 | 8,139 | 0.000491 | from django.forms.models import modelform_factory
from django.views.generic import (
DetailView,
CreateView,
UpdateView,
DeleteView,
TemplateView
)
from django_tables2 import SingleTableView
from crudbuilder.registry import registry
from crudbuilder.mixins import (
CrudBuilderMixin,
BaseListViewMixin,
CreateUpdateViewMixin,
InlineFormsetViewMixin,
BaseDetailViewMixin,
LoginRequiredMixin
)
from crudbuilder.abstract import BaseBuilder
from crudbuilder.tables import TableBuilder
from crudbuilder.helpers import (
model_class_form,
plural,
reverse_lazy
)
class ViewBuilder(BaseBuilder):
"""View builder which returns all the CRUD class based views"""
def __init__(self, *args, **kwargs):
super(ViewBuilder, self).__init__(*args, **kwargs)
self.classes = {}
def generate_crud(self):
self.generate_list_view()
self.generate_create_view()
self.generate_detail_view()
self.generate_update_view()
self.generate_delete_view()
def get_actual_form(self, view):
if self.createupdate_forms and self.createupdate_forms.get(view, None):
return self.createupdate_forms.get(view)
elif self.custom_modelform:
return self.custom_modelform
else:
return self.generate_modelform()
def get_actual_table(self):
if self.custom_table2:
return self.custom_table2
else:
table_builder = TableBuilder(self.app, self.model, self.crud)
return table_builder.generate_table()
def generate_modelform(self):
"""Generate modelform from Django modelform_factory"""
model_class = self.get_model_class
excludes = self.modelform_excludes if self.modelform_excludes else []
_ObjectForm = modelform_factory(model_class, exclude=excludes)
return _ObjectForm
def get_template(self, tname):
"""
- Get custom template from CRUD class, if it is defined in it
- No custom template in CRUD class, then use the default template
"""
if self.custom_templates and self.custom_templates.get(tname, None):
return self.custom_templates.get(tname)
elif self.inlineformset:
return 'crudbuilder/inline/{}.html'.format(tname)
else:
return 'crudbuilder/instance/{}.html'.format(tname)
def get_createupdate_mixin(self):
if self.inlineformset:
return InlineFormsetViewMixin
else:
return CreateUpdateViewMixin
def generate_list_view(self):
"""Generate class based view for ListView"""
name = model_class_form(self.model + 'ListView')
list_args = dict(
model=self.get_model_class,
context_object_name=plural(self.model),
template_name=self.get_template('list'),
table_class=self.get_actual_table(),
context_table_name='table_objects',
crud=self.crud,
permissions=self.view_permission('list'),
permission_required=self.check_permission_required,
login_required=self.check_login_required,
table_pagination={'per_page': self.tables2_pagination or 10},
custom_queryset=self.custom_queryset,
custom_context=self.custom_context,
custom_postfix_url=self.custom_postfix_url
)
parent_classes = [BaseListViewMixin, SingleTableView]
if self.custom_list_view_mixin:
parent_classes.insert(0, self.custom_list_view_mixin)
list_class = type(
name,
tuple(parent_classes),
list_args
)
self.classes[name] = list_class
return list_class
def generate_create_view(self):
"""Generate class based view for CreateView"""
name = model_class_form(self.model + 'CreateView')
create_args = dict(
form_class=self.get_actual_form('create'),
model=self.get_model_class,
template_name=self.get_template('create'),
permissions=self.view_permission('create'),
permission_required=self.check_permission_required,
login_required=self.check_login_required,
inlineformset=self.inlineformset,
success_url=reverse_lazy('{}-{}-list'.format(self.app, self.custom_postfix_url)),
custom_form=self.createupdate_forms or self.custom_modelform,
custom_postfix_url=self.custom_postfix_url
)
parent_classes = [self.get_createupdate_mixin(), CreateView]
if self.custom_create_view_mixin:
parent_classes.insert(0, self.custom_create_view_mixin)
create_class = type(
name,
tuple(parent_classes),
create_args
)
self.classes[name] = create_class
return create_class
def generate_detail_view(self):
"""Generate class based view for DetailView"""
name = model_class_form(self.model + 'DetailView')
detail_args = dict(
detailview_excludes=self.detailview_excludes,
model=self.get_model_class,
template_name=self.get_template('detail'),
login_required=self.check_logi | n_required,
permissions=self.view_permission('detail'),
inlineformset=self.inlineformset,
permission_required=self.check_permission_required,
custom_detail_context= | self.custom_detail_context,
custom_postfix_url=self.custom_postfix_url
)
detail_class = type(name, (BaseDetailViewMixin, DetailView), detail_args)
self.classes[name] = detail_class
return detail_class
def generate_update_view(self):
"""Generate class based view for UpdateView"""
name = model_class_form(self.model + 'UpdateView')
update_args = dict(
form_class=self.get_actual_form('update'),
model=self.get_model_class,
template_name=self.get_template('update'),
permissions=self.view_permission('update'),
permission_required=self.check_permission_required,
login_required=self.check_login_required,
inlineformset=self.inlineformset,
custom_form=self.createupdate_forms or self.custom_modelform,
success_url=reverse_lazy('{}-{}-list'.format(self.app, self.custom_postfix_url)),
custom_postfix_url=self.custom_postfix_url
)
parent_classes = [self.get_createupdate_mixin(), UpdateView]
if self.custom_update_view_mixin:
parent_classes.insert(0, self.custom_update_view_mixin)
update_class = type(
name,
tuple(parent_classes),
update_args
)
self.classes[name] = update_class
return update_class
def generate_delete_view(self):
    """Dynamically build and register the DeleteView subclass for this model."""
    name = model_class_form(self.model + 'DeleteView')
    class_attrs = {
        'model': self.get_model_class,
        'template_name': self.get_template('delete'),
        'permissions': self.view_permission('delete'),
        'permission_required': self.check_permission_required,
        'login_required': self.check_login_required,
        'success_url': reverse_lazy(
            '{}-{}-list'.format(self.app, self.custom_postfix_url)),
        'custom_postfix_url': self.custom_postfix_url,
    }
    # An optional user-supplied mixin is placed first in the MRO so it
    # can override anything from the default bases.
    bases = [CrudBuilderMixin, DeleteView]
    if self.custom_delete_view_mixin:
        bases.insert(0, self.custom_delete_view_mixin)
    delete_class = type(name, tuple(bases), class_attrs)
    self.classes[name] = delete_class
    return delete_class
class CrudListView(LoginRequiredMixin, TemplateView):
    """Overview page listing every crud registered with the builder."""

    template_name = "crudbuilder/cruds.html"
    title = "Registered Cruds"
    # NOTE(review): set to False despite LoginRequiredMixin —
    # presumably the mixin honors this flag; confirm against the mixin.
    login_required = False

    def cruds(self):
        """Expose the registered (name, crud) pairs to the template."""
        return registry.items()


crudlist_view = CrudListView.as_view()
|
fuzeman/trakt.py | tests/test_helpers.py | Python | mit | 519 | 0 | from __future__ import absolute_import, division, print_function
from trakt.core.helpers import from_iso8601_datetime, to_iso8601_datetime
from hamcrest import assert_that, equal_to
import pytest

# ``arrow`` is an optional dependency; tests that need it skip when absent.
try:
    import arrow
except ImportError:
    arrow = None
def test_datetime_conversion():
    """Round-trip an aware datetime through the ISO-8601 helpers."""
    if arrow is None:
        pytest.skip('arrow not installed, skipping')

    # Drop sub-second precision first: an exact equality check after the
    # round trip requires the microseconds to be zeroed.
    d = arrow.utcnow().replace(microsecond=0)
    d_str = to_iso8601_datetime(d)
    # Original call name was corrupted by an embedded " | " artifact
    # ("from_iso8601_datetim | e"); reconstructed from the import above.
    d2 = from_iso8601_datetime(d_str)
    assert_that(d, equal_to(d2))
|
opennode/nodeconductor-assembly-waldur | src/waldur_mastermind/marketplace/migrations/0023_category_i18n.py | Python | mit | 917 | 0 | # Generated by Django 2.2.13 on 2020-09-30 11:19
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add English/Estonian i18n columns to the marketplace Category model.

    Field names ("description_en" / field kwargs) were corrupted by
    embedded " | " artifacts in the dump; reconstructed from the
    parallel "_et" fields.
    """

    dependencies = [
        ('marketplace', '0022_extend_description_limits'),
    ]

    operations = [
        migrations.AddField(
            model_name='category',
            name='description_en',
            field=models.TextField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='category',
            name='description_et',
            field=models.TextField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='category',
            name='title_en',
            field=models.CharField(max_length=255, null=True),
        ),
        migrations.AddField(
            model_name='category',
            name='title_et',
            field=models.CharField(max_length=255, null=True),
        ),
    ]
|
cpennington/edx-platform | lms/djangoapps/courseware/tests/test_rules.py | Python | agpl-3.0 | 1,680 | 0.00119 | """
Tests for permissions defined in courseware.rules
"""
import ddt
import six
from django.test import TestCase
from opaque_keys.edx.locator import CourseLocator

from course_modes.tests.factories import CourseModeFactory
# Import name was corrupted by an embedded " | " artifact
# ("Co | urseEnrollment"); reconstructed from usage below.
from student.models import CourseEnrollment
from student.tests.factories import UserFactory
@ddt.ddt
class PermissionTests(TestCase):
    """
    Tests for permissions defined in courseware.rules
    """
    def setUp(self):
        super(PermissionTests, self).setUp()
        self.user = UserFactory()
        self.course_id = CourseLocator('MITx', '000', 'Perm_course')
        # Create the course modes the proctoring permission depends on.
        CourseModeFactory(mode_slug='verified', course_id=self.course_id)
        CourseModeFactory(mode_slug='masters', course_id=self.course_id)
        CourseModeFactory(mode_slug='professional', course_id=self.course_id)
        # Start every test from a clean, un-enrolled state.
        CourseEnrollment.unenroll(self.user, self.course_id)

    def tearDown(self):
        super(PermissionTests, self).tearDown()
        self.user.delete()

    @ddt.data(
        (None, False),
        ('audit', False),
        ('verified', True),
        ('masters', True),
        ('professional', True),
        ('no-id-professional', False),
    )
    @ddt.unpack
    def test_proctoring_perm(self, mode, should_have_perm):
        """
        Test that the user has the edx_proctoring.can_take_proctored_exam permission
        """
        if mode is not None:
            CourseEnrollment.enroll(self.user, self.course_id, mode=mode)
        has_perm = self.user.has_perm('edx_proctoring.can_take_proctored_exam',
                                      {'course_id': six.text_type(self.course_id)})
        assert has_perm == should_have_perm
|
zhangyage/Python-oldboy | my_request/img_request.py | Python | apache-2.0 | 414 | 0.014778 | #!/usr/bin/env python
# -*- coding:utf-8 -*-
import requests
def download_img():
    """Download an image over HTTP and save it as ``demo.png``.

    The response is requested with ``stream=True`` and written in small
    chunks so the whole payload never has to sit in memory at once.
    """
    url = "https://www.jcd6.com/Public/Home/new/images/process_03.png"
    response = requests.get(url, stream=True)
    with open('demo.png', 'wb') as fd:
        # Original loop variable was corrupted by an embedded " | "
        # artifact ("respons | e"); reconstructed.
        for chunk in response.iter_content(128):
            fd.write(chunk)


if __name__ == '__main__':
    download_img()
expectocode/Telethon | telethon/hints.py | Python | mit | 1,562 | 0.00128 | import datetime
import typing
from . import helpers
from .tl import types, custom
Phone = str
Username = str
PeerID = int
Entity = typing.Union[types.User, types.Chat, types.Channel]
FullEntity = typing.Union[types.UserFull, types.messages.ChatFull, types.ChatFull, types.ChannelFull]
EntityLike = typing.Union[
| Phone,
Username,
PeerID,
types.TypePeer,
types.TypeInputPeer,
Entity,
FullEntity
]
EntitiesLike = typing.Union[EntityLike, typing.Sequence[EntityLike]]
ButtonLike = typing.Union[types.TypeKeyboardButton, custom.Button]
MarkupLike = typing.Union[
types.TypeReplyMarkup,
| ButtonLike,
typing.Sequence[ButtonLike],
typing.Sequence[typing.Sequence[ButtonLike]]
]
TotalList = helpers.TotalList
DateLike = typing.Optional[typing.Union[float, datetime.datetime, datetime.date, datetime.timedelta]]
LocalPath = str
ExternalUrl = str
BotFileID = str
FileLike = typing.Union[
LocalPath,
ExternalUrl,
BotFileID,
bytes,
typing.BinaryIO,
types.TypeMessageMedia,
types.TypeInputFile,
types.TypeInputFileLocation
]
# Can't use `typing.Type` in Python 3.5.2
# See https://github.com/python/typing/issues/266
try:
OutFileLike = typing.Union[
str,
typing.Type[bytes],
typing.BinaryIO
]
except TypeError:
OutFileLike = typing.Union[
str,
typing.BinaryIO
]
MessageLike = typing.Union[str, types.Message]
MessageIDLike = typing.Union[int, types.Message, types.TypeInputMessage]
ProgressCallback = typing.Callable[[int, int], None]
|
SanchayanMaity/gem5 | configs/example/fs.py | Python | bsd-3-clause | 15,347 | 0.004496 | # Copyright (c) 2010-2013, 2016 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Copyright (c) 2012-2014 Mark D. Hill and David A. Wood
# Copyright (c) 2009-2011 Advanced Micro Devices, Inc.
# Copyright (c) 2006-2007 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Ali Saidi
# Brad Beckmann
import optparse
import sys

import m5
from m5.defines import buildEnv
from m5.objects import *
from m5.util import addToPath, fatal

addToPath('../')

from ruby import Ruby

from common.FSConfig import *
from common.SysPaths import *
from common.Benchmarks import *
from common import Simulation
from common import CacheConfig
# This import line was corrupted by an embedded " | " artifact
# ("common im | port MemConfig"); reconstructed to match its siblings.
from common import MemConfig
from common import CpuConfig
from common.Caches import *
from common import Options

# Check if KVM support has been enabled, we might need to do VM
# configuration if that's the case.
have_kvm_support = 'BaseKvmCPU' in globals()
def is_kvm_cpu(cpu_class):
    """Return True when cpu_class is a KVM-backed CPU model."""
    if not have_kvm_support or cpu_class is None:
        return False
    return issubclass(cpu_class, BaseKvmCPU)
def cmd_line_template():
if options.command_line and options.command_line_file:
print "Error: --command-line and --command-line-file are " \
"mutually exclusive"
sys.exit(1)
if options.command_line:
return options.command_line
if options.command_line_file:
return open(options.command_line_file).read().strip()
return None
def build_test_system(np):
cmdline = cmd_line_template()
if buildEnv['TARGET_ISA'] == "alpha":
test_sys = makeLinuxAlphaSystem(test_mem_mode, bm[0], options.ruby,
cmdline=cmdline)
elif buildEnv['TARGET_ISA'] == "mips":
test_sys = makeLinuxMipsSystem(test_mem_mode, bm[0], cmdline=cmdline)
elif buildEnv['TARGET_ISA'] == "sparc":
test_sys = makeSparcSystem(test_mem_mode, bm[0], cmdline=cmdline)
elif buildEnv['TARGET_ISA'] == "x86":
test_sys = makeLinuxX86System(test_mem_mode, options.num_cpus, bm[0],
options.ruby, cmdline=cmdline)
elif buildEnv['TARGET_ISA'] == "arm":
test_sys = makeArmSystem(test_mem_mode, options.machine_type,
options.num_cpus, bm[0], options.dtb_filename,
bare_metal=options.bare_metal,
cmdline=cmdline,
external_memory=options.external_memory_system,
ruby=options.ruby)
if options.enable_context_switch_stats_dump:
test_sys.enable_context_switch_stats_dump = True
else:
fatal("Incapable of building %s full system!", buildEnv['TARGET_ISA'])
# Set the cache line size for the entire system
test_sys.cache_line_size = options.cacheline_size
# Create a top-level voltage domain
test_sys.voltage_domain = VoltageDomain(voltage = options.sys_voltage)
# Create a source clock for the system and set the clock period
test_sys.clk_domain = SrcClockDomain(clock = options.sys_clock,
voltage_domain = test_sys.voltage_domain)
# Create a CPU voltage domain
test_sys.cpu_voltage_domain = VoltageDomain()
# Create a source clock for the CPUs and set the clock period
test_sys.cpu_clk_domain = SrcClockDomain(clock = options.cpu_clock,
voltage_domain =
test_sys.cpu_voltage_domain)
if options.kernel is not None:
test_sys.kernel = binary(options.kernel)
if options.script is not None:
test_sys.readfile = options.script
if options.lpae:
test_sys.have_lpae = True
if options.virtualisation:
test_sys.have_virtualization = True
test_sys.init_param = options.init_param
# For now, assign all the CPUs to the same clock domain
test_sys.cpu = [TestCPUClass(clk_domain=test_sys.cpu_clk_domain, cpu_id=i)
for i in xrange(np)]
if is_kvm_cpu(TestCPUClass) or is_kvm_cpu(FutureClass):
test_sys.vm = KvmVM()
if options.ruby:
# Check for timing mode because ruby does not support atomic accesses
if not (options.cpu_type == "detailed" or options.cpu_type == "timing"):
print >> sys.stderr, "Ruby requires TimingSimpleCPU or O3CPU!!"
sys.exit(1)
Ruby.create_system(options, True, test_sys, test_sys.iobus,
test_sys._dma_ports)
# Create a seperate clock domain for Ruby
test_sys.ruby.clk_domain = SrcClockDomain(clock = options.ruby_clock,
voltage_domain = test_sys.voltage_domain)
# Connect the ruby io port to the PIO bus,
# assuming that there is just one such port.
test_sys.iobus.master = test_sys.ruby._io_port.slave
for (i, cpu) in enumerate(test_sys.cpu):
#
# Tie the cpu ports to the correct ruby system ports
#
cpu.clk_domain = test_sys.cpu_clk_domain
cpu.createThreads()
cpu.createInterruptController()
cpu.icache_port = test_sys.ruby._cpu_ports[i].slave
cpu.dcache_port = test_sys.ruby._cpu_ports[i].slave
if buildEnv['TARGET_ISA'] in ("x86", "arm"):
cpu.itb.walker.port = test_sys.ruby._cpu_ports[i].slave
cpu.dtb.walker.port = test_sys.ruby._cpu_ports[i].slave
if buildEnv['TARGET_ISA'] in "x86":
cpu.interrupts[0].pio = test_sys.ruby._cpu_ports[i].master
cpu.interrupts[0].int_master = test_sys.ruby._cpu_ports[i].slave
cpu.interrupts[0].int_slave = test_sys.ruby._cpu_ports[i].master
else:
if options.caches or options.l2cache:
# By default the IOCache runs at the system clock
test_sys.iocache = IOCache(addr_ranges = test_sys.mem_ranges)
test_sys.iocache.cpu_side = test_sys.iobus.master
test_sys.iocache.mem_side = test_sys.membus.slave
elif not options.external_memory_system:
test_sys.iobridge = Bridge(delay |
hpcugent/vsc-mympirun | test/mytaskprolog.py | Python | gpl-2.0 | 1,831 | 0.003277 | #
# Copyright 2019-2022 Ghent University
#
# This file is part of vsc-mympirun,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# the Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/hpcugent/vsc-mympirun
#
# vsc-mympirun is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# vsc-mympirun is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with vsc-mympirun. If not, see <http://www.gnu.org/licenses/>.
#
"""
End-to-end tests for mypmirun
"""
import os
import logging
logging.basicConfig(level=logging.DEBUG)

from pmi_utils import PMITest
# Module path was corrupted by an embedded " | " artifact
# ("vs | c.utils.affinity"); reconstructed.
from vsc.utils.affinity import sched_getaffinity, sched_setaffinity
class TaskPrologEnd2End(PMITest):
    """End-to-end test for the mytaskprolog helper script."""

    def setUp(self):
        """Prepare to run test."""
        super(TaskPrologEnd2End, self).setUp()
        self.script = os.path.join(os.path.dirname(self.script), 'mytaskprolog.py')

    def test_simple(self):
        origaff = sched_getaffinity()
        aff = sched_getaffinity()
        aff.set_bits([1])  # only use first core (we can always assume there is one core)
        sched_setaffinity(aff)
        try:
            self.pmirun([], pattern='export CUDA_VISIBLE_DEVICES=0')
        finally:
            # Robustness fix: restore the original affinity even when
            # pmirun fails, so a failing test does not leak the pinned
            # affinity into subsequent tests.
            sched_setaffinity(origaff)
|
BitWriters/Zenith_project | zango/lib/python3.5/site-packages/django/contrib/postgres/validators.py | Python | mit | 2,718 | 0.004415 | import copy
from django.core.exceptions import ValidationError
from django.core.validators import (
MaxLengthValidator, MaxValueValidator, MinLengthValidator,
MinValueValidator,
)
from django.utils.deconstruct import deconstructible
from django.utils.translation import ugettext_lazy as _, ungettext_lazy
class ArrayMaxLengthValidator(MaxLengthValidator):
    """Validate that an array field holds at most ``limit_value`` items."""

    message = ungettext_lazy(
        'List contains %(show_value)d item, it should contain no more than %(limit_value)d.',
        'List contains %(show_value)d items, it should contain no more than %(limit_value)d.',
        'limit_value',
    )
class ArrayMinLengthValidator(MinLengthValidator):
    """Validate that an array field holds at least ``limit_value`` items."""

    message = ungettext_lazy(
        'List contains %(show_value)d item, it should contain no fewer than %(limit_value)d.',
        'List contains %(show_value)d items, it should contain no fewer than %(limit_value)d.',
        'limit_value',
    )
@deconstructible
class KeysValidator(object):
    """A validator designed for HStore to require/restrict keys."""
    messages = {
        'missing_keys': _('Some keys were missing: %(keys)s'),
        'extra_keys': _('Some unknown keys were provided: %(keys)s'),
    }
    strict = False

    # The method name below was corrupted by an embedded " | " artifact
    # ("__ini | t__"); reconstructed.
    def __init__(self, keys, strict=False, messages=None):
        self.keys = set(keys)
        self.strict = strict
        if messages is not None:
            # Copy before updating so instances never mutate the shared
            # class-level ``messages`` dict.
            self.messages = copy.copy(self.messages)
            self.messages.update(messages)

    def __call__(self, value):
        keys = set(value.keys())
        missing_keys = self.keys - keys
        if missing_keys:
            raise ValidationError(
                self.messages['missing_keys'],
                code='missing_keys',
                params={'keys': ', '.join(missing_keys)},
            )
        if self.strict:
            extra_keys = keys - self.keys
            if extra_keys:
                raise ValidationError(
                    self.messages['extra_keys'],
                    code='extra_keys',
                    params={'keys': ', '.join(extra_keys)},
                )

    def __eq__(self, other):
        return (
            isinstance(other, self.__class__)
            and (self.keys == other.keys)
            and (self.messages == other.messages)
            and (self.strict == other.strict)
        )

    def __ne__(self, other):
        # Needed for Python 2, where __ne__ is not derived from __eq__.
        return not (self == other)
class RangeMaxValueValidator(MaxValueValidator):
    """Validate that a range lies entirely at or below the limit value."""

    message = _('Ensure that this range is completely less than or equal to %(limit_value)s.')

    def compare(self, a, b):
        # The range violates the limit when its upper bound exceeds it.
        return a.upper > b
class RangeMinValueValidator(MinValueValidator):
    """Validate that a range lies entirely at or above the limit value."""

    message = _('Ensure that this range is completely greater than or equal to %(limit_value)s.')

    def compare(self, a, b):
        # The range violates the limit when its lower bound falls below it.
        return a.lower < b
|
SlashDK/OpenCV-simplestuff | vendors/microsoft.py | Python | mit | 1,684 | 0.00772 | import base64
import json
import requests
def call_vision_api(image_filename, api_keys):
    """POST an image to the Microsoft (Project Oxford) vision analyze API.

    Args:
        image_filename: Path of the image file to analyze.
        api_keys: Mapping of vendor name to API key; the 'microsoft'
            entry is used here.

    Returns:
        The raw response body text (JSON) from the API.

    Raises:
        requests.HTTPError: If the API responds with an error status.
    """
    api_key = api_keys['microsoft']
    # The query string was corrupted by an embedded " | " artifact
    # ("subscription | -key"); reconstructed.
    post_url = "https://api.projectoxford.ai/vision/v1.0/analyze?visualFeatures=Categories,Tags,Description,Faces,ImageType,Color,Adult&subscription-key=" + api_key

    # Read the image with a context manager; the original leaked the
    # open file handle.
    with open(image_filename, 'rb') as image_file:
        image_data = image_file.read()

    result = requests.post(post_url, data=image_data,
                           headers={'Content-Type': 'application/octet-stream'})
    result.raise_for_status()
    return result.text
# Return a dictionary of features to their scored values (represented as lists of tuples).
# Scored values must be sorted in descending order.
#
# {
# 'feature_1' : [(element, score), ...],
# 'feature_2' : ...
# }
#
# E.g.,
#
# {
# 'tags' : [('throne', 0.95), ('swords', 0.84)],
# 'description' : [('A throne made of pointy objects', 0.73)]
# }
#
def get_standardized_result(api_result):
    """Normalize a Microsoft Vision API response into the common format.

    Returns a dict with 'tags' and 'captions' keys, each a list of
    ``(element, score)`` tuples in the order the API reported them.
    """
    tags = [(tag['name'], tag['confidence'])
            for tag in api_result['tags']]
    captions = [(caption['text'], caption['confidence'])
                for caption in api_result['description']['captions']]
    return {
        'tags': tags,
        'captions': captions,
    }
|
ivano666/tensorflow | tensorflow/contrib/testing/python/framework/test_util.py | Python | apache-2.0 | 4,116 | 0.007532 | # Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test utilities."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import glob
import os
import numpy as np
from tensorflow.core.framework import summary_pb2
from tensorflow.python.training import summary_io
def assert_summary(expected_tags, expected_simple_values, summary_proto):
    """Asserts summary contains the specified tags and values.

    Args:
      expected_tags: All tags expected in the summary.
      expected_simple_values: Mapping of tag to expected simple value.
      summary_proto: Summary to validate.

    Raises:
      ValueError: if the set of tags differs from `expected_tags`.
    """
    seen_tags = set()
    for value in summary_proto.value:
        seen_tags.add(value.tag)
        if value.tag in expected_simple_values:
            np.testing.assert_almost_equal(
                value.simple_value,
                expected_simple_values[value.tag],
                decimal=2,
                err_msg=value.tag)
    expected_tags = set(expected_tags)
    if expected_tags != seen_tags:
        raise ValueError('Expected tags %s, got %s.' % (expected_tags, seen_tags))
def to_summary_proto(summary_str):
    """Parse a serialized summary into a Summary proto.

    Args:
      summary_str: Serialized summary.

    Returns:
      summary_pb2.Summary parsed from `summary_str`.

    Raises:
      ValueError: if tensor is not a valid summary tensor.
    """
    proto = summary_pb2.Summary()
    proto.ParseFromString(summary_str)
    return proto
# TODO(ptucker): Move to a non-test package?
def latest_event_file(base_dir):
    """Find latest event file in `base_dir`.

    Args:
      base_dir: Base directory in which TF event files are stored.

    Returns:
      Path of the lexicographically last ``events.*`` file, or `None`
      when the directory contains none.
    """
    candidates = glob.glob(os.path.join(base_dir, 'events.*'))
    if not candidates:
        return None
    return sorted(candidates)[-1]
def latest_events(base_dir):
    """Parse events from latest event file in `base_dir`.

    Args:
      base_dir: Base directory in which TF event files are stored.

    Returns:
      Iterable of event protos; empty when no event file exists.
    """
    path = latest_event_file(base_dir)
    if not path:
        return []
    return summary_io.summary_iterator(path)
def latest_summaries(base_dir):
    """Parse summary events from latest event file in base_dir.

    The original docstring was corrupted by an embedded " | " artifact
    and claimed a ValueError is raised when no event files exist; the
    implementation actually returns an empty list in that case.

    Args:
      base_dir: Base directory in which TF event files are stored.

    Returns:
      List of event protos that carry a ``summary`` field; empty when no
      event files exist under base_dir.
    """
    return [e for e in latest_events(base_dir) if e.HasField('summary')]
def simple_values_from_events(events, tags):
    """Parse summaries from events with simple_value.

    For each requested tag, the value from the event with the highest
    ``step`` wins. (The function name was corrupted by an embedded
    " | " artifact — "simple_v | alues_from_events" — reconstructed.)

    Args:
      events: List of tensorflow.Event protos.
      tags: List of string event tags corresponding to simple_value
        summaries.

    Returns:
      dict of tag:value.

    Raises:
      ValueError: if a summary with a specified tag does not contain
        simple_value.
    """
    step_by_tag = {}
    value_by_tag = {}
    for e in events:
        if e.HasField('summary'):
            for v in e.summary.value:
                tag = v.tag
                if tag in tags:
                    if not v.HasField('simple_value'):
                        raise ValueError('Summary for %s is not a simple_value.' % tag)
                    # The events are mostly sorted in step order, but we
                    # explicitly check just in case.
                    if tag not in step_by_tag or e.step > step_by_tag[tag]:
                        step_by_tag[tag] = e.step
                        value_by_tag[tag] = v.simple_value
    return value_by_tag
|
mayankjohri/LetsExplorePython | Section 2 - Advance Python/Chapter S2.05 - REST API - Server & Clients/code/clients/old/6_Cookies_with_Authentication.py | Python | gpl-3.0 | 290 | 0.034483 | import requests
session = requests.session()

user = "mayank"
password = "test"

# POST the credentials; cookies set by the server are kept on the session.
p = session.post("http://127.0.0.1:5000/api/users", {'user': user, 'password': password})

print('headers', p.headers)
# The helper name below was corrupted by an embedded " | " artifact
# ("requests.utils.d | ict_from_cookiejar"); reconstructed.
print('cookies', requests.utils.dict_from_cookiejar(session.cookies))
print('html', p.text)
JPWKU/unix-agent | src/dcm/agent/tests/unit/test_backoff.py | Python | apache-2.0 | 7,619 | 0 | #
# Copyright (C) 2014 Dell, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import datetime
import math
import mock
import time
import unittest
import dcm.agent.connection.websocket as websocket
import dcm.agent.handshake as handshake
import dcm.agent.tests.utils.general as test_utils
from dcm.agent.events.globals import global_space as dcm_events
def fake_incoming_message(incoming_doc):
    """No-op incoming-message handler used as a connection callback."""
    return None
class TestBackoff(unittest.TestCase):
    """Tests for the websocket connection backoff/retry behavior."""

    @classmethod
    def setUpClass(cls):
        test_utils.connect_to_debugger()

    def run_with_connect_errors(
            self,
            backoff_seconds,
            max_backoff_seconds,
            run_time_seconds,
            conn_obj):
        """Drive a connection whose handshake always fails.

        Returns the mocked websocket client so callers can inspect how
        many reconnect attempts happened during run_time_seconds.
        """

        class FakeHS(object):
            def get_send_document(self):
                # Force a handshake failure so the connection backs off
                # and retries.
                ws.throw_error(Exception("just for tests"))
                return {}

            def incoming_document(self, incoming_doc):
                return handshake.HandshakeIncomingReply(
                    handshake.HandshakeIncomingReply.REPLY_CODE_SUCCESS)

        m = mock.Mock()
        conn_obj.return_value = m
        server_url = "wss://notreal.com"
        ws = websocket.WebSocketConnection(
            server_url,
            backoff_amount=int(backoff_seconds*1000),
            max_backoff=int(max_backoff_seconds*1000))
        ws.connect(fake_incoming_message, FakeHS())

        nw = datetime.datetime.now()
        done_time = nw + datetime.timedelta(seconds=run_time_seconds)
        while done_time > nw:
            remaining = done_time - nw
            dcm_events.poll(timeblock=remaining.total_seconds())
            nw = datetime.datetime.now()
        ws.close()
        return m

    @mock.patch('dcm.agent.connection.websocket._WebSocketClient')
    def test_no_retry(self, conn_obj):
        """Make sure that just 1 connect happens when waiting less than the
        backoff time"""
        m = mock.Mock()
        conn_obj.return_value = m
        backoff_seconds = 3.0
        max_backoff_seconds = backoff_seconds * 100.0  # just make a big number
        run_time_seconds = backoff_seconds / 2.0  # less then the back off
        m = self.run_with_connect_errors(
            backoff_seconds,
            max_backoff_seconds,
            run_time_seconds,
            conn_obj)
        self.assertEqual(1, m.connect.call_count)

    @mock.patch('dcm.agent.connection.websocket._WebSocketClient')
    def test_retry_connections(self, conn_obj):
        """Make sure reconnections happen"""
        m = mock.Mock()
        conn_obj.return_value = m
        initial_backoff_seconds = 0.5
        max_backoff_seconds = 600.0
        run_time_seconds = 5.0
        # With exponential doubling the attempt count grows with
        # log2(run_time / initial_backoff).
        expected_backoff_count = \
            int(math.log(run_time_seconds / initial_backoff_seconds, 2))
        m = self.run_with_connect_errors(
            initial_backoff_seconds,
            max_backoff_seconds,
            run_time_seconds,
            conn_obj)
        self.assertLessEqual(expected_backoff_count-2, m.connect.call_count)
        self.assertGreaterEqual(expected_backoff_count+2, m.connect.call_count)

    @mock.patch('dcm.agent.connection.websocket._WebSocketClient')
    def test_retry_connections_never_more_than_max_back(self, conn_obj):
        m = mock.Mock()
        conn_obj.return_value = m
        initial_backoff_seconds = 5.0
        max_backoff_seconds = 0.1
        run_time_seconds = 3.0
        expected_backoff_count = run_time_seconds / max_backoff_seconds
        m = self.run_with_connect_errors(
            initial_backoff_seconds,
            max_backoff_seconds,
            run_time_seconds,
            conn_obj)
        self.assertGreaterEqual(expected_backoff_count, m.connect.call_count)

    @mock.patch('dcm.agent.connection.websocket._WebSocketClient')
    def test_force_backoff(self, conn_obj):
        # force the backoff to be longer than the max run time then make sure
        # that the connect is only called once
        backoff_seconds = 0.2
        max_backoff_seconds = backoff_seconds
        run_time_seconds = backoff_seconds * 10.0
        force_time = run_time_seconds + 1.0

        m = mock.Mock()
        conn_obj.return_value = m
        server_url = "wss://notreal.com"
        ws = websocket.WebSocketConnection(
            server_url,
            backoff_amount=int(backoff_seconds*1000),
            max_backoff=int(max_backoff_seconds*1000))

        def send_in_handshake():
            ws.event_incoming_message(
                {handshake.HandshakeIncomingReply.REPLY_KEY_FORCE_BACKOFF:
                 force_time,
                 'return_code':
                 handshake.HandshakeIncomingReply.REPLY_CODE_FORCE_BACKOFF})

        class FakeHS(object):
            def get_send_document(self):
                dcm_events.register_callback(send_in_handshake)
                return {}

            def incoming_document(self, incoming_doc):
                hs = handshake.HandshakeIncomingReply(
                    handshake.HandshakeIncomingReply.REPLY_CODE_FORCE_BACKOFF,
                    force_backoff=force_time)
                return hs

        ws.connect(fake_incoming_message, FakeHS())
        # The statement below was corrupted by an embedded " | " artifact
        # ("datet | ime.datetime.now()"); reconstructed to match the
        # identical polling loop in run_with_connect_errors.
        nw = datetime.datetime.now()
        done_time = nw + datetime.timedelta(seconds=run_time_seconds)
        while done_time > nw:
            remaining = done_time - nw
            dcm_events.poll(timeblock=remaining.total_seconds())
            nw = datetime.datetime.now()
        ws.close()
        self.assertEqual(1, m.connect.call_count)

    def test_backoff_object_ready_immediately(self):
        initial_backoff_second = 300.0
        max_backoff_seconds = initial_backoff_second
        backoff = websocket.Backoff(
            max_backoff_seconds,
            initial_backoff_second=initial_backoff_second)
        self.assertTrue(backoff.ready())

    def test_backoff_object_error_not_ready(self):
        initial_backoff_second = 300.0
        max_backoff_seconds = initial_backoff_second
        backoff = websocket.Backoff(
            max_backoff_seconds,
            initial_backoff_second=initial_backoff_second)
        backoff.error()
        self.assertFalse(backoff.ready())

    def test_backoff_object_error_wait_ready(self):
        initial_backoff_second = 0.05
        max_backoff_seconds = initial_backoff_second
        backoff = websocket.Backoff(
            max_backoff_seconds,
            initial_backoff_second=initial_backoff_second)
        backoff.error()
        time.sleep(initial_backoff_second)
        self.assertTrue(backoff.ready())

    def test_backoff_object_ready_after_many_errors_than_activity(self):
        initial_backoff_second = 0.05
        max_backoff_seconds = initial_backoff_second
        backoff = websocket.Backoff(
            max_backoff_seconds,
            initial_backoff_second=initial_backoff_second)
        backoff.error()
        backoff.error()
        backoff.error()
        backoff.error()
        backoff.error()
        backoff.error()
        self.assertFalse(backoff.ready())
        # Activity resets the backoff state entirely.
        backoff.activity()
        self.assertTrue(backoff.ready())
couchbaselabs/celery | celery/tests/slow/test_buckets.py | Python | bsd-3-clause | 10,768 | 0.001022 | from __future__ import absolute_import
from __future__ import with_statement
import sys
import time
from functools import partial
from itertools import chain, izip
from Queue import Empty
from mock import Mock, patch
from celery.app.registry import TaskRegistry
from celery.task.base import Task
from celery.utils import timeutils
from celery.utils import uuid
from celery.worker import buckets
from celery.tests.utils import Case, skip_if_environ, mock_context
skip_if_disabled = partial(skip_if_environ("SKIP_RLIMITS"))
class MockJob(object):
    """Simple value object standing in for a task request in tests."""

    def __init__(self, id, name, args, kwargs):
        self.id = id
        self.name = name
        self.args = args
        self.kwargs = kwargs

    def __eq__(self, other):
        if isinstance(other, self.__class__):
            return bool(self.id == other.id
                        and self.name == other.name
                        and self.args == other.args
                        and self.kwargs == other.kwargs)
        # BUG FIX: the original returned ``self == other`` here, which
        # re-invoked this very method and recursed infinitely for any
        # non-MockJob operand.  Returning NotImplemented lets Python
        # fall back to the other operand / identity comparison.
        return NotImplemented

    def __repr__(self):
        return "<MockJob: task:%s id:%s args:%s kwargs:%s" % (
            self.name, self.id, self.args, self.kwargs)
class test_TokenBucketQueue(Case):
    """Behavioral tests for the rate-limiting TokenBucketQueue."""

    # NOTE(review): this method name does not start with ``test_`` so the
    # runner never executes it; renaming would change the public
    # interface, so it is only flagged here.  The decorator name was
    # corrupted by an embedded " | " artifact ("@skip_i | f_disabled");
    # reconstructed.
    @skip_if_disabled
    def empty_queue_yields_QueueEmpty(self):
        x = buckets.TokenBucketQueue(fill_rate=10)
        with self.assertRaises(buckets.Empty):
            x.get()

    @skip_if_disabled
    def test_bucket__put_get(self):
        x = buckets.TokenBucketQueue(fill_rate=10)
        x.put("The quick brown fox")
        self.assertEqual(x.get(), "The quick brown fox")

        x.put_nowait("The lazy dog")
        time.sleep(0.2)
        self.assertEqual(x.get_nowait(), "The lazy dog")

    @skip_if_disabled
    def test_fill_rate(self):
        x = buckets.TokenBucketQueue(fill_rate=10)

        # 20 items should take at least one second to complete
        time_start = time.time()
        [x.put(str(i)) for i in xrange(20)]
        for i in xrange(20):
            sys.stderr.write(".")
            x.wait()
        self.assertGreater(time.time() - time_start, 1.5)

    @skip_if_disabled
    def test_can_consume(self):
        x = buckets.TokenBucketQueue(fill_rate=1)
        x.put("The quick brown fox")
        self.assertEqual(x.get(), "The quick brown fox")
        time.sleep(0.1)

        # Not yet ready for another token
        x.put("The lazy dog")
        with self.assertRaises(x.RateLimitExceeded):
            x.get()

    @skip_if_disabled
    def test_expected_time(self):
        x = buckets.TokenBucketQueue(fill_rate=1)
        x.put_nowait("The quick brown fox")
        self.assertEqual(x.get_nowait(), "The quick brown fox")
        self.assertFalse(x.expected_time())

    @skip_if_disabled
    def test_qsize(self):
        x = buckets.TokenBucketQueue(fill_rate=1)
        x.put("The quick brown fox")
        self.assertEqual(x.qsize(), 1)
        self.assertEqual(x.get_nowait(), "The quick brown fox")
class test_rate_limit_string(Case):
    """Checks conversion of rate-limit strings into numeric rates."""

    @skip_if_disabled
    def test_conversion(self):
        expectations = [
            (999, 999),
            ("1456/s", 1456),
            ("100/m", 100 / 60.0),
            ("10/h", 10 / 60.0 / 60.0),
        ]
        for given, wanted in expectations:
            self.assertEqual(timeutils.rate(given), wanted)

        # Every spelling of "no limit" collapses to zero.
        for zero in (0, None, "0", "0/m", "0/h", "0/s"):
            self.assertEqual(timeutils.rate(zero), 0)
class TaskA(Task):
rate_limit = 10
class TaskB(Task):
rate_limit = None
class TaskC(Task):
rate_limit = "1/s"
class TaskD(Task):
rate_limit = "1000/m"
class test_TaskBucket(Case):
def setUp(self):
self.registry = TaskRegistry()
self.task_classes = (TaskA, TaskB, TaskC)
for task_cls in self.task_classes:
self.registry.register(task_cls)
@skip_if_disabled
def test_get_nowait(self):
x = buckets.TaskBucket(task_registry=self.registry)
with self.assertRaises(buckets.Empty):
x.get_nowait()
@patch("celery.worker.buckets.sleep")
def test_get_block(self, sleep):
x = buckets.TaskBucket(task_registry=self.registry)
x.not_empty = Mock()
get = x._get = Mock()
remaining = [0]
def effect():
if get.call_count == 1:
raise Empty()
rem = remaining[0]
remaining[0] = 0
return rem, Mock()
get.side_effect = effect
with mock_context(Mock()) as context:
x.not_empty = context
x.wait = Mock()
x.get(block=True)
get.reset()
remaining[0] = 1
x.get(block=True)
def test_get_raises_rate(self):
x = buckets.TaskBucket(task_registry=self.registry)
x.buckets = {1: Mock()}
x.buckets[1].get_nowait.side_effect = buckets.RateLimitExceeded()
x.buckets[1].expected_time.return_value = 0
x._get()
@skip_if_disabled
def test_refresh(self):
reg = {}
x = buckets.TaskBucket(task_registry=reg)
reg["foo"] = "something"
x.refresh()
self.assertIn("foo", x.buckets)
self.assertTrue(x.get_bucket_for_type("foo"))
@skip_if_disabled
def test__get_queue_for_type(self):
x = buckets.TaskBucket(task_registry={})
x.buckets["foo"] = buckets.TokenBucketQueue(fill_rate=1)
self.assertIs(x._get_queue_for_type("foo"), x.buckets["foo"].queue)
x.buckets["bar"] = buckets.FastQueue()
self.assertIs(x._get_queue_for_type("bar"), x.buckets["bar"])
@skip_if_disabled
def test_update_bucket_for_type(self):
bucket = buckets.TaskBucket(task_registry=self.registry)
b = bucket._get_queue_for_type(TaskC.name)
self.assertIs(bucket.update_bucket_for_type(TaskC.name).queue, b)
self.assertIs(bucket.buckets[TaskC.name].queue, b)
@skip_if_disabled
def test_auto_add_on_missing_put(self):
reg = {}
b = buckets.TaskBucket(task_registry=reg)
reg["nonexisting.task"] = "foo"
b.put(MockJob(uuid(), "nonexisting.task", (), {}))
self.assertIn("nonexisting.task", b.buckets)
@skip_if_disabled
def test_auto_add_on_missing(self):
b = buckets.TaskBucket(task_registry=self.registry)
for task_cls in self.task_classes:
self.assertIn(task_cls.name, b.buckets.keys())
self.registry.register(TaskD)
self.assertTrue(b.get_bucket_for_type(TaskD.name))
self.assertIn(TaskD.name, b.buckets.keys())
self.registry.unregister(TaskD)
@skip_if_disabled
def test_has_rate_limits(self):
b = buckets.TaskBucket(task_registry=self.registry)
self.assertEqual(b.buckets[TaskA.name]._bucket.fill_rate, 10)
self.assertIsInstance(b.buckets[TaskB.name], buckets.Queue)
self.assertEqual(b.buckets[TaskC.name]._bucket.fill_rate, 1)
self.registry.register(TaskD)
b.init_with_registry()
try:
self.assertEqual(b.buckets[TaskD.name]._bucket.fill_rate,
1000 / 60.0)
finally:
self.registry.unregister(TaskD)
@skip_if_disabled
def test_on_empty_buckets__get_raises_empty(self):
b = buckets.TaskBucket(task_registry=self.registry)
with self.assertRaises(buckets.Empty):
b.get(block=False)
self.assertEqual(b.qsize(), 0)
@skip_if_disabled
def test_put__get(self):
b = buckets.TaskBucket(task_registry=self.registry)
job = MockJob(uuid(), TaskA.name, ["theqbf"], {"foo": "bar"})
b.put(job)
self.assertEqual(b.get(), job)
@skip_if_disabled
def test_fill_rate(self):
b = buckets.TaskBucket(task_registry=self.registry)
cjob = lambda i: MockJob(uuid(), TaskA.name, [i], {})
jobs = [cjob(i) for i in xrange(20)]
[b.put(job) for job in jobs]
self.assertEqual(b.qsize(), 20)
# 20 items should take at least one second to complete
time_start = time.time()
for i, job in enumerate(jobs):
sys.stderr.write(". |
j-mracek/dnf_docker_test | features/steps/test_behave.py | Python | mit | 9,384 | 0.002558 | #!/usr/bin/python -tt
from behave import *
import os
import subprocess
import glob
import re
import shutil
DNF_FLAGS = ['-y', '--disablerepo=*', '--nogpgcheck']
RPM_INSTALL_FLAGS = ['-Uvh']
RPM_ERASE_FLAGS = ['-e']
def _left_decorator(item):
""" Removed packages """
return u'-' + item
def _right_decorator(item):
""" Installed packages """
return u'+' + item
def find_pkg(pkg):
""" Find the package file in the repository """
candidates = glob.glob('/repo/' + pkg + '*.rpm')
if len(candidates) == 0:
print("No candidates for: '{0}'".format(pkg))
assert len(candidates) == 1
return candidates[0]
def decorate_rpm_packages(pkgs):
""" Converts package names like TestA, TestB into absolute paths """
return [find_pkg(p) for p in pkgs]
def get_rpm_package_list():
""" Gets all installed packages in the system """
pkgstr = subprocess.check_output(['rpm', '-qa', '--queryformat', '%{NAME}\n'])
return pkgstr.splitlines()
def get_rpm_package_version_list():
""" Gets all installed packages in the system with version"""
pkgverstr = subprocess.check_output(['rpm', '-qa', '--queryformat', '%{NAME}-%{VERSION}-%{RELEASE}\n'])
return pkgverstr.splitlines()
def get_dnf_package_version_list():
""" Gets all installed packages in the system with version to check that dnf has same data about installed packages"""
pkgverstr = subprocess.check_output(['dnf', 'repoquery', '--installed', '-Cq', '--queryformat', '%{name}.%{version}.%{release}\n'])
pkgverstr = pkgverstr.splitlines()
return pkgverstr
def diff_package_lists(a, b):
""" Computes both left/right diff between lists `a` and `b` """
sa, sb = set(a), set(b)
return (map(_left_decorator, list(sa - sb)),
map(_right_decorator, list(sb - sa)))
def package_version_lists(pkg, list_ver):
""" Select package versions """
found_pkgs = [x for x in list_ver if x.startswith(pkg)]
assert len(found_pkgs) == 1
return str(found_pkgs[0])
def package_absence(pkg, list_ver):
""" Select package versions """
found_pkgs = [x for x in list_ver if re.search('^' + pkg, x)]
assert len(found_pkgs) == 0
return None
def execute_dnf_command(cmd, reponame):
""" Execute DNF command with default flags and the specified `reponame` enabled """
flags = DNF_FLAGS + ['--enablerepo={0}'.format(reponame)]
return subprocess.check_call(['dnf'] + flags + cmd, stdout=subprocess.PIPE)
def execute_rpm_command(pkg, action):
""" Execute given action over specified pkg(s) """
if not isinstance(pkg, list):
pkg = [pkg]
if action == "remove":
rpm_command = RPM_ERASE_FLAGS
elif action == "install":
rpm_command = RPM_INSTALL_FLAGS
pkg = decorate_rpm_packages(pkg)
return subprocess.check_call(['rpm'] + rpm_command + pkg, stdout=subprocess.PIPE)
def piecewise_compare(a, b):
""" Check if the two sequences are identical regardless of ordering """
return sorted(a) == sorted(b)
def split(pkgs) | :
return [p.strip() for p in pkgs.split(',')]
@given('I use the repository "{repo}"')
def given_repo_condition(context, repo):
""" :type context: behave.runner.Context """
assert repo
context.repo = repo
assert os.path.exists('/var/www/html/repo/' + repo)
for root, dirs, files in os.walk('/repo'):
for | f in files:
os.unlink(os.path.join(root, f))
for d in dirs:
shutil.rmtree(os.path.join(root, d))
subprocess.check_call(['cp -rs /var/www/html/repo/' + repo + '/* /repo/'], shell=True)
with open('/etc/yum.repos.d/' + repo + '.repo', 'w') as f:
f.write('[' + repo + ']\nname=' + repo + '\nbaseurl=http://127.0.0.1/repo/' + repo + '\nenabled=1\ngpgcheck=0')
@when('I "{action}" a package "{pkgs}" with "{manager}"')
def when_action_package(context, action, pkgs, manager):
assert pkgs
context.pre_rpm_packages = get_rpm_package_list()
assert context.pre_rpm_packages
context.pre_rpm_packages_version = get_rpm_package_version_list()
assert context.pre_rpm_packages_version
context.pre_dnf_packages_version = get_dnf_package_version_list()
assert context.pre_dnf_packages_version
if manager == 'rpm':
if action in ["install", "remove"]:
execute_rpm_command(split(pkgs), action)
else:
raise AssertionError('The action {} is not allowed parameter with rpm manager'.format(action))
elif manager == 'dnf':
if action == 'upgrade':
if pkgs == 'all':
execute_dnf_command([action], context.repo)
else:
execute_dnf_command([action] + split(pkgs), context.repo)
elif action == 'autoremove':
subprocess.check_call(['dnf', '-y', action], stdout=subprocess.PIPE)
elif action in ["install", "remove", "downgrade", "upgrade-to"]:
execute_dnf_command([action] + split(pkgs), context.repo)
else:
raise AssertionError('The action {} is not allowed parameter with dnf manager'.format(action))
else:
raise AssertionError('The manager {} is not allowed parameter'.format(manager))
@when('I execute command "{command}" with "{result}"')
def when_action_command(context, command, result):
assert command
context.pre_rpm_packages = get_rpm_package_list()
assert context.pre_rpm_packages
context.pre_rpm_packages_version = get_rpm_package_version_list()
assert context.pre_rpm_packages_version
context.pre_dnf_packages_version = get_dnf_package_version_list()
assert context.pre_dnf_packages_version
cmd_output = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
context.cmd_rc = cmd_output.returncode
if result == "success":
assert context.cmd_rc == 0
elif result == "fail":
assert context.cmd_rc != 0
else:
raise AssertionError('The option {} is not allowed option for expected result of command. '
'Allowed options are "success" and "fail"'.format(result))
@then('package "{pkgs}" should be "{state}"')
def then_package_state(context, pkgs, state):
assert pkgs
pkgs_rpm = get_rpm_package_list()
pkgs_rpm_ver = get_rpm_package_version_list()
pkgs_dnf_ver = get_dnf_package_version_list()
assert pkgs_rpm
assert context.pre_rpm_packages
removed, installed = diff_package_lists(context.pre_rpm_packages, pkgs_rpm)
assert removed is not None and installed is not None
for n in split(pkgs):
if state == 'installed':
assert ('+' + n) in installed
installed.remove('+' + n)
post_rpm_present = package_version_lists(n, pkgs_rpm_ver)
assert post_rpm_present
post_dnf_present = package_version_lists(n, pkgs_dnf_ver)
assert post_dnf_present
elif state == 'removed':
assert ('-' + n) in removed
removed.remove('-' + n)
post_rpm_absence = package_absence(n, pkgs_rpm_ver)
assert not post_rpm_absence
post_dnf_absence = package_absence(n, pkgs_dnf_ver)
assert not post_dnf_absence
elif state == 'absent':
assert ('+' + n) not in installed
assert ('-' + n) not in removed
post_rpm_absence = package_absence(n, pkgs_rpm_ver)
assert not post_rpm_absence
post_dnf_absence = package_absence(n, pkgs_dnf_ver)
assert not post_dnf_absence
elif state == 'upgraded':
pre_rpm_ver = package_version_lists(n, context.pre_rpm_packages_version)
post_rpm_ver = package_version_lists(n, pkgs_rpm_ver)
assert post_rpm_ver
assert pre_rpm_ver
assert post_rpm_ver > pre_rpm_ver
elif state == 'unupgraded':
pre_rpm_ver = package_version_lists(n, context.pre_rpm_packages_version)
post_rpm_ver = package_version_lists(n, pkgs_rpm_ver)
assert post_rpm_ver
assert pre_rpm_ver
assert post_rpm_ver == pre_rpm_ver
elif state == 'downgraded':
pre_rpm_ve |
YeelerG/twilio-python | setup.py | Python | mit | 2,325 | 0.012043 | from __future__ import with_statement
import sys
from setuptools import setup, find_packages
__version__ = None
with open('twilio/version.py') as f:
exec(f.read())
# To install the twilio-python library, open a Terminal shell, then run this
# file by typing:
#
# python setup.py install
#
# You need to have the setuptools module installed. Try reading the setuptools
# documentation: http://pypi.python.org/pypi/setuptools
REQUIRES = ["httplib2 >= 0.7", "six", "pytz"]
if sys.version_info < (2, 6):
REQUIRES.append('simplejson')
if sys.version_info >= (3,0):
REQUIRES.append('pysocks')
setup(
name = "twilio",
version = __version__,
description = "Twilio API client and TwiML generator",
author = "Twilio",
author_email = "help@twilio.com",
url = "https://github.com/twilio/twilio-python/",
keywords = ["twilio","twiml"],
install_requires = REQUIRES,
# bdist conditional requirements support
extras_require={
':python_version=="3.2"': ['pysocks'],
':python_version=="3.3"': ['pysocks'],
':python_version=="3.4"': ['pysocks'],
},
packages = find_packages(),
include_package_data=True,
classifiers = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.5",
"Programming Language :: Python :: 2.6",
"Programming Language | :: Python :: 2.7",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Communications :: Telephony",
],
long_description = """\
Python Tw | ilio Helper Library
----------------------------
DESCRIPTION
The Twilio REST SDK simplifies the process of making calls using the Twilio REST API.
The Twilio REST API lets to you initiate outgoing calls, list previous calls,
and much more. See https://www.github.com/twilio/twilio-python for more information.
LICENSE The Twilio Python Helper Library is distributed under the MIT
License """ )
|
sameerparekh/pants | src/python/pants/backend/codegen/targets/java_protobuf_library.py | Python | apache-2.0 | 1,867 | 0.004821 | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import logging
from pants.backend.jvm.targets.import_jars_mixin import ImportJarsMixin
from pants.backend.jvm.targets.jvm_target import JvmTarget
from pants.base.payload import Payload
from pants.base.payload_field import PrimitiveField
logger = logging.getLogger(__name__)
class JavaProtobufLibrary(ImportJarsMixin, JvmTarget):
"""Generates a stub Java library from protobuf IDL files."""
def __init__(self, payload=None, buildflags=None, imports=None, **kwargs):
"""
:param buildflags: Unused, and will be removed in a future release.
:param list imports: List of addresses of `jar_library <#jar_library>`_
targets which contain .proto definitions.
"""
payload = payload or Payload()
# TODO(Eric Ayers): The target needs to incorporate the settings of --gen-protoc-ve | rsion
# and --gen-protoc-plugins into the fingerprint. Consider adding a custom FingeprintStrategy
# into ProtobufGen to get it.
payload.add_fields({
'import_specs': PrimitiveField(imports or ())
})
super(JavaProtobufLibrary, self).__init__(payload=payload, **kwargs)
if buildflags is not None:
logger.warn(" Target de | finition at {address} sets attribute 'buildflags' which is "
"ignored and will be removed in a future release"
.format(address=self.address.spec))
self.add_labels('codegen')
@property
def imported_jar_library_specs(self):
"""List of JarLibrary specs to import.
Required to implement the ImportJarsMixin.
"""
return self.payload.import_specs
|
anythingrandom/eclcli | eclcli/common/limits.py | Python | apache-2.0 | 3,896 | 0 | # Copyright 2012-2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Limits Action Implementation"""
import itertools
from eclcli.common import command
from eclcli.common import utils
from eclcli.identity import common as identity_common
class ShowLimits(command.Lister):
"""Show compute and block storage limits"""
def get_parser(self, prog_name):
parser = super(ShowLimits, self).get_parser(prog_name)
type_group = parser.add_mutually_exclusive_group(required=True)
type_group.add_argument(
"--absolute",
dest="is_absolute",
action="store_true",
default=False,
help="Show absolute limits")
type_group.add_argument(
"--rate",
dest="is_rate",
action="store_true",
default=False,
help="Show rate limits")
parser.add_argument(
"--reserved",
dest="is_reserved",
action="store_true",
default=False,
help="Include reservations count [only valid with --absolute]")
parser.add_argument(
'--project',
metavar='<project>',
help='Show limits for a specific project (name or ID)'
' [only valid with --absolute]',
)
parser.add_argument(
'--domain',
metavar='<domain>',
help='Domain that owns --project (name or ID)'
' [only valid with --absolute]',
)
return parser
def take_action(self, parsed_args):
compute_client = self.app.client_manager.compute
volume_client = self.app.client_manager.volume
project_id = None
if parsed_args.project is not None:
identity_client = self.app.client_manager.identity
if parsed_args.domain is not None:
domain = identity_common.find_domain(identity_client,
parsed_args.domain)
project_id = utils.find_resource(identity_client.projects,
parsed_args.project,
domain_id=domain.id).id
else:
project_id = utils.find_resource(identity_client.projects,
parsed_args.project).id
compute_limits = compute_client.limits.get(parsed_args.is_reserved,
tenant_id=project_id)
volume_limits = volume_client.limits.get()
if parsed_args.is_absolute:
compute_limits = compute_limits.absolute
volume_limits = volume_limits.absolute
columns = ["Name", "Value"]
return (columns, (utils.get_item_properties(s, columns)
for s in itertools.chain(compute_limits, volume_limits)))
elif parsed_args.is_rate:
compute_limits = compute_limits.rate
volume_limits = volume_limits.rate
columns = ["Verb", "URI", "Value | ", "Remain", "Unit",
"Next Available"]
return (columns, (utils.get_item_properties(s, columns)
for s in i | tertools.chain(compute_limits, volume_limits)))
else:
return ({}, {})
|
thurloat/django-postleware | postleware/tests.py | Python | bsd-3-clause | 1,561 | 0.000641 | from django.http import HttpResponse
from django.utils import unittest
from django.test.client import C | lient
from postleware import PostResponseCachebusterMiddleware
class PostResponseMiddleware(unittest.TestCase):
def setUp(self):
self.client = Client()
def test_header_added_when_necessary(self):
# 'Cache-Control: no-cache' is added to POSTs
response = self.client.post('/test1', {'foo':'bar'})
self.assertEqual(response['Cache-Control'], 'no-cache')
# 'Cache-Control' is NOT added to GETs
response = self.client.get('/test1')
self.asse | rtFalse(response.has_header('Cache-Control'))
def test_header_not_added_when_present(self):
middleware = PostResponseCachebusterMiddleware()
test_header_setting = 'test-setting'
raw_response = HttpResponse()
# 'Cache-Control' header isn't modified when present on POSTs
request = MockRequest('POST')
raw_response['Cache-Control'] = test_header_setting
response = middleware.process_response(request, raw_response)
self.assertEqual(response['Cache-Control'], test_header_setting)
# 'Cache-Control' header isn't modified when present on GETs
request = MockRequest('GET')
raw_response['Cache-Control'] = test_header_setting
response = middleware.process_response(request, raw_response)
self.assertEqual(response['Cache-Control'], test_header_setting)
class MockRequest(object):
def __init__(self, method=None):
self.method = method
|
RianFuro/vint | vint/linting/config/config_comment_source.py | Python | mit | 1,825 | 0.003836 | import re
from vint.linting.config.config_source import ConfigSource
from vint.ast.node_type import NodeType
CONFIG_COMMENT_PATTERN = re.compile(r'^\s*vint:\s*')
POLICY_SWITCH_PATTERN = re.compile(r'[-\+]\S+')
class ConfigCommentSource(ConfigSource):
""" A class for ConfigCommentSourcees.
This class provide to change config by modeline-like config comments as
follow:
" vint: -PolicyA
" vint: +PolicyA
" vint: -PolicyA +PolicyB
Prefix vint: means that the comment is a config comment. And, +PolicyName
means to enable the policy, and -PolicyName means to disable.
"""
def __init__(self):
self._config_dict = {
'policies': {}
}
def get_config_dict(self):
return self._config_dict
def is_requesting_update(self, node):
return self._is_config_comment(node)
def update_by_node(self, node):
comment_content = node['str']
config_dict = self._config_dict
config_dict['policies'] = self._parse_config_comment(comment_content)
def _is_config_comment(self, node):
if NodeType(node['t | ype']) is not Nod | eType.COMMENT:
return False
comment_content = node['str']
return CONFIG_COMMENT_PATTERN.match(comment_content) is not None
def _parse_config_comment(self, comment_content):
striped_comment_content = CONFIG_COMMENT_PATTERN.sub('', comment_content)
policy_switches = POLICY_SWITCH_PATTERN.findall(striped_comment_content)
config_dict = {}
for policy_switch in policy_switches:
policy_name = policy_switch[1:]
is_enabling_switch = policy_switch[0] == '+'
config_dict[policy_name] = {
'enabled': is_enabling_switch
}
return config_dict
|
CrazyBBer/Python-Learn-Sample | gui.py | Python | mit | 433 | 0.04157 | # GUI
# python | for tkinter nature
from tkinter import *
class Application(Frame):
def __init__(self,master=None):
Frame.__init__(self,master)
self.pack()
self.createWidgets()
def createWidgets(self):
self.helloLabel=Label(self,text='Hello World')
self.helloLabel.pack()
self.quitButton = Button(self,text='Quit app')
self.quitButton.pack()
app = Application()
app.master.title('Hello world gui')
a | pp.mainloop() |
iot-factory/synapse | synapse/rest/client/v2_alpha/tokenrefresh.py | Python | apache-2.0 | 2,090 | 0.000478 | # -*- coding: utf-8 -*-
# Copyright 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, eith | er express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.internet import defer
from synapse.api.errors import AuthError, StoreError, SynapseError
from synapse.http.servlet import RestServlet
from ._base import client_v2_pattern, parse_json_dict_from_request
class TokenRefreshRestServlet(RestServlet):
"""
Exchanges refresh tokens for a pair of an access token and a new refresh
token.
"""
PATTERN = clien | t_v2_pattern("/tokenrefresh")
def __init__(self, hs):
super(TokenRefreshRestServlet, self).__init__()
self.hs = hs
self.store = hs.get_datastore()
@defer.inlineCallbacks
def on_POST(self, request):
body = parse_json_dict_from_request(request)
try:
old_refresh_token = body["refresh_token"]
auth_handler = self.hs.get_handlers().auth_handler
(user_id, new_refresh_token) = yield self.store.exchange_refresh_token(
old_refresh_token, auth_handler.generate_refresh_token)
new_access_token = yield auth_handler.issue_access_token(user_id)
defer.returnValue((200, {
"access_token": new_access_token,
"refresh_token": new_refresh_token,
}))
except KeyError:
raise SynapseError(400, "Missing required key 'refresh_token'.")
except StoreError:
raise AuthError(403, "Did not recognize refresh token")
def register_servlets(hs, http_server):
TokenRefreshRestServlet(hs).register(http_server)
|
yuanagain/seniorthesis | venv/lib/python2.7/site-packages/scipy/io/matlab/mio.py | Python | mit | 8,793 | 0.000341 | """
Module for reading and writing matlab (TM) .mat files
"""
# Authors: Travis Oliphant, Matthew Brett
from __future__ import division, print_function, absolute_import
import numpy as np
from scipy._lib.six import string_types
from .miobase import get_matfile_version, docfiller
from .mio4 import MatFile4Reader, MatFile4Writer
from .mio5 import MatFile5Reader, MatFile5Writer
__all__ = ['mat_reader_factory', 'loadmat', 'savemat', 'whosmat']
def _open_file(file_like, appendmat):
''' Open `file_like` and return as file-like object '''
if isinstance(file_like, string_types):
try:
return open(file_like, 'rb')
except IOError as e:
if appendmat and not file_like.endswith('.mat'):
file_like += '.mat'
try:
return open(file_like, 'rb')
except IOError:
pass # Rethrow the original exception.
raise
# not a string - maybe file-like object
try:
file_like.read(0)
except AttributeError:
raise IOError('Reader needs file name or open file-like object')
return file_like
@docfiller
def mat_reader_factory(file_name, appendmat=True, **kwargs):
"""
Create reader for matlab .mat format files.
Parameters
----------
%(file_arg)s
%(append_arg)s
%(load_args)s
%(struct_arg)s
Returns
-------
matreader : MatFileReader object
Initialized instance of MatFileReader class matching the mat file
type detected in `filename`.
"""
byte_stream = _open_file(file_name, appendmat)
mjv, mnv = get_matfile_version(byte_stream)
if mjv == 0:
return MatFile4Reader(byte_stream, **kwargs)
elif mjv == 1:
return MatFile5Reader(byte_stream, **kwargs)
elif mjv == 2:
raise NotImplementedError('Please use HDF reader for matlab v7.3 files')
else:
raise TypeError('Did not recognize version %s' % mjv)
@docfiller
def loadmat(file_name, mdict=None, appendmat=True, **kwargs):
"""
Load MATLAB file.
Parameters
----------
file_name : str
Name of the mat file (do not need .mat extension if
appendmat==True). Can also pass open file-like object.
mdict : dict, optional
Dictionary in which to insert matfile variables.
appendmat : bool, optional
True to append the .mat extension to the end of the given
filename, if not already present.
byte_order : str or None, optional
None by d | efault, implying byte order guessed from mat
fi | le. Otherwise can be one of ('native', '=', 'little', '<',
'BIG', '>').
mat_dtype : bool, optional
If True, return arrays in same dtype as would be loaded into
MATLAB (instead of the dtype with which they are saved).
squeeze_me : bool, optional
Whether to squeeze unit matrix dimensions or not.
chars_as_strings : bool, optional
Whether to convert char arrays to string arrays.
matlab_compatible : bool, optional
Returns matrices as would be loaded by MATLAB (implies
squeeze_me=False, chars_as_strings=False, mat_dtype=True,
struct_as_record=True).
struct_as_record : bool, optional
Whether to load MATLAB structs as numpy record arrays, or as
old-style numpy arrays with dtype=object. Setting this flag to
False replicates the behavior of scipy version 0.7.x (returning
numpy object arrays). The default setting is True, because it
allows easier round-trip load and save of MATLAB files.
verify_compressed_data_integrity : bool, optional
Whether the length of compressed sequences in the MATLAB file
should be checked, to ensure that they are not longer than we expect.
It is advisable to enable this (the default) because overlong
compressed sequences in MATLAB files generally indicate that the
files have experienced some sort of corruption.
variable_names : None or sequence
If None (the default) - read all variables in file. Otherwise
`variable_names` should be a sequence of strings, giving names of the
matlab variables to read from the file. The reader will skip any
variable with a name not in this sequence, possibly saving some read
processing.
Returns
-------
mat_dict : dict
dictionary with variable names as keys, and loaded matrices as
values.
Notes
-----
v4 (Level 1.0), v6 and v7 to 7.2 matfiles are supported.
You will need an HDF5 python library to read matlab 7.3 format mat
files. Because scipy does not supply one, we do not implement the
HDF5 / 7.3 interface here.
"""
variable_names = kwargs.pop('variable_names', None)
MR = mat_reader_factory(file_name, appendmat, **kwargs)
matfile_dict = MR.get_variables(variable_names)
if mdict is not None:
mdict.update(matfile_dict)
else:
mdict = matfile_dict
if isinstance(file_name, string_types):
MR.mat_stream.close()
return mdict
@docfiller
def savemat(file_name, mdict,
appendmat=True,
format='5',
long_field_names=False,
do_compression=False,
oned_as='row'):
"""
Save a dictionary of names and arrays into a MATLAB-style .mat file.
This saves the array objects in the given dictionary to a MATLAB-
style .mat file.
Parameters
----------
file_name : str or file-like object
Name of the .mat file (.mat extension not needed if ``appendmat ==
True``).
Can also pass open file_like object.
mdict : dict
Dictionary from which to save matfile variables.
appendmat : bool, optional
True (the default) to append the .mat extension to the end of the
given filename, if not already present.
format : {'5', '4'}, string, optional
'5' (the default) for MATLAB 5 and up (to 7.2),
'4' for MATLAB 4 .mat files.
long_field_names : bool, optional
False (the default) - maximum field name length in a structure is
31 characters which is the documented maximum length.
True - maximum field name length in a structure is 63 characters
which works for MATLAB 7.6+.
do_compression : bool, optional
Whether or not to compress matrices on write. Default is False.
oned_as : {'row', 'column'}, optional
If 'column', write 1-D numpy arrays as column vectors.
If 'row', write 1-D numpy arrays as row vectors.
See also
--------
mio4.MatFile4Writer
mio5.MatFile5Writer
"""
file_is_string = isinstance(file_name, string_types)
if file_is_string:
if appendmat and file_name[-4:] != ".mat":
file_name = file_name + ".mat"
file_stream = open(file_name, 'wb')
else:
if not hasattr(file_name, 'write'):
raise IOError('Writer needs file name or writeable '
'file-like object')
file_stream = file_name
if format == '4':
if long_field_names:
raise ValueError("Long field names are not available for version 4 files")
MW = MatFile4Writer(file_stream, oned_as)
elif format == '5':
MW = MatFile5Writer(file_stream,
do_compression=do_compression,
unicode_strings=True,
long_field_names=long_field_names,
oned_as=oned_as)
else:
raise ValueError("Format should be '4' or '5'")
MW.put_variables(mdict)
if file_is_string:
file_stream.close()
@docfiller
def whosmat(file_name, appendmat=True, **kwargs):
"""
List variables inside a MATLAB file.
Parameters
----------
%(file_arg)s
%(append_arg)s
%(load_args)s
%(struct_arg)s
Returns
-------
variables : list of tuples
A list of tuples, where each tuple holds the matrix name (a string),
its shape (tuple of ints), and its data class (a string).
Possible data classes are: int8, uint8, int16, uint |
krainet/Wordplease | users/api_urls.py | Python | mit | 320 | 0 | # -*- coding: utf-8 -*-
from django.conf.urls import url, include
from rest_framework.routers import DefaultRouter
from users.api import UserViewSet
router = DefaultRouter()
router.register('user', UserViewSet, base_name= | 'user')
urlpatterns = [
url(r'1.0/', include(r | outer.urls)), # include de las url's router
]
|
trhd/cmdln | test/cmdln_main1.py | Python | mit | 443 | 0.011287 | #!/usr/bin/env python
"""
$ python cmdln_main1.py foo
hello from foo
$ python cmdln_main1.py #expecttest: INTERACTIVE, PROMPT="shell> "
shell> | foo
hello from foo
shell> ^D
"""
import sys
import cmdln
class Shell(cmdln.RawCmdln):
"This is my shell."
name = "shell"
def do_foo(self, argv):
print("hello from foo")
if __name__ == "__main__":
sys.exit( Shell().main(loop=cmdln.LOOP_IF_EMPT | Y) )
|
tetherless-world/dco-viz | dco-stats/puStats.py | Python | lgpl-3.0 | 984 | 0.029472 | from dcoStats import DCOStats
query = "where { ?obj a dco:ProjectUpdate . ?obj dco:forReportingYear <http://info.deepcarbon.net/individual/n33426> . }"
objFile = "puObjs"
cntFile = "puCount"
rqFile = "pu.rq"
def printIt( uri, jObj ):
if jObj and len( jObj ) > 0 and "Name" in jObj[0]:
print( jObj[0]["Name"]["value"] )
if "dcoId" in jObj[0]:
print( " DCO-ID: " + jObj[0]["dcoId"]["value"] )
if "ReportingYear" in jObj[0]:
print( " For Reporting year: " + jObj[0]["ReportingYear"]["value"] )
if "Project" in j | Obj[0]:
print( " For Project: " | + jObj[0]["Project"]["value"] )
if "EnteredBy" in jObj[0]:
print( " Entered By: " + jObj[0]["EnteredBy"]["value"] )
else:
print( "Missing or no information for Project Update " + uri )
print( "" )
print( "Project Updates" )
stats = DCOStats()
stats.getNew( query, objFile, query, cntFile, rqFile, printIt )
print( "" )
|
j-coll/opencga | opencga-app/app/cloud/azure/arm/scripts/mount.py | Python | apache-2.0 | 7,799 | 0.003334 | import sys
import socket
import fcntl
import struct
import random
import os
import shutil
import subprocess
import time
import csv
import ipaddress
# Run `python3 -m unittest discover` in this dir to execute tests
default_mount_options_nfs = "nfs hard,nointr,proto=tcp,mountproto=tcp,retry=30 0 0"
default_mount_options_cifs = "dir_mode=0777,file_mode=0777,serverino,nofail,uid=1001,gid=1001,vers=3.0"
def get_ip_address():
with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as s:
try:
# doesn't even have to be reachable
s.connect(("10.255.255.255", 1))
return s.getsockname()[0]
except:
return "127.0.0.1"
def ip_as_int(ip):
return int(ipaddress.ip_address(ip))
def remove_lines_containing(file, contains):
with open(file, "r+") as file:
d = file.readlines()
file.seek(0)
for i in d:
if contains not in i and i != "\n":
file.write(i)
file.truncate()
def print_help():
print("For example 'sudo python mount.py nfs '10.20.0.1:/folder1/nfsfolder2,10.20.0.1:/folder1/nfsfolder2'")
print(
"or 'sudo python mount.py azurefiles <storage-account-name>,<share-name>,<storage-account-key>'"
)
def install_apt_package(package):
try:
print("Attempt to install {}".format(package))
subprocess.check_call(["apt", "install", package, "-y"])
print("Install completed successfully")
except subprocess.CalledProcessError as e:
print("Failed install {} error: {}".format(package, e))
raise
# main allows the the mount script to be executable
def main():
if len(sys.argv) < 3:
print("Expected arg1: 'mount_type' and arg2 'mount_data'")
print_help()
exit(1)
mount_type = str(sys.argv[1])
mount_data = str(sys.argv[2])
mount_share(mount_type, mount_data)
# mount_share allows it to be invoked from other python scripts
def mount_share(mount_type, mount_data):
if mount_type.lower() != "nfs" and mount_type.lower() != "azurefiles":
print("Expected first arg to be either 'nfs' or 'azurefiles'")
print_help()
exit(1)
if mount_data == "":
print(
"""Expected second arg to be the mounting data. For NFS, this should be a CSV of IPs/FQDNS for the NFS servers with NFSExported dirs.
For example, '10.20.0.1:/folder1/nfsfolder2,10.20.0.1:/folder1/nfsfolder2'
For azure files this should be the | azure files connection details."""
)
print_help | ()
exit(2)
print("Mounting type: {}".format(sys.argv[1]))
print("Mounting data: {}".format(sys.argv[2]))
mount_point_permissions = 0o0777 # Todo: What permissions does this really need?
primary_mount_folder = "/media/primarynfs"
seconday_mount_folder_prefix = "/media/secondarynfs"
fstab_file_path = "/etc/fstab"
try:
# Create folder to mount to
if not os.path.exists(primary_mount_folder):
os.makedirs(primary_mount_folder)
os.chmod(primary_mount_folder, mount_point_permissions)
# Make a backup of the fstab config incase we go wrong
shutil.copy(fstab_file_path, "/etc/fstab-mountscriptbackup")
# Clear existing NFS mount data to make script idempotent
remove_lines_containing(fstab_file_path, primary_mount_folder)
remove_lines_containing(fstab_file_path, seconday_mount_folder_prefix)
if mount_type.lower() == "azurefiles":
mount_azurefiles(fstab_file_path, mount_data, primary_mount_folder)
if mount_type.lower() == "nfs":
mount_nfs(fstab_file_path, mount_data, primary_mount_folder, mount_point_permissions)
except IOError as e:
print("I/O error({0})".format(e))
exit(1)
except:
print("Unexpected error:{0}".format, sys.exc_info())
raise
print("Done editing fstab ... attempting mount")
def mount_all():
subprocess.check_call(["mount", "-a"])
retryFunc("mount shares", mount_all, 100)
def retryFunc(desc, funcToRetry, maxRetries):
# Retry mounting for a while to handle race where VM exists before storage
# or temporary issue with storage
print("Attempting, with retries, to: {}".format(desc))
retryExponentialFactor = 3
for i in range(1, maxRetries):
if i == maxRetries:
print("Failed after max retries")
exit(3)
try:
print("Attempt #{}".format(str(i)))
funcToRetry()
except subprocess.CalledProcessError as e:
print("Failed:{0}".format(e))
retry_in = i * retryExponentialFactor
print("retrying in {0}secs".format(retry_in))
time.sleep(retry_in)
continue
else:
print("Succeeded to: {0} after {1} retries".format(desc, i))
break
def mount_nfs(fstab_file_path, mount_data, primary_mount_folder, mount_point_permissions):
# # Other apt instances on the machine may be doing an install
# # this means ours will fail so we retry to ensure success
def install_nfs():
install_apt_package("nfs-common")
retryFunc("install nfs-common", install_nfs, 20)
ips = mount_data.split(",")
print("Found ips:{}".format(",".join(ips)))
# Deterministically select a primary node from the available
# servers for this vm to use. By using the ip as a seed this ensures
# re-running will get the same node as primary.
# This enables spreading the load across multiple storage servers in a cluster
# like `Avere` or `Gluster` for higher throughput.
current_ip = get_ip_address()
current_ip_int = ip_as_int(current_ip)
print("Using ip as int: {0} for random seed".format((current_ip_int)))
random.seed(current_ip_int)
random_node = random.randint(0, len(ips) - 1)
primary = ips[random_node]
ips.remove(primary)
secondarys = ips
print("Primary node selected: {}".format(primary))
print("Secondary nodes selected: {}".format(",".join(secondarys)))
with open(fstab_file_path, "a") as file:
print("Mounting primary")
file.write(
"\n{} {} {}".format(
primary.strip(), primary_mount_folder, default_mount_options_nfs
)
)
print("Mounting secondarys")
number = 0
for ip in secondarys:
number = number + 1
folder = "/media/secondarynfs" + str(number)
if not os.path.exists(folder):
os.makedirs(folder)
os.chmod(folder, mount_point_permissions)
file.write(
"\n{} {} {}".format(ip.strip(), folder, default_mount_options_nfs)
)
def mount_azurefiles(fstab_file_path, mount_data, primary_mount_folder):
# Other apt instances on the machine may be doing an install
# this means ours will fail so we retry to ensure success
def install_cifs():
install_apt_package("cifs-utils")
retryFunc("install cifs-utils", install_cifs, 20)
params = mount_data.split(",")
if len(params) != 3:
print("Wrong params for azure files mount, expected 3 as CSV")
print_help()
exit(1)
account_name = params[0]
share_name = params[1]
account_key = params[2]
with open(fstab_file_path, "a") as file:
print("Mounting primary")
file.write(
"\n//{0}.file.core.windows.net/{1} {2} cifs username={0},password={3},{4}".format(
account_name,
share_name,
primary_mount_folder,
account_key,
default_mount_options_cifs,
)
)
if __name__ == "__main__":
main()
|
nachandr/cfme_tests | cfme/test_framework/pytest_plugin.py | Python | gpl-2.0 | 985 | 0.001015 | """
cfme main plugin
This provides the option group and disables pytest logging plugin
Also provides uncollection stats during testrun/collection
"""
import pytest
@pytest.ho | okimpl(tryfirst=True)
def pytest_addoption(parser):
# Create the cfme option group for use in other plugins
parser.getgroup('cfme', 'cfme: options related to cfme/miq appliances')
def pytest_configure(config):
# also disable the pytest logging system since its triggering | issues with our own
config.pluginmanager.set_blocked('logging-plugin')
def pytest_collection_finish(session):
from cfme.fixtures.pytest_store import store
store.terminalreporter.write(
"Uncollection Stats:\n", bold=True)
for reason, value in store.uncollection_stats.items():
store.terminalreporter.write(
f" {reason}: {value}\n", bold=True)
store.terminalreporter.write(
" {} tests left after all uncollections\n".format(len(session.items)),
bold=True)
|
jodal/comics | comics/comics/lunche24.py | Python | agpl-3.0 | 607 | 0 | from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "Lunch (e24.no)"
language = "no"
url = "http:// | www.e24.no/lunch/"
start_date = "2009-10-21"
rights = "Børge Lund"
class Crawler(CrawlerBase):
history_capable_date = "2012-11-02"
schedule = "Mo,Tu,We,Th,Fr,Sa"
time_zone = "Europe/Oslo"
def crawl(self, pub_date):
url = "http://static.e24.no/images/comics/lunch_%s.gif" % | (
pub_date.strftime("%Y%m%d")
)
return CrawlerImage(url)
|
tph-thuering/vnetsource | ts_om_viz/forms.py | Python | mpl-2.0 | 571 | 0.010508 | # -*- coding: utf-8 -*-
from django import forms
class DocumentForm(forms.Form):
xmlfile = forms.FileField(label='Select the input xml file (scenario.xml)')
outputfile = forms.FileField(label='Select the survery output file (output.txt) if available', required=Fal | se)
ctsoutputfile = forms.FileField(label='Select the continuous file (ctsout.txt) if available', required=False)
save_to = forms.BooleanField(initial=False, label='Save to My | Scenarios', required=False)
scenario_label = forms.CharField(label="Scenario name (optional)", required=False) |
zubie7a/Algorithms | LeetCode/02_Medium/lc_435.py | Python | mit | 827 | 0.001209 | # 435 - Non Overlapping Intervals (Medium)
# https://leetcode.com/problems/non-overlapping-intervals/
# How many intervals have to be erased from a list so that all
# remaining intervals are not overlapping?
class Solution(object):
def eraseOverlapIntervals(self, intervals):
"""
:type intervals: List[Interval]
:rtype: int
"""
sortIntervals = sorted(intervals, key=lambda x: x[1])
end = -1<<31
er | ased = 0
# Sort intervals by "end" key, and check that each interval's
# "start" is no lesser than the pr | eviously biggest "end" found.
# Those that are lesser then are overlapping.
for intv in sortIntervals:
if intv[0] >= end:
end = intv[1]
else:
erased += 1
return erased
|
evanunderscore/defopt | examples/short.py | Python | gpl-3.0 | 582 | 0 | """
Example showing short flags in defopt.
Y | ou can add alternative short flags to arguments by passing a
dictionary to `defopt.run` which maps flag names to single letters.
Code usage::
>>> | main(count=2)
Command line usage::
$ python short.py -C 2
$ python short.py --count 2
"""
import defopt
def main(*, count: int = 1):
"""
Example function which prints a message.
:param count: Number of times to print the message
"""
for _ in range(count):
print('hello!')
if __name__ == '__main__':
defopt.run(main, short={'count': 'C'})
|
frederica07/Dragon_Programming_Process | PyOpenGL-3.0.2/OpenGL/raw/GL/ARB/compressed_texture_pixel_storage.py | Python | bsd-2-clause | 743 | 0.004038 | '''Autogenerated by get_gl_extensions script, do not edit!'''
from OpenGL import platform as _p
from OpenGL.GL import glget
EXTENSION_NAME = 'GL_ARB_compressed_texture_pixel_storage'
_p.unpack_constants( """GL_UNPACK_COMPRESSED_BLOCK_WIDTH 0x9127
GL_UNPACK_COMPRESSED_BLOCK_HEIGHT 0x9128
GL_UNPACK_COMPRESSED_BLOCK_DEPTH 0x9129
GL_UNPACK_COMPRESSED_BLOCK_SIZE 0x912A
GL_PACK_COMPRESSED_BLOCK_WIDTH 0x912B
GL_PACK_COMPRESSED_BLOCK_HEIGHT 0x912C
GL_PACK_COMPRESSED_BLOCK_DEPTH 0x912D
GL_PACK_COMPRESSED_BLOCK_SIZE 0x912E""", globals())
def glInitCompressedTexturePixelStorageARB():
'''Return boolean indi | cating whether this extension is availab | le'''
from OpenGL import extensions
return extensions.hasGLExtension( EXTENSION_NAME )
|
CloverHealth/temporal-sqlalchemy | temporal_sqlalchemy/tests/conftest.py | Python | bsd-3-clause | 1,589 | 0.000629 | """ pytest fixtures for test suite """
import pytest
import sqlalchemy as sa
import sqlalchemy.orm as orm
import testing.postgresql
import temporal_sqlalchemy as temporal
from . import models
@p | ytest.yield_fixture(scope='session')
def engine():
"""Creates a postgres database for testing, returns a sqlalchemy engine"""
db = testing.postgresql.Postgresql()
engine_ = sa.create_engine(db.url())
yield engine_
engine_.dispose()
db.stop()
@pytest.yield_fixture(scope='session')
def connection(engine): # pylint: disable=redefined-outer-name
"""Session-wide test database | ."""
conn = engine.connect()
for extension in ['uuid-ossp', 'btree_gist']:
conn.execute("""\
CREATE EXTENSION IF NOT EXISTS "%s"
WITH SCHEMA pg_catalog
""" % extension)
for schema in [models.SCHEMA, models.TEMPORAL_SCHEMA]:
conn.execute('CREATE SCHEMA IF NOT EXISTS ' + schema)
models.basic_metadata.create_all(conn)
yield conn
conn.close()
@pytest.yield_fixture(scope="session")
def sessionmaker():
""" yields a temporalized sessionmaker -- per test session """
Session = orm.sessionmaker()
yield temporal.temporal_session(Session)
Session.close_all()
@pytest.yield_fixture()
def session(connection: sa.engine.Connection, sessionmaker: orm.sessionmaker): # pylint: disable=redefined-outer-name
""" yields temporalized session -- per test """
transaction = connection.begin()
sess = sessionmaker(bind=connection)
yield sess
transaction.rollback()
sess.close()
|
armab/st2contrib | packs/sensu/actions/silence.py | Python | apache-2.0 | 833 | 0 | #!/usr/bin/python
from lib import sensu
import argparse
import json
import time
parser = argparse.ArgumentParser(description='Sensu Silence Actions')
parser.add_argument('--client', nargs='?', required=True)
parser.add_argument('--check', nargs='?', default=False) |
parser.add_argument('--expiration', nargs='?', default=False)
parser.add_argument('--message', default="Stash created by StackStorm")
args = parser.parse_args()
stashes = sensu.Stashes('config.yaml')
data = {}
data['message'] = args.message
current_time = time.time()
data['timestamp'] = int(curren | t_time)
if args.expiration:
data['expire'] = int(args.expiration)
else:
expiration = False
path = "silence/%s" % args.client
if args.check:
path = "%s/%s" % (path, args.check)
data['path'] = path
print(stashes.post_by_path(path, json.dumps(data)))
|
sipwise/repoapi | repoapi/models/__init__.py | Python | gpl-3.0 | 825 | 0 | # Copyright (C) 2015-2020 The Sipwise Team - http://sipwise.com
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implie | d warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PUR | POSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
from .gri import GerritRepoInfo # noqa
from .jbi import JenkinsBuildInfo # noqa
from .wni import WorkfrontNoteInfo # noqa
|
openqt/algorithms | projecteuler/pe598-split-divisibilities.py | Python | gpl-3.0 | 660 | 0.010606 | #!/usr/bin/env python
# coding=utf-8
"""598. Split Divis | ibilities
https://projecteuler.net/problem=598
Consider the number 48.
There are five pairs of integers $a$ and $b$ ($a \leq b$) such that $a \times
b=48$: (1,48), (2,24), (3,16), (4,12) and (6,8).
It can be seen that both 6 and 8 have 4 divisors.
So of those five pairs one consists of two integers with the same number of
divisors.
In gener | al:
Let $C(n)$ be the number of pairs of positive integers $a \times b=n$, ($a
\leq b$) such that $a$ and $b$ have the same number of divisors;
so $C(48)=1$.
You are given $C(10!)=3$: (1680, 2160), (1800, 2016) and (1890,1920).
Find $C(100!)$
"""
|
adaptive-learning/robomission | backend/learn/tests/test_mastery.py | Python | gpl-3.0 | 5,319 | 0.000188 | import pytest
from learn.models import Task, ProblemSet, Domain
from learn.models import Student, TaskSession, Skill
from learn.mastery import has_mastered, get_level
from learn.mastery import get_first_unsolved_mission
from learn.mastery import get_first_unsolved_phase
from learn.mastery import get_current_mission_phase
# Django DB is always needed for many-to-many relations (chunks.tasks)
@pytest.mark.django_db
def test_has_mastered__initially_not():
ps = ProblemSet.objects.create()
ps.add_task()
student = Student.objects.create()
assert not has_mastered(student, ps)
# django db is always needed for many-to-many relations (student.skills)
# todo: find a way how to test the following without using db.
@pytest.mark.django_db
def test_has_mastered__when_skill_is_1():
ps = ProblemSet.objects.create()
student = Student.objects.create()
Skill.objects.create(student=student, chunk=ps, value=1.0)
assert has_mastered(student, ps)
@pytest.mark.django_db
def test_has_mastered__mastered_parts():
m1 = ProblemSet.objects.create()
p1 = m1.add_part()
p2 = m1.add_part()
student = Student.objects.create()
Skill.objects.create(student=student, chunk=m1, value=1)
Skill.objects.create(student=student, chunk=p1, value=1)
Skill.objects.create(student=student, chunk=p2, value=1)
assert has_mastered(student, m1)
@pytest.mark.django_db
def test_has_mastered__not_when_skill_is_low():
ps = ProblemSet.objects.create()
student = Student.objects.create()
Skill.objects.create(student=student, chunk=ps, value=0.5)
assert not has_mastered(student, ps)
@pytest.mark.django_db
def test_has_mastered__not_unmastered_subchunk | ():
m1 = ProblemSet.objects.create()
p1 = m1.add_part()
p2 = m1.add_part()
student = Student.objects.create()
Skill.objects.create(student=student, chunk=m1, value=1)
Skill.objects.create(student=student, chunk=p1, value=1)
Skill.objects.create(student=student, chunk=p2, value=0)
assert not has_mastered(student, m1)
@pytest.mark.django_db
def test_get_first_unsolved_mission__single():
mission = ProblemSet.objects.create()
domain = Domain.obj | ects.create()
domain.problemsets.set([mission])
student = Student.objects.create()
assert get_first_unsolved_mission(domain, student) == mission
@pytest.mark.django_db
def test_get_first_unsolved_mission__all_unsolved():
mission1 = ProblemSet.objects.create(section='1')
mission2 = ProblemSet.objects.create(section='2')
domain = Domain.objects.create()
domain.problemsets.set([mission1, mission2])
student = Student.objects.create()
assert get_first_unsolved_mission(domain, student) == mission1
@pytest.mark.django_db
def test_get_first_unsolved_mission__first_solved():
mission1 = ProblemSet.objects.create(section='1')
mission2 = ProblemSet.objects.create(section='2')
domain = Domain.objects.create()
domain.problemsets.set([mission1, mission2])
student = Student.objects.create()
Skill.objects.create(student=student, chunk=mission1, value=1)
assert get_first_unsolved_mission(domain, student) == mission2
@pytest.mark.django_db
def test_get_first_unsolved_phase__all_unsolved():
m1 = ProblemSet.objects.create()
p1 = m1.add_part()
m1.add_part()
student = Student.objects.create()
assert get_first_unsolved_phase(m1, student) == p1
@pytest.mark.django_db
def test_get_first_unsolved_phase__first_solved():
m1 = ProblemSet.objects.create()
p1 = m1.add_part()
p2 = m1.add_part()
student = Student.objects.create()
Skill.objects.create(student=student, chunk=p1, value=1)
assert get_first_unsolved_phase(m1, student) == p2
@pytest.mark.django_db
def test_get_first_unsolved_phase__all_solved():
m1 = ProblemSet.objects.create()
p1 = m1.add_part()
student = Student.objects.create()
Skill.objects.create(student=student, chunk=p1, value=1)
Skill.objects.create(student=student, chunk=m1, value=1)
assert get_first_unsolved_phase(m1, student) == None
@pytest.mark.django_db
def test_get_mission_phase__all_solved():
domain = Domain.objects.create()
m1 = ProblemSet.objects.create()
p1 = m1.add_part()
domain.problemsets.set([m1, p1])
student = Student.objects.create()
Skill.objects.create(student=student, chunk=p1, value=1)
Skill.objects.create(student=student, chunk=m1, value=1)
assert get_current_mission_phase(domain, student) == (None, None)
@pytest.mark.django_db
def test_get_level_for_new_student():
mission = ProblemSet.objects.create()
domain = Domain.objects.create()
domain.problemsets.set([mission])
student = Student.objects.create()
assert get_first_unsolved_mission(domain, student) == mission
assert get_level(domain, student) == 1
@pytest.mark.django_db
def test_level_is_number_of_solved_missions_plus_1():
m1 = ProblemSet.objects.create()
m2 = ProblemSet.objects.create()
m3 = ProblemSet.objects.create()
domain = Domain.objects.create()
domain.problemsets.set([m1, m2, m3])
student = Student.objects.create()
Skill.objects.create(student=student, chunk=m1, value=1)
Skill.objects.create(student=student, chunk=m3, value=1)
assert get_level(domain, student) == 3
|
cislaa/prophy | prophyc/tests/test_model.py | Python | mit | 29,060 | 0.007123 | from prophyc import model
def test_typedef_repr(): |
typedef = model.Typedef("my_typedef", "u8")
assert str(typedef) == "u8 my_typedef"
def test_struct_repr():
struct = model.Struct("MyStruct", [
model.St | ructMember("a", "u8"),
model.StructMember("b", "u16", bound = 'xlen'),
model.StructMember("c", "u32", size = 5),
model.StructMember("d", "u64", bound = 'xlen', size = 5),
model.StructMember("e", "UU", unlimited = True),
model.StructMember("f", "UUUU", optional = True)
])
assert str(struct) == """\
MyStruct
u8 a
u16 b<>(xlen)
u32 c[5]
u64 d<5>(xlen)
UU e<...>
UUUU* f
"""
def test_union_repr():
union = model.Union("MyUnion", [
model.UnionMember("a", "u8", 1),
model.UnionMember("b", "u16", 2),
model.UnionMember("c", "u32", 3)
])
assert str(union.members[0]) == "1: u8 a"
assert str(union.members[1]) == "2: u16 b"
assert str(union.members[2]) == "3: u32 c"
assert str(union) == """\
MyUnion
1: u8 a
2: u16 b
3: u32 c
"""
def test_split_after():
generator = model.split_after([1, 42, 2, 3, 42, 42, 5], lambda x: x == 42)
assert [x for x in generator] == [[1, 42], [2, 3, 42], [42], [5]]
def test_model_sort_enums():
nodes = [model.Typedef("B", "A"),
model.Typedef("C", "A"),
model.Enum("A", [])]
model.topological_sort(nodes)
assert ["A", "B", "C"] == [node.name for node in nodes]
def test_model_sort_typedefs():
nodes = [model.Typedef("A", "X"),
model.Typedef("C", "B"),
model.Typedef("B", "A"),
model.Typedef("E", "D"),
model.Typedef("D", "C")]
model.topological_sort(nodes)
assert ["A", "B", "C", "D", "E"] == [node.name for node in nodes]
def test_model_sort_structs():
nodes = [model.Struct("C", [model.StructMember("a", "B"),
model.StructMember("b", "A"),
model.StructMember("c", "D")]),
model.Struct("B", [model.StructMember("a", "X"),
model.StructMember("b", "A"),
model.StructMember("c", "Y")]),
model.Struct("A", [model.StructMember("a", "X"),
model.StructMember("b", "Y"),
model.StructMember("c", "Z")])]
model.topological_sort(nodes)
assert ["A", "B", "C"] == [node.name for node in nodes]
def test_model_sort_struct_with_two_deps():
nodes = [model.Struct("C", [model.StructMember("a", "B")]),
model.Struct("B", [model.StructMember("a", "A")]),
model.Struct("A", [model.StructMember("a", "X")])]
model.topological_sort(nodes)
assert ["A", "B", "C"] == [node.name for node in nodes]
def test_model_sort_struct_with_multiple_dependencies():
nodes = [model.Struct("D", [model.StructMember("a", "A"),
model.StructMember("b", "B"),
model.StructMember("c", "C")]),
model.Struct("C", [model.StructMember("a", "A"),
model.StructMember("b", "B")]),
model.Struct("B", [model.StructMember("a", "A")]),
model.Typedef("A", "TTypeX")]
model.topological_sort(nodes)
assert ["A", "B", "C", "D"] == [node.name for node in nodes]
def test_model_sort_union():
nodes = [model.Typedef("C", "B"),
model.Union("B", [model.UnionMember("a", "A", "0"),
model.UnionMember("b", "A", "1")]),
model.Struct("A", [model.StructMember("a", "X")])]
model.topological_sort(nodes)
assert ["A", "B", "C"] == [node.name for node in nodes]
def test_model_sort_constants():
nodes = [model.Constant("C_C", "C_A + C_B"),
model.Constant("C_A", "1"),
model.Constant("C_B", "2")]
model.topological_sort(nodes)
assert [("C_A", "1"), ("C_B", "2"), ("C_C", "C_A + C_B")] == nodes
def test_cross_reference_structs():
nodes = [
model.Struct("A", [
model.StructMember("a", "u8")
]),
model.Struct("B", [
model.StructMember("a", "A"),
model.StructMember("b", "u8")
]),
model.Struct("C", [
model.StructMember("a", "A"),
model.StructMember("b", "B"),
model.StructMember("c", "NON_EXISTENT")
]),
model.Struct("D", [
model.StructMember("a", "A"),
model.StructMember("b", "B"),
model.StructMember("c", "C")
])
]
model.cross_reference(nodes)
definition_names = [[x.definition.name if x.definition else None for x in y.members] for y in nodes]
assert definition_names == [
[None],
['A', None],
['A', 'B', None],
['A', 'B', 'C']
]
def test_cross_reference_typedef():
nodes = [
model.Struct("A", [
model.StructMember("a", "u8")
]),
model.Typedef("B", "A"),
model.Struct("C", [
model.StructMember("a", "A"),
model.StructMember("b", "B")
]),
model.Typedef("D", "B")
]
model.cross_reference(nodes)
assert nodes[1].definition.name == "A"
assert nodes[2].members[1].definition.definition.name == "A"
assert nodes[3].definition.name == "B"
assert nodes[3].definition.definition.name == "A"
def test_cross_symbols_from_includes():
nodes = [
model.Include('x', [
model.Include('y', [
model.Typedef('ala', 'u32')
]),
model.Struct('ola', [
model.StructMember('a', 'ala'),
])
]),
model.Struct('ula', [
model.StructMember('a', 'ola'),
model.StructMember('b', 'ala'),
])
]
model.cross_reference(nodes)
assert nodes[1].members[0].definition.name == 'ola'
assert nodes[1].members[1].definition.name == 'ala'
# cross-reference only needs to link definitions of first level of nodes
assert nodes[0].nodes[1].members[0].definition == None
def test_cross_reference_array_size_from_includes():
nodes = [
model.Include('x', [
model.Include('y', [
model.Constant('NUM', '3'),
]),
model.Enum('E', [
model.EnumMember('E1', '1'),
model.EnumMember('E3', 'NUM')
]),
]),
model.Struct('X', [
model.StructMember('x', 'u32', size = 'NUM'),
model.StructMember('y', 'u32', size = 'E1'),
model.StructMember('z', 'u32', size = 'UNKNOWN'),
model.StructMember('a', 'u32', size = 'E3')
])
]
model.cross_reference(nodes)
assert nodes[1].members[0].numeric_size == 3
assert nodes[1].members[1].numeric_size == 1
assert nodes[1].members[2].numeric_size == None
assert nodes[1].members[3].numeric_size == 3
def test_cross_reference_numeric_size_of_expression():
nodes = [
model.Constant('A', 12),
model.Constant('B', 15),
model.Constant('C', 'A*B'),
model.Struct('X', [
model.StructMember('x', 'u32', size = 'C'),
])
]
model.cross_reference(nodes)
assert nodes[3].members[0].numeric_size == 180
def test_cross_reference_expression_as_array_size():
nodes = [
model.Struct('X', [
model.StructMember('x', 'u32', size = '2 * 3'),
])
]
model.cross_reference(nodes)
assert nodes[0].members[0].numeric_size == 6
class WarnFake(object):
def __init__(self):
self.msgs = []
def __call__(self, msg):
self.msgs.append(msg)
def test_cross_reference_typedef_warnings():
nodes = [model.Typedef('X', 'Unknown')]
warn = WarnFake()
model.cross_reference(nodes, warn)
assert warn.msgs == ["type 'Unknown' not found"]
def test_cross_reference_struct_warnings():
nodes = [model.Struct('X', [model.StructMember('x', 'TypeUnknown', size = '12 + NumUnknown')])]
warn = WarnFake()
model.cros |
thaim/ansible | lib/ansible/plugins/doc_fragments/oracle_display_name_option.py | Python | mit | 436 | 0.004587 | # Copyright (c) 2018, Oracle and/or its affiliates.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
class ModuleDocF | ragment(object):
DOCUMENTATION = """
options:
display_name:
description: Use I(display_name) along with the other options to return only resources that match the given
dis | play name exactly.
type: str
"""
|
balazsdukai/batch3dfier | batch3dfier/db.py | Python | gpl-3.0 | 2,117 | 0 | # -*- codi | ng: utf-8 -*-
"""Database connection class."""
import psycopg2
class db(object):
"""A database connection class """
def __init__(self, dbname, host, port, user, password):
self.dbname = dbname
self.host = host
self.port = port
self.user = user
self.password = password
try:
| self.conn = psycopg2.connect(
"dbname=%s host=%s port=%s \
user=%s password=%s" %
(dbname, host, port, user, password))
print("Opened database successfully")
except BaseException:
print("I'm unable to connect to the database. Exiting function.")
def sendQuery(self, query):
"""Send a query to the DB when no results need to return (e.g. CREATE)
Parameters
----------
query : str
Returns
-------
nothing
"""
with self.conn:
with self.conn.cursor() as cur:
cur.execute(query)
def getQuery(self, query):
"""DB query where the results need to return (e.g. SELECT)
Parameters
----------
query : str
SQL query
Returns
-------
psycopg2 resultset
"""
with self.conn:
with self.conn.cursor() as cur:
cur.execute(query)
return(cur.fetchall())
def vacuum(self, schema, table):
"""Vacuum analyze a table
Parameters
----------
schema : str
schema name
table : str
table name
Returns
-------
nothing
"""
self.conn.set_isolation_level(
psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
schema = psycopg2.sql.Identifier(schema)
table = psycopg2.sql.Identifier(table)
query = psycopg2.sql.SQL("""
VACUUM ANALYZE {schema}.{table};
""").format(schema=schema, table=table)
self.sendQuery(query)
def close(self):
""" """
self.conn.close()
|
manassolanki/erpnext | erpnext/hr/doctype/expense_claim_type/test_expense_claim_type.py | Python | gpl-3.0 | 317 | 0.009464 | # -*- coding: | utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import unittest
# test_records = frappe.get_test_records('Expense Claim Type')
class TestExpenseCla | imType(unittest.TestCase):
pass
|
mbrucher/ATK-plugins | ATKStereoPhaser/update_version.py | Python | bsd-3-clause | 4,450 | 0.020449 | #!/usr/bin/python
# this script will update the versions in plist and installer files to match that in resource.h
import plistlib, os, datetime, fileinput, glob, sys, string
scriptpath = os.path.dirname(os.path.realpath(__file__))
def replacestrs(filename, s, r):
files = glob.glob(filename)
for line in fileinput.input(files,inplace=1):
line = line.replace(s, r)
sys.stdout.write(line)
def _update_plist_version(plistpath, info_string, version):
    """Stamp the standard CFBundle version keys into one plist file."""
    info = plistlib.readPlist(plistpath)
    info['CFBundleGetInfoString'] = info_string
    info['CFBundleVersion'] = version
    info['CFBundleShortVersionString'] = version
    plistlib.writePlist(info, plistpath)
    # plistlib writes the "//Apple//" DOCTYPE; older tooling expects the
    # legacy "//Apple Computer//" identifier, so rewrite it in place.
    replacestrs(plistpath, "//Apple//", "//Apple Computer//")


def main():
    """Read PLUG_VER from resource.h and propagate the version string to
    the bundle plists and the Mac/Windows installer projects."""
    MajorStr = ""
    MinorStr = ""
    BugfixStr = ""
    for line in fileinput.input(scriptpath + "/resource.h", inplace=0):
        if "#define PLUG_VER " in line:
            # Version is packed as 0xMMMMmmbb (major.minor.bugfix).
            # NOTE: the previous lstrip("#define PLUG_VER ") stripped a
            # *character set* and could eat leading hex digits; splitting
            # on the macro name is safe.
            FullVersion = int(line.split("#define PLUG_VER")[1].strip(), 16)
            MajorStr = str((FullVersion & 0xFFFF0000) >> 16)
            MinorStr = str((FullVersion & 0x0000FF00) >> 8)
            BugfixStr = str(FullVersion & 0x000000FF)

    FullVersionStr = MajorStr + "." + MinorStr + "." + BugfixStr
    today = datetime.date.today()
    CFBundleGetInfoString = FullVersionStr + ", Copyright MatthieuBrucher, " + str(today.year)
    CFBundleVersion = FullVersionStr

    print("update_version.py - setting version to " + FullVersionStr)

    print("Updating plist version info...")
    for bundle in ("VST2", "AU", "VST3", "OSXAPP"):
        plistpath = scriptpath + "/resources/ATKStereoPhaser-%s-Info.plist" % bundle
        _update_plist_version(plistpath, CFBundleGetInfoString, CFBundleVersion)
    # RTAS, AAX and IOSAPP plists are currently disabled upstream; add their
    # names to the loop above to re-enable them.

    print("Updating Mac Installer version info...")
    plistpath = scriptpath + "/installer/ATKStereoPhaser.pkgproj"
    installer = plistlib.readPlist(plistpath)
    for package in installer['PACKAGES']:
        package['PACKAGE_SETTINGS']['VERSION'] = FullVersionStr
    plistlib.writePlist(installer, plistpath)
    replacestrs(plistpath, "//Apple//", "//Apple Computer//")

    print("Updating Windows Installer version info...")
    for line in fileinput.input(scriptpath + "/installer/ATKStereoPhaser.iss", inplace=1):
        if "AppVersion" in line:
            line = "AppVersion=" + FullVersionStr + "\n"
        sys.stdout.write(line)


if __name__ == '__main__':
    main()
|
zygmuntz/kaggle-bestbuy_small | train.py | Python | mit | 1,432 | 0.066341 | 'http://fastml.com/best-buy-mobile-contest/'
import sys, csv, re
def prepare(query):
    """Normalise a search query: strip all non-word characters and lowercase.

    Note: ``\\W`` keeps letters, digits and underscores."""
    return re.sub(r'[\W]', '', query).lower()
# Fallback recommendations: the five most frequently bought SKUs overall.
popular_skus = [9854804, 2107458, 2541184, 2670133, 2173065]

# Command line: <train file> <test file> <output file>
input_file = sys.argv[1]
test_file = sys.argv[2]
output_file = sys.argv[3]

i = open( input_file )
reader = csv.reader( i )

t = open( test_file )

headers = reader.next()

# Build a mapping: normalised query -> { sku: occurrence count } from training data.
mapping = {}

for line in reader:
    query = line[3]
    sku = line[1]

    # print "%s -> %s" % ( query, sku )
    query = prepare( query )

    try:
        mapping[query][sku] += 1
    except KeyError:
        # either the sku is new for this query, or the query itself is new
        try:
            mapping[query][sku] = 1
        except KeyError:
            mapping[query] = {}
            mapping[query][sku] = 1

#print mapping
#sys.exit( 0 )

reader = csv.reader( t )
headers = reader.next()

o = open( output_file, 'wb' )
writer = csv.writer( o, delimiter = " " )

n = 0 # all test cases
m = 0 # the ones we have mapping for

for line in reader:
    n += 1
    query = line[2]
    query = prepare( query )

    if query in mapping:
        m += 1
        skus = []
        # candidate SKUs sorted by training-set frequency, most popular first
        for sku in sorted( mapping[query], key=mapping[query].get, reverse = True ):
            skus.append( sku )
        #print skus
        '''
        if len( mapping[query] ) > 1:
            print "mapping:"
            print mapping[query]
            print "skus:"
            print skus
        '''
        # pad with the global bestsellers and keep exactly five predictions
        skus.extend( popular_skus )
        skus = skus[0:5]
    else:
        # unseen query: fall back to the global bestsellers
        skus = popular_skus

    writer.writerow( skus )

print "Used mapping in %s / %s (%s)" % ( m, n, 1.0 * m / n )
LucidBlue/mykeepon-storyteller | src/realTimeAudio.py | Python | bsd-3-clause | 1,209 | 0.019024 | import ui_plot
import sys
import numpy
from PyQt4 import QtCore, QtGui
import PyQt4.Qwt5 as Qwt
from recorder import *
def plotSomething():
    """Redraw the FFT curve when the recorder has captured new audio."""
    if SR.newAudio == False:
        return
    frequencies, magnitudes = SR.fft()
    c.setData(frequencies, magnitudes)
    uiplot.qwtPlot.replot()
    # mark the buffer as consumed until the recorder refills it
    SR.newAudio = False
if __name__ == "__main__":
    # Build the Qt application and the main plot window.
    app = QtGui.QApplication(sys.argv)
    win_plot = ui_plot.QtGui.QMainWindow()
    uiplot = ui_plot.Ui_win_plot()
    uiplot.setupUi(win_plot)
    # Manual refresh button; the timer below also triggers redraws.
    uiplot.btnA.clicked.connect(plotSomething)
    #uiplot.btnB.clicked.connect(lambda: uiplot.timer.setInterval(100.0))
    #uiplot.btnC.clicked.connect(lambda: uiplot.timer.setInterval(10.0))
    #uiplot.btnD.clicked.connect(lambda: uiplot.timer.setInterval(1.0))
    # Curve that displays the FFT; plotSomething() updates its data.
    c=Qwt.QwtPlotCurve()
    c.attach(uiplot.qwtPlot)
    uiplot.qwtPlot.setAxisScale(uiplot.qwtPlot.yLeft, 0, 1000)
    # Periodic timer drives the redraw loop (interval in milliseconds).
    uiplot.timer = QtCore.QTimer()
    uiplot.timer.start(1.0)
    win_plot.connect(uiplot.timer, QtCore.SIGNAL('timeout()'), plotSomething)
    # Start continuous audio capture.
    SR=SwhRecorder()
    SR.setup()
    SR.continuousStart()
    ### DISPLAY WINDOWS
    win_plot.show()
    code=app.exec_()
    SR.close()
    sys.exit(code)
|
CharlesGust/django-imagr | imagr_site/imagr_app/migrations/0003_auto_20141114_1441.py | Python | mit | 1,007 | 0 | # -*- | coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Make date_published optional (blank) and cap title fields at 60
    characters on the Album and Photo models."""

    dependencies = [
        ('imagr_app', '0002_auto_20141106_1538'),
    ]

    operations = [
        migrations.AlterField(
            model_name='album',
            name='date_published',
            field=models.DateField(blank=True),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='album',
            name='title',
            field=models.CharField(max_length=60),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='photo',
            name='date_published',
            field=models.DateField(blank=True),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='photo',
            name='title',
            field=models.CharField(max_length=60),
            preserve_default=True,
        ),
    ]
|
Qwaz/solved-hacking-problem | DEFCON/2020 Quals/ooo-flag-sharing/chal.original.py | Python | gpl-2.0 | 8,529 | 0.009849 | #!/usr/bin/env python3
import gensafeprime
import contextlib
import textwrap
import hashlib
import fuckpy3 #pylint:disable=unused-import
import random
import numpy as np
import ast
import os
import re
banner = r"""
___ ___ ___ _____ _
/ _ \ / _ \ / _ \ | ___| | __ _ __ _
| | | | | | | | | | | |_ | |/ _` |/ _` |
| |_| | |_| | |_| | | _| | | (_| | (_| |
\___/ \___/ \___/ |_| |_|\__,_|\__, |
|___/
____ _ _ ____ _
/ ___|| |__ __ _ _ __(_)_ __ __ _ / ___| ___ _ ____ _(_) ___ ___
\___ \| '_ \ / _` | '__| | '_ \ / _` | \___ \ / _ \ '__\ \ / / |/ __/ _ \
___) | | | | (_| | | | | | | | (_| | ___) | __/ | \ V /| | (_| __/
|____/|_| |_|\__,_|_| |_|_| |_|\__, | |____/ \___|_| \_/ |_|\___\___|
|___/
"""
#
# Matrix stuff
#
def pascal_matrix(n, k):
    """Build an n-by-k Pascal matrix as a list of int rows.

    First row and column are all ones; every other entry is the sum of the
    entry to its left and the entry above it.  Requires n >= k so the
    column-rank assertion can hold.
    """
    grid = [[1] * k for _ in range(n)]
    for row in range(1, n):
        for col in range(1, k):
            grid[row][col] = grid[row][col - 1] + grid[row - 1][col]
    assert np.linalg.matrix_rank(grid) == k
    return grid
def random_matrix(n, k):
matrix = [ list(map(int, row)) for row in (np.random.rand(n,k)*1000).astype(int) ]
assert np.linalg.matrix_rank(matrix) == k
return matrix
def calc_det(A):
    """Exact integer determinant of a square numpy array, computed by
    Laplace (cofactor) expansion along the first row."""
    rows, _ = np.shape(A)
    if rows == 1:
        return A[0, 0]
    total = 0
    for col in range(rows):
        keep = [c for c in range(rows) if c != col]
        total += (-1) ** col * A[0, col] * calc_det(A[1:, keep])
    return int(total)
#
# OOO Secret Sharing Scheme
#
def split_secret(key, n, k, matrix):
    """Split *key* into n-1 indexed shares via the n-by-k share matrix.

    The secret (as a little-endian int) is the first coefficient; the
    remaining k-1 coefficients are random in [0, P].  Share i is the dot
    product of matrix row i with the coefficient vector; row 0's share is
    withheld (it is the server's).
    """
    assert len(matrix) == n, "misshaped matrix"
    assert len(matrix[0]) == k, "misshaped matrix"
    coeffs = [int.from_bytes(key, byteorder='little')]
    for _ in range(k - 1):
        coeffs.append(random.randint(0, P))
    products = np.dot(matrix, np.array(coeffs))
    shares = [(idx, int(value)) for idx, value in enumerate(products)]
    return shares[1:]
def reconstitute_secret(keys, matrix):
    """Recover the 32-byte secret from k indexed shares.

    Inverts the k-by-k submatrix selected by the share indices; the first
    row of the inverse recovers the first coefficient (the secret) from the
    share values, working modulo P with an exact integer determinant.
    """
    k = len(matrix[0])
    assert k <= len(keys), "not enough keys"
    assert np.linalg.matrix_rank(matrix) == k, "linearly dependent keys"
    subkeys = sorted(keys[:k])
    submatrix = [matrix[e[0]] for e in subkeys]
    subshares = [e[-1] for e in subkeys]
    det = calc_det(np.array(submatrix))
    inv_float = np.linalg.inv(submatrix)
    # det * inv is integral; round away float error, then multiply by det^-1 mod P
    scaled = [int(round(det * inv_float[0][i])) for i in range(k)]
    det_inv = pow(det, -1, P)
    coeffs = [entry * det_inv for entry in scaled]
    secret_int = int(sum(a * b for a, b in zip(coeffs, subshares))) % P
    return secret_int.to_bytes(32, byteorder='little')
#
# Menu library
#
def one_menu(items, done_option=True, default=None):
    """Print one menu and return the payload of the chosen entry.

    Plain strings/bytes in *items* are printed as headings; (label, payload)
    pairs become numbered choices.  An empty answer returns *default* when
    set, otherwise re-prompts; choice 0 returns None when *done_option*.
    """
    choices = [None]
    for entry in items:
        if type(entry) in (str, bytes):
            print(entry)
        else:
            print("%d <- %s" % (len(choices), entry[0]))
            choices.append(entry[1])
    if done_option:
        print("0 <- Done.")
    answer = input("Choice: ")
    if not answer:
        if default is not None:
            return default
        return one_menu(items, done_option=done_option)
    selection = int(answer)
    assert 0 <= selection < len(choices), "Invalid choice!"
    assert selection or done_option, "Invalid choice!"
    return choices[selection]
def menu(*items, do_while=False, loop=False, done_option=False, default=None):
    """Generator driving repeated one_menu() prompts.

    Yields True first when *do_while* is set, then yields each selection
    (calling it first when callable).  Stops on None (Done) or, when *loop*
    is false, after a single pass.  The last choice becomes the default.
    """
    if do_while:
        yield True
    choice = default
    while True:
        choice = one_menu(items, done_option=done_option, default=choice)
        if callable(choice):
            yield choice()
        elif choice is None:
            break
        else:
            yield choice
        if not loop:
            break
#
# Menu handlers
#
def share_user_flag():
    """Split a user-provided secret into shares, keeping share 0 server-side."""
    secret = input("Enter secret to share: ").bytes()
    secret_id = hashlib.md5(secret).hexdigest()[:6]
    print("Your secret's ID is:", secret_id)
    dealt = split_secret(secret, N, K, M)
    random.shuffle(dealt)
    requested = int(input("Number of shares to make: "))
    assert requested >= K, "Too few shares; you won't be able to reconstitute the secret!"
    stored_path = os.path.join(SHAREDIR, secret_id + ".1")
    with open(stored_path, "w") as handle:
        handle.write(str(dealt[0]))
    print("Your shares are:", dealt[1:requested])
    print("Your stored share is safe with us!")
def redeem_user_flag():
    """Rebuild a user secret from client shares plus the server's stored share.

    Fix: the stored-share file is now opened with a context manager instead
    of a leaked ``open(...).read()`` handle.
    """
    secret_id = input("Enter the secret's ID: ")
    assert re.match(r"^\w\w\w\w\w\w$", secret_id), "Invalid ID format!"
    user_shares = ast.literal_eval(input("Enter your shares of the secret: "))
    with open(os.path.join(SHAREDIR, secret_id + ".1")) as handle:
        stored_share = ast.literal_eval(handle.read().strip())
    shares = user_shares + [stored_share]
    secret = reconstitute_secret(shares, M).strip(b"\x00")
    print("Your secret is:", secret)
def share_actual_flag():
    """Split the real flag; keep two shares server-side, hand out K-2 more.

    Fix: the flag file is now closed promptly via a context manager instead
    of a leaked ``open(...).read()`` handle.
    """
    flag_path = os.path.join(os.path.dirname(__file__), "flag")
    with open(flag_path, "rb") as handle:
        the_flag = handle.read().strip()
    shares = split_secret(the_flag, N, K, M)
    # Round-trip sanity check before anything is persisted.
    sanity_flag = reconstitute_secret(shares, M).strip(b"\x00")
    assert sanity_flag == the_flag
    random.shuffle(shares)
    secret_id = os.urandom(3).hex()
    with open(os.path.join(SHAREDIR, secret_id + ".1"), "w") as f:
        f.write(str(shares[0]))
    with open(os.path.join(SHAREDIR, secret_id + ".2"), "w") as f:
        f.write(str(shares[1]))
    print("Our secret's ID is:", secret_id)
    print("Your shares are:", shares[2:K])
    print("Our stored shares are quite safe with us!")
def redeem_actual_flag():
    """Recombine both server-held shares with user shares to reveal the flag.

    Fix: both stored-share files are now opened with context managers
    instead of leaked ``open(...).read()`` handles.
    """
    secret_id = input("Enter the secret's ID: ")
    assert re.match(r"^\w\w\w\w\w\w$", secret_id), "Invalid ID format!"
    user_shares = ast.literal_eval(input("Enter your shares of the secret: "))
    with open(os.path.join(SHAREDIR, secret_id + ".1")) as handle:
        stored_share1 = ast.literal_eval(handle.read().strip())
    with open(os.path.join(SHAREDIR, secret_id + ".2")) as handle:
        stored_share2 = ast.literal_eval(handle.read().strip())
    shares = [stored_share1, stored_share2] + user_shares
    # Each matrix row may be used at most once.
    assert len(set(s[0] for s in shares)) == len(shares), "Duplicate shares."
    secret = reconstitute_secret(shares, M).strip(b"\x00")
    if secret.startswith(b"OOO{"):
        print("Congrats! You have decoded our secret. We must have trusted you!")
def login():
    """Prompt for a username, create that user's share directory, and enter
    the main menu.  Sets the USER and SHAREDIR module globals."""
    global USER
    global SHAREDIR

    print("Welcome to the...")
    print(banner)
    print('\n'.join(textwrap.wrap("OOO has finally solved the flag sharing problem by making it quick and easy for aspiring cheaters to share flags by utilizing a secure and exciting secret sharing scheme! OOO reserves the right to withhold flag shares where deemed appropriate.", width=80)))
    print()

    # Reserved/admin names are rejected outright.
    USER = input("Username: ")
    assert USER.lower() != "ooo", "No way!"
    assert USER.lower() != "zardus", "That's me!"
    assert USER.lower() != "malina", "Nope!"
    assert re.match(r"^\w+$", USER), "Invalid username format!"

    # Per-user directory for server-held shares; ignore if it already exists.
    SHAREDIR = os.path.join(os.path.dirname(__file__), "shares", USER)
    with contextlib.suppress(FileExistsError):
        os.makedirs(SHAREDIR)

    main_menu()
def main_menu():
    """Loop over the top-level menu until the user picks Done."""
    options = [
        f"What do, {USER}?",
        ("Share useless flag.", share_user_flag),
        ("Redeem useless flag.", redeem_user_flag),
        ("Store scoring flag.", share_actual_flag),
        ("Retrieve scoring flag.", redeem_actual_flag),
    ]
    for _ in menu(*options, loop=True, done_option=True):
        pass
# One-time setup: generate the prime and the share matrix on first run.
if not os.path.exists(os.path.join(os.path.dirname(__file__), "prime.ooo")):
    print("[STARTUP] Generating prime...")
    with open("prime.ooo", 'w') as _f:
        _f.write(str(gensafeprime.generate(256)))
if not os.path.exists(os.path.join(os.path.dirname(__file__), "matrix.ooo")):
    print("[STARTUP] Generating matrix...")
    with open("matrix.ooo", 'w') as _f:
        _f.write(str(random_matrix(100, 5)))

# Scheme parameters: P is the prime modulus, M the N-by-K share matrix.
P = ast.literal_eval(open("prime.ooo").read().strip())
M = ast.literal_eval(open("matrix.ooo").read().strip())
N = len(M)
K = len(M[0])
def sanity_check(n=N, k=K, m=M):
def one_check(secret):
shares = split_secret(secr | et, n, k, m)
random.shuffle(shares)
new_secret = reconstitute_secret(shares[:k], m)
asser |
agconti/Ember-Demo | ember_demo/config/settings.py | Python | mit | 14,218 | 0.006119 | # -*- coding: utf-8 -*-
"""
Django settings for ember_demo project.
For more information on this file, see
https://docs.djangoproject.com/en/dev/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/dev/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
from os.path import join
# See: http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings
try:
from S3 import CallingFormat
AWS_CALLING_FORMAT = CallingFormat.SUBDOMAIN
except ImportError:
# TODO: Fix this where even if in Dev this class is called.
pass
from configurations import Configuration, values
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
class Common(Configuration):
########## APP CONFIGURATION
DJANGO_APPS = (
# Default Django apps:
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Useful template tags:
# 'django.contrib.humanize',
# Admin
'django.contrib.admin',
)
THIRD_PARTY_APPS = (
'south', # Database migration helpers:
'crispy_forms', # Form layouts
'avatar', # for user avatars
'rest_framework',
)
# Apps specific for this project go here.
LOCAL_APPS = (
'users', # custom users app
# Your stuff: custom apps go here
'core',
'api',
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
INSTALLED_APPS += (
# Needs to come last for now because of a weird edge case between
# South and allauth
'allauth', # registration
'allauth.account', # registration
'allauth.socialaccount', # registration
)
########## END APP CONFIGURATION
########## MIDDLEWARE CONFIGURATION
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
########## END MIDDLEWARE CONFIGURATION
########## DEBUG
# See: https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = values.BooleanValue(False)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug
TEMPLATE_DEBUG = DEBUG
########## END DEBUG
########## SECRET CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# Note: This key only used for development and testing.
# In production, this is changed to a values.SecretValue() setting
SECRET_KEY = "CHANGEME!!!"
########## END SECRET CONFIGURATION
########## FIXTURE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-FIXTURE_DIRS
FIXTURE_DIRS = (
join(BASE_DIR, 'fixtures'),
)
########## END FIXTURE CONFIGURATION
########## EMAIL CONFIGURATION
EMAIL_BACKEND = values.Value('django.core.mail.backends.smtp.EmailBackend')
########## END EMAIL CONFIGURATION
########## MANAGER CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#admins
ADMINS = (
('Agconti', 'andrew@agconti.com'),
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#managers
MANAGERS = ADMINS
########## END MANAGER CONFIGURATION
########## DATABASE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = values.DatabaseURLValue('postgres://localhost/ember_demo')
########## END DATABASE CONFIGURATION
########## CACHING
# Do this here because thanks to django-pylibmc-sasl and pylibmc memcacheify is painful to install on windows.
# memcacheify is what's used in Production
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': ''
}
}
########## END CACHING
########## GENERAL CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#time-zone
TIME_ZONE = 'America/Los_Angeles'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#language-code
LANGUAGE_CODE = 'en-us'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#site-id
SITE_ID = 1
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n
USE_I18N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n
USE_L10N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-tz
USE_TZ = True
########## END GENERAL CONFIGURATION
########## TEMPLATE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.auth.context_processors.auth',
"allauth.account.context_processors.account",
"allauth.socialaccount.context_processors.socialaccount",
'django.core.context_processors.debug',
'django.core.context_processors.i18n',
'django.core.context_processors.media',
'django.core.context_processors.static',
'django.core.context_processors.tz',
'django.contrib.messages.context_processors.messages',
'django.core.context_processors.request',
# Your stuff: custom template context processers go here
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
TEMPLATE_DIRS = (
join(BASE_DIR, 'templates'),
)
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
# See: http://django-crispy-forms.readthedocs.org/en/latest/install.html#template-packs
CRISPY_TEMPLATE_PACK = 'bootstrap3'
########## END TEMPLATE CONFIGURATION
########## STATIC FILE CONFIGU | RATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-root
STATIC_ROOT = join(os.path.dirname(BASE_DIR), 'staticfiles')
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = '/static/'
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS
STATICFILES_DIRS = (
| join(BASE_DIR, 'static'),
)
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
########## END STATIC FILE CONFIGURATION
########## MEDIA CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-root
MEDIA_ROOT = join(BASE_DIR, 'media')
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-url
MEDIA_URL = '/media/'
########## END MEDIA CONFIGURATION
########## URL Configuration
ROOT_URLCONF = 'config.urls'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application
WSGI_APPLICATION = 'config.wsgi.application'
########## End URL Configuration
########## AUTHENTICATION CONFIGURATION
AUTHENTICATION_BACKENDS = (
"django.contrib.auth.backends.ModelBackend",
"allauth.account.auth_backends.AuthenticationBackend",
)
# Some really nice defaults
ACCOUNT_AUTHENTICATION_METHOD = "username"
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_EMAIL_VERIFICATION = "mandatory"
########## END AUTHENTICATION CONFIGURATION
########## Custom user app defaults
# Select the correct user model
AUTH_USER_MODEL = "users.User"
LOGIN_REDIRECT_URL = "users:redirect"
########## END Custom user app defaults
########## SLUGLIFIER
AUTOSLUG_SLUGIF |
kyleabeauchamp/pysam | pysam/Pileup.py | Python | mit | 8,975 | 0.000557 | '''Tools for working with files in the samtools pileup -c format.'''
import collections
import pysam
# Column layout of a "samtools pileup -c" substitution (SNP/match) record.
PileupSubstitution = collections.namedtuple(
    "PileupSubstitution",
    "chromosome pos reference_base genotype consensus_quality "
    "snp_quality mapping_quality coverage read_bases base_qualities")

# Column layout of a "samtools pileup -c" indel record (reference base '*').
PileupIndel = collections.namedtuple(
    "PileupIndel",
    "chromosome pos reference_base genotype consensus_quality "
    "snp_quality mapping_quality coverage first_allele second_allele "
    "reads_first reads_second reads_diff")


def iterate(infile):
    '''iterate over ``samtools pileup -c`` formatted file.

    *infile* can be any iterator over lines.

    Yields named tuples of type :class:`pysam.Pileup.PileupSubstitution`
    or :class:`pysam.Pileup.PileupIndel`.

    .. note::

        The parser converts to 0-based coordinates
    '''
    # Per-column converters; the position column is shifted to 0-based.
    to_subst = (str, lambda x: int(x) - 1, str,
                str, int, int, int, int, str, str)
    to_indel = (str, lambda x: int(x) - 1, str, str, int,
                int, int, int, str, str, int, int, int)

    for line in infile:
        fields = line[:-1].split()
        if fields[2] == "*":
            try:
                yield PileupIndel(*[conv(val) for conv, val in zip(to_indel, fields)])
            except TypeError:
                raise pysam.SamtoolsError("parsing error in line: `%s`" % line)
        else:
            try:
                yield PileupSubstitution(*[conv(val) for conv, val in zip(to_subst, fields)])
            except TypeError:
                raise pysam.SamtoolsError("parsing error in line: `%s`" % line)
# One-letter codes for diploid genotypes.  Homozygous calls map to the plain
# base letter; heterozygous calls map to an ambiguity code whose case records
# the allele order (lower case for the alphabetical order as written, upper
# case for the swapped order).
ENCODE_GENOTYPE = {
    'A': 'A', 'C': 'C', 'G': 'G', 'T': 'T',
    'AA': 'A', 'CC': 'C', 'GG': 'G', 'TT': 'T', 'UU': 'U',
    'AG': 'r', 'GA': 'R',
    'CT': 'y', 'TC': 'Y',
    'AC': 'm', 'CA': 'M',
    'GT': 'k', 'TG': 'K',
    'CG': 's', 'GC': 'S',
    'AT': 'w', 'TA': 'W',
}

# Inverse table: both cases of an ambiguity code expand to the same
# alphabetically-ordered two-letter genotype.
DECODE_GENOTYPE = {
    'A': 'AA',
    'C': 'CC',
    'G': 'GG',
    'T': 'TT',
    'r': 'AG', 'R': 'AG',
    'y': 'CT', 'Y': 'CT',
    'm': 'AC', 'M': 'AC',
    'k': 'GT', 'K': 'GT',
    's': 'CG', 'S': 'CG',
    'w': 'AT', 'W': 'AT',
}

# ------------------------------------------------------------


def encodeGenotype(code):
    '''encode genotypes like GG, GA into a one-letter code.

    The lookup is case-insensitive; the case of the returned code records
    the allele order (see ENCODE_GENOTYPE).
    '''
    return ENCODE_GENOTYPE[code.upper()]


def decodeGenotype(code):
    '''decode single letter genotypes like m, M into two letters.

    This is the reverse operation to :meth:`encodeGenotype`.
    '''
    return DECODE_GENOTYPE[code]
def translateIndelGenotypeFromVCF(vcf_genotypes, ref):
    '''translate an indel genotype from vcf to pileup notation.

    Returns a tuple (genotype string joined by "/", position offset).
    Raises ValueError if the alleles cannot be expressed as a single indel
    or map to inconsistent offsets.
    '''

    def common_prefix(s1, s2):
        '''longest common prefix of strings s1 and s2.'''
        n = min(len(s1), len(s2))
        for x in range(n):
            if s1[x] != s2[x]:
                return s1[:x]
        return s1[:n]

    def common_suffix(s1, s2):
        '''longest common suffix of strings s1 and s2.'''
        n = min(len(s1), len(s2))
        if s1[-1] != s2[-1]:
            return ""
        for x in range(-2, -n - 1, -1):
            if s1[x] != s2[x]:
                return s1[x + 1:]
        return s1[-n:]

    def one_genotype(variant, ref):
        '''return (pileup allele, offset) for a single variant allele.'''
        if variant == ref:
            return "*", 0
        if len(ref) > len(variant):
            # deletion relative to the reference
            if ref.startswith(variant):
                return "-%s" % ref[len(variant):], len(variant) - 1
            elif ref.endswith(variant):
                return "-%s" % ref[:-len(variant)], -1
            else:
                prefix = common_prefix(ref, variant)
                suffix = common_suffix(ref, variant)
                shared = len(prefix) + len(suffix) - len(variant)
                if shared < 0:
                    raise ValueError()
                return "-%s" % ref[len(prefix):-(len(suffix) - shared)], len(prefix) - 1
        elif len(ref) < len(variant):
            # insertion relative to the reference
            if variant.startswith(ref):
                return "+%s" % variant[len(ref):], len(ref) - 1
            elif variant.endswith(ref):
                return "+%s" % variant[:len(ref)], 0
            else:
                prefix = common_prefix(ref, variant)
                suffix = common_suffix(ref, variant)
                shared = len(prefix) + len(suffix) - len(ref)
                if shared < 0:
                    raise ValueError()
                return "+%s" % variant[len(prefix):-(len(suffix) - shared)], len(prefix)
        else:
            assert 0, "snp?"

    # in pileup, the position refers to the base after the coordinate
    genotypes, offsets = [], []
    is_error = True
    for variant in vcf_genotypes:
        try:
            allele, offset = one_genotype(variant, ref)
        except ValueError:
            break
        genotypes.append(allele)
        if allele != "*":
            offsets.append(offset)
    else:
        is_error = False
    if is_error:
        raise ValueError()

    assert len(set(offsets)) == 1, "multiple offsets for indel"
    offset = offsets[0]

    return "/".join(genotypes), offset
def vcf2pileup(vcf, sample):
'''convert vcf record to pileup record.'''
chromosome = vcf.contig
pos = vcf.pos
reference = vcf.ref
allelles = [reference] + vcf.alt
data = vcf[sample]
# get genotype
genotypes = data["GT"]
if len(genotypes) > 1:
raise ValueError("only single genotype per position, %s" % (str(vcf)))
genotypes = genotypes[0]
# not a variant
if genotypes[0] == ".":
return None
genotypes = [allelles[int(x)] for x in genotypes if x != "/"]
# snp_quality is "genotype quality"
snp_quality = consensus_quality = data.get("GQ", [0])[0]
mapping_quality = vcf.info.get("MQ", [0])[0]
coverage = data.get("DP", 0)
if len(reference) > 1 or max([len(x) for x in vcf.alt]) > 1:
# indel
genotype, offset = translateIndelGenotypeFromVCF(genotypes, reference)
return PileupIndel(chromosome,
pos + offset,
"*",
genotype,
consensus_quality,
snp_quality,
mapping_quality,
coverage,
genotype,
"<" * len(genotype),
0,
0,
0)
else:
genotype = encodeGenotype("".join(genotypes))
read_bases = ""
base_qualities = ""
return PileupSubstitution(chromosome, pos, reference,
genotype, consensus_quality,
snp_quality, mapping_quality,
|
DesertBot/DesertBot | desertbot/modules/commands/Join.py | Python | mit | 1,084 | 0.001845 | """
Created on Dec 20, 2011
@author: StarlitGhost
"""
from twisted.plugin import IPlugin
from zope.interface import implementer
from desertbot.message import IRCMessage
from desertbot.moduleinterface import IModule
from desertbot.modules.commandinterface import BotCommand
from desertbot.response import IRCResponse, ResponseType
@implementer(IPlugin, IModule)
class Join(BotCommand):
def triggers(self):
return ['join']
def help(self, query):
return | 'join <channel> - makes the bot join the specified channel(s)'
def execute(self, message: IRCMessage):
if len(message.parameterList) > 0:
responses = []
for param in message.parameterList:
channel = param
if not chan | nel.startswith('#'):
channel = '#' + channel
responses.append(IRCResponse(f'JOIN {channel}', '', ResponseType.Raw))
return responses
else:
return IRCResponse(f"{message.user.nick}, you didn't say where I should join", message.replyTo)
join = Join()
|
mortbauer/openfoam-extend-Breeder-other-scripting-PyFoam | PyFoam/IPythonHelpers/Notebook.py | Python | gpl-2.0 | 6,200 | 0.013387 | # ICE Revision: $Id$
"""Read and create IPython-Notebooks
"""
import json
from copy import deepcopy
from time import asctime
from PyFoam.Error import error,warning
from PyFoam.ThirdParty.six import string_types,text_type,u
class Notebook(object):
    """In-memory representation of an IPython notebook (v3 JSON format).

    Fixes over the previous version: ``is None`` identity tests instead of
    ``== None``, and files opened by filename are now closed after loading.
    """

    def __init__(self,input=None,
                 nbformat=3,
                 nbformat_minor=0,
                 name=None):
        """@param input: If this is a string then it is interpreted as
        a filename. Otherwise as a filehandle. If unset then an empty
        notebook is constructed
        @param name: name of the notebook. Only used if a new notebook is created
        """
        self.__content={}
        if input is None:
            if name is None:
                error("Specify at least a name")
            # Fresh, empty v3 notebook skeleton with one worksheet.
            self.__content={
                u("metadata") : {
                    u("name"):text_type(name),
                    u("pyFoam") : {
                        u("createdBy") : "pyFoam",
                        u("createdTime") : asctime()
                    }
                },
                u("nbformat") : nbformat,
                u("nbformat_minor") : nbformat_minor,
                u("worksheets") : [
                    {
                        u("cells"):[]
                    }
                ]
            }
        else:
            # Accept a filename or an open file handle; close the file only
            # if we opened it ourselves.
            if isinstance(input,string_types):
                with open(input) as fh:
                    self.__content=json.load(fh)
            else:
                self.__content=json.load(input)
            if ("metadata" not in self.__content or
                "name" not in self.__content["metadata"] or
                "nbformat" not in self.__content or
                "worksheets" not in self.__content):
                error(str(input),"Notebook does not have the expected format")
            if len(self.__content["worksheets"])>1:
                warning(str(input),"has more than one worksheet. Only using the first")
            elif len(self.__content["worksheets"])==0:
                error(str(input),"has no worksheets")
            if "cells" not in self.__content["worksheets"][0]:
                error(str(input),"has no cells")
            # Re-wrap every raw cell dict in our Cell helper class.
            self.reset([Cell(**c) for c in self])
            if u("pyFoam") not in self.__content[u("metadata")]:
                self.__content[u("metadata")][u("pyFoam")]={
                    u("createdBy") : "other",
                    u("createdTime") : "unknown"
                }

    @property
    def raw(self):
        """The underlying notebook dictionary."""
        return self.__content

    @property
    def name(self):
        """The notebook name from the metadata."""
        return self.__content["metadata"]["name"]

    @name.setter
    def name(self,newName):
        self.__content["metadata"]["name"]=newName

    def _cells(self):
        """Cell list of the first (only used) worksheet."""
        return self.__content["worksheets"][0]["cells"]

    def reset(self,new):
        """Replace the cell list of the first worksheet."""
        self.__content["worksheets"][0]["cells"]=new

    def __iter__(self):
        for c in self._cells():
            yield c

    def __len__(self):
        return len(self._cells())

    def __addCell(self,**kwargs):
        """Append a cell, normalising string input/source into per-line lists
        (every line keeps its trailing newline except the last)."""
        data=Cell(**kwargs)
        for ct in ["input","source"]:
            if ct in data:
                if isinstance(data[ct],string_types):
                    raw=[text_type(l) for l in data[ct].split("\n")]
                    data[ct]=[l+"\n" for l in raw[:-1]]+raw[-1:]
        self._cells().append(data)

    def pyFoamMetaData(self):
        """Our very own metadata-dictionary (created on demand)"""
        try:
            return self.__content["metadata"]["pyFoam"]
        except KeyError:
            self.__content["metadata"]["pyFoam"]={}
            return self.__content["metadata"]["pyFoam"]

    def addHeading(self,title,level=1,**kwargs):
        """Append a heading cell of the given level."""
        self.__addCell(cell_type=u("heading"),
                       source=title,
                       level=level,
                       **kwargs)

    def addCode(self,input,collapsed=False,language=u("python"),**kwargs):
        """Append a code cell with empty outputs."""
        self.__addCell(cell_type=u("code"),
                       collapsed=collapsed,
                       input=input,
                       language=text_type(language),
                       outputs=[],
                       **kwargs)

    def addMarkdown(self,text,**kwargs):
        """Append a markdown cell."""
        self.__addCell(cell_type=u("markdown"),
                       source=text,
                       **kwargs)

    def addRaw(self,text,**kwargs):
        """Append a raw-text cell."""
        self.__addCell(cell_type=u("raw"),
                       source=text,
                       **kwargs)

    def writeToFile(self,fName):
        """Write the notebook as JSON, stamping the modification time."""
        self.__content[u("metadata")][u("pyFoam")][u("modificationTime")]=asctime()
        with open(fName,"w") as fh:
            json.dump(self.__content,
                      fh,
                      indent=1)
class Cell(dict):
    """Wrapper for the dictionaries that represent notebook cells.
    Mostly for conveniently querying pyFoam metadata (classes etc.)"""

    def __init__(self,classes=(),pyFoam={},**kwargs):
        # NOTE(review): the mutable default {} for pyFoam is only read and
        # deep-copied, never mutated, so it is safe here.
        dict.__init__(self,deepcopy(kwargs))
        if not u("metadata") in self:
            self[u("metadata")]={}
        if len(classes)>0 or len(pyFoam)>0:
            # Merge the extra pyFoam metadata into the cell's metadata.
            py=deepcopy(pyFoam)
            if not "pyFoam" in self[u("metadata")]:
                self[u("metadata")]["pyFoam"]=py
            else:
                self[u("metadata")]["pyFoam"].update(py)
            if len(classes)>0:
                # A single string becomes a one-element tuple of class tags.
                if isinstance(classes,string_types):
                    self[u("metadata")]["pyFoam"]["classes"]=(classes,)
                else:
                    cl=deepcopy(classes)
                    self[u("metadata")]["pyFoam"]["classes"]=tuple(cl)

    def meta(self):
        """The cell's metadata dictionary."""
        return self[u("metadata")]

    def isClass(self,name):
        """Checks whether a cell is of a specific class. If a string is passed
        the string is checked directly. Otherwise *name* is assumed to be a
        container and True is returned if any of its entries matches.
        Cells without pyFoam class metadata never match."""
        try:
            if isinstance(name,string_types):
                return name in self[u("metadata")]["pyFoam"]["classes"]
            else:
                for n in name:
                    if n in self[u("metadata")]["pyFoam"]["classes"]:
                        return True
                return False
        except KeyError:
            return False
|
thomasjm/LyX | po/lyx_pot.py | Python | gpl-2.0 | 27,060 | 0.003806 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# file lyx_pot.py
# This file is part of LyX, the document processor.
# Licence details can be found in the file COPYING.
#
# \author Bo Peng
#
# Full author contact details are available in file CREDITS
# Usage: use
# lyx_pot.py -h
# to get usage message
# This script will extract translatable strings from input files and write
# to output in gettext .pot format.
#
from __future__ import print_function
import sys, os, re, getopt
def relativePath(path, base):
'''return relative path from top source dir'''
# full pathname of path
path1 = os.path.normpath(os.path.realpath(path)).split(os.sep)
path2 = os.path.normpath(os.path.realpath(base)).split(os.sep)
if path1[:len(path2)] != path2:
print("Path %s is not under top source directory" % path)
path3 = os.path.join(*path1[len(path2):]);
# replace all \ by / such that we get the same comments on Windows and *nix
path3 = path3.replace('\\', '/')
return path3
def writeString(outfile, infile, basefile, lineno, string):
string = string.replace('\\', '\\\\').replace('"', '')
if string == "":
return
print('#: %s:%d\nmsgid "%s"\nmsgstr ""\n' % \
(relativePath(infile, basefile), lineno, string), file=outfile)
def ui_l10n(input_files, output, base):
'''Generate pot file from lib/ui/*'''
output = open(output, 'w')
Submenu = re.compile(r'^[^#]*Submenu\s+"([^"]*)"', re.IGNORECASE)
Popupmenu = re.compile(r'^[^#]*PopupMenu\s+"[^"]+"\s+"([^"]*)"', re.IGNORECASE)
IconPalette = re.compile(r'^[^#]*IconPalette\s+"[^"]+"\s+"([^"]*)"', re.IGNORECASE)
Toolbar = re.compile(r'^[^#]*Toolbar\s+"[^"]+"\s+"([^"]*)"', re.IGNORECASE)
Item = re.compile(r'[^#]*Item\s+"([^"]*)"', re.IGNORECASE)
TableInsert = re.compile(r'[^#]*TableInsert\s+"([^"]*)"', re.IGNORECASE)
for src in input_files:
input = open(src)
for lineno, line in enumerate(input.readlines()):
if Submenu.match(line):
(string,) = Submenu.match(line).groups()
string = string.replace('_', ' ')
elif Popupmenu.match(line):
(string,) = Popupmenu.match(line).groups()
elif IconPalette.match(line):
(string,) = IconPalette.match(line).groups()
elif Toolbar.match(line):
(string,) = Toolbar.match(line).groups()
elif Item.match(line):
(string,) = Item.match(line).groups()
elif TableInsert.match(line):
(string,) = TableInsert.match(line).groups()
else:
continue
string = string.replace('"', '')
if string != "":
print('#: %s:%d\nmsgid "%s"\nmsgstr ""\n' % \
(relativePath(src, base), lineno+1, string), file=output)
input.close()
output.close()
def layouts_l10n(input_files, output, base, layouttranslations):
'''Generate pot file from lib/layouts/*.{layout,inc,module}'''
ClassDescription = re.compile(r'^\s*#\s*\\Declare(LaTeX|DocBook)Class.*\{(.*)\}$', re.IGNORECASE)
ClassCategory = re.compile(r'^\s*#\s*\\DeclareCategory\{(.*)\}$', re.IGNORECASE)
Style = re.compile(r'^\s*Style\s+(.*\S)\s*$', re.IGNORECASE)
# match LabelString, EndLabelString, LabelStringAppendix and maybe others but no comments
LabelString = re.compile(r'^[^#]*LabelString\S*\s+(.*\S)\s*$', re.IGNORECASE)
MenuString = re.compile(r'^[^#]*MenuString\S*\s+(.*\S)\s*$', re.IGNORECASE)
Tooltip = re.compile(r'^\s*Tooltip\S*\s+(.*\S)\s*$', re.IGNORECASE)
GuiName = re.compile(r'^\s*GuiName\s+(.*\S)\s*$', re.IGNORECASE)
ListName = re.compile(r'^\s*ListName\s+(.*\S)\s*$', re.IGNORECASE)
CategoryName = re.compile(r'^\s*Category\s+(.*\S)\s*$', re.IGNORECASE)
NameRE = re.compile(r'^\s*#\s*\\DeclareLyXModule.*{(.*)}$', re.IGNORECASE)
InsetLayout = re.compile(r'^InsetLayout\s+\"?(.*)\"?\s*$', re.IGNORECASE)
FlexCheck = re.compile(r'^Flex:(.*)', re.IGNORECASE)
CaptionCheck = re.compile(r'^Caption:(.*)', re.IG | NORECASE)
DescBegin = re.compile(r'^\s*#DescriptionBegin\s*$', re.IGNORECASE)
DescEnd = re.compile(r'^\s*#\s*DescriptionEnd\s*$', re.IGNORECASE)
Category = re.compile(r'^\s*#\s*Category | :\s+(.*\S)\s*$', re.IGNORECASE)
I18nPreamble = re.compile(r'^\s*((Lang)|(Babel))Preamble\s*$', re.IGNORECASE)
EndI18nPreamble = re.compile(r'^\s*End((Lang)|(Babel))Preamble\s*$', re.IGNORECASE)
I18nString = re.compile(r'_\(([^\)]+)\)')
CounterFormat = re.compile(r'^\s*PrettyFormat\s+"?(.*)"?\s*$', re.IGNORECASE)
CiteFormat = re.compile(r'^\s*CiteFormat', re.IGNORECASE)
KeyVal = re.compile(r'^\s*_\w+\s+(.*\S)\s*$')
Float = re.compile(r'^\s*Float\s*$', re.IGNORECASE)
UsesFloatPkg = re.compile(r'^\s*UsesFloatPkg\s+(.*\S)\s*$', re.IGNORECASE)
IsPredefined = re.compile(r'^\s*IsPredefined\s+(.*\S)\s*$', re.IGNORECASE)
End = re.compile(r'^\s*End', re.IGNORECASE)
Comment = re.compile(r'^(.*)#')
Translation = re.compile(r'^\s*Translation\s+(.*\S)\s*$', re.IGNORECASE)
KeyValPair = re.compile(r'\s*"(.*)"\s+"(.*)"')
oldlanguages = []
languages = []
keyset = set()
oldtrans = dict()
if layouttranslations:
linguas_file = os.path.join(base, 'po/LINGUAS')
for line in open(linguas_file).readlines():
res = Comment.search(line)
if res:
line = res.group(1)
if line.strip() != '':
languages.extend(line.split())
# read old translations if available
try:
input = open(output)
lang = ''
for line in input.readlines():
res = Comment.search(line)
if res:
line = res.group(1)
if line.strip() == '':
continue
res = Translation.search(line)
if res:
lang = res.group(1)
if lang not in languages:
oldlanguages.append(lang)
languages.append(lang)
oldtrans[lang] = dict()
continue
res = End.search(line)
if res:
lang = ''
continue
res = KeyValPair.search(line)
if res and lang != '':
key = res.group(1).decode('utf-8')
val = res.group(2).decode('utf-8')
key = key.replace('\\"', '"').replace('\\\\', '\\')
val = val.replace('\\"', '"').replace('\\\\', '\\')
oldtrans[lang][key] = val
keyset.add(key)
continue
print("Error: Unable to handle line:")
print(line)
except IOError:
print("Warning: Unable to open %s for reading." % output)
print(" Old translations will be lost.")
# walon is not a known document language
# FIXME: Do not hardcode, read from lib/languages!
if 'wa' in languages:
languages.remove('wa')
out = open(output, 'w')
for src in input_files:
readingDescription = False
readingI18nPreamble = False
readingFloat = False
readingCiteFormats = False
isPredefined = False
usesFloatPkg = True
listname = ''
floatname = ''
descStartLine = -1
descLines = []
lineno = 0
for line in open(src).readlines():
lineno += 1
res = ClassDescription.search(line)
if res != None:
string = res.group(2)
if not layouttranslations:
writeString(out, src, base, lineno + 1, string)
continue
res = ClassCategory.search(line)
if res != None:
string = res.group(1)
if not layouttranslations:
writeString(out, src, base, lineno + 1, string)
continue
if readingDescription:
|
myles/twtxt-cli | setup.py | Python | mit | 850 | 0.001176 | import os
from setuptools import setup
from twtxtcli import __version__, __project_name__, __project_link__
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name=__project_name__,
version=__v | ersion__,
author='Myles Braithwaite',
author_email='me@mylesbraithwaite.com',
description='',
license='BSD',
keywords='twtxt',
url=__project_lin | k__,
packages=['twtxtcli'],
long_description=read('README'),
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: MIT License",
],
install_requires=[
'requests',
'clint',
'humanize',
'iso8601'
],
entry_points={
'console_scripts': [
'twtxt-cli = twtxtcli.cli:main'
]
}
)
|
jcarbaugh/python-rd | rd/core.py | Python | bsd-3-clause | 6,389 | 0 | import datetime
import logging
JRD_TYPES = ('application/json', 'application/xrd+json', 'text/json')
XRD_TYPES = ('application/xrd+xml', 'text/xml')
logger = logging.getLogger("rd")
def _is_str(s):
try:
return isinstance(s, basestring)
except NameError:
return isinstance(s, str)
def loads(content, content_type):
from rd import jrd, xrd
content_type = content_type.split(";")[0]
if content_type in JRD_TYPES:
logger.debug("loads() loading JRD")
return jrd.loads(content)
elif content_type in XRD_TYPES:
logger.debug("loads() loading XRD")
return xrd.loads(content)
#
# special XRD types
#
class Attribute(object):
def __init__(self, name, value):
self.name = name
self.value = value
def __cmp__(self, other):
return cmp(str(self), str(other))
def __eq__(self, other):
return str(self) == other
def __str__(self):
return "%s=%s" % (self.name, self.value)
class Element(object):
def __init__(self, name, value, attrs=None):
self.name = name
self.value = value
self.attrs = attrs or {}
class Title(object):
def __init__(self, value, lang=None):
self.value = value
self.lang = lang
def __cmp__(self, other):
return cmp(str(self), str(other))
def __eq__(self, other):
return str(self) == str(other)
def __str__(self):
if self.lang:
return "%s:%s" % (self.lang, self.value)
return self.value
class Property(object):
def __init__(self, type_, value=None):
self.type = type_
self.value = value
def __cmp__(self, other):
return cmp(str(self), str(other))
def __eq__(self, other):
return str(self) == other
def __str__(self):
if self.value:
return "%s:%s" % (self.type, self.value)
return self.type
#
# special list types
#
class ListLikeObject(list):
def __setitem__(self, key, value):
value = self.item(value)
super(ListLikeObject, self).__setitem__(key, value)
def append(self, value):
value = self.item(value)
super(ListLikeObject, self).append(value)
def extend(self, values):
values = (self.item(value) for value in values)
super(ListLikeObject, self).extend(values)
class AttributeList(ListLikeObject):
def __call__(self, name):
for attr in self:
if attr.name == name:
yield attr
def item(self, value):
if isinstance(value, (list, tuple)):
return Attribute(*value)
elif not isinstance(value, Attribute):
raise ValueError('value must be an instance of Attribute')
return value
class ElementList(ListLikeObject):
def item(self, value):
if not isinstance(value, Element):
raise ValueError('value must be an instance of Type')
return value
class TitleList(ListLikeObject):
def item(self, value):
if _is_str(value):
return Title(value)
elif isinstance(value, (list, tuple)):
return Title(*value)
elif not isinstance(value, Title):
raise ValueError('value must be an instance of Title')
return value
class LinkList(ListLikeObject):
def __call__(self, rel):
for link in self:
if link.rel == rel:
yield link
def item(self, value):
if not isinstance(value, Link):
raise ValueError('value must be an instance of Link')
return value
class PropertyList(ListLikeObject):
def __call__(self, type_):
for prop in self:
if prop.type == type_:
yield prop
def item(self, value):
if _is_str(value):
return Property(value)
elif isinstance(value, (tuple, list)):
return Property(*value)
elif not isinstance(value, Property):
raise ValueError('value must be an instance of Property')
return value
#
# Link object
#
class Link(object):
def __init__(self, rel=None, type=None, href=None, template=None):
self.rel = rel
self.type = type
s | elf.href = href
self.template = template
self._titles = TitleList()
| self._properties = PropertyList()
def get_titles(self):
return self._titles
titles = property(get_titles)
def get_properties(self):
return self._properties
properties = property(get_properties)
#
# main RD class
#
class RD(object):
def __init__(self, xml_id=None, subject=None):
self.xml_id = xml_id
self.subject = subject
self._expires = None
self._aliases = []
self._properties = PropertyList()
self._links = LinkList()
self._signatures = []
self._attributes = AttributeList()
self._elements = ElementList()
# ser/deser methods
def to_json(self):
from rd import jrd
return jrd.dumps(self)
def to_xml(self):
from rd import xrd
return xrd.dumps(self)
# helper methods
def find_link(self, rels, attr=None):
if not isinstance(rels, (list, tuple)):
rels = (rels,)
for link in self.links:
if link.rel in rels:
if attr:
return getattr(link, attr, None)
return link
# custom elements and attributes
def get_elements(self):
return self._elements
elements = property(get_elements)
@property
def attributes(self):
return self._attributes
# defined elements and attributes
def get_expires(self):
return self._expires
def set_expires(self, expires):
if not isinstance(expires, datetime.datetime):
raise ValueError('expires must be a datetime object')
self._expires = expires
expires = property(get_expires, set_expires)
def get_aliases(self):
return self._aliases
aliases = property(get_aliases)
def get_properties(self):
return self._properties
properties = property(get_properties)
def get_links(self):
return self._links
links = property(get_links)
def get_signatures(self):
return self._signatures
signatures = property(get_links)
|
wathsalav/xos | xos/openstack_observer/steps/sync_slivers.py | Python | apache-2.0 | 6,455 | 0.012239 | import os
import base64
import socket
from django.db.models import F, Q
from xos.config import Config
from observer.openstacksyncstep import OpenStackSyncStep
from core.models.sliver import Sliver
from core.models.slice import Slice, SlicePrivilege, ControllerSlice
from core.models.network import Network, NetworkSlice, ControllerNetwork
from util.logger import Logger, logging
from observer.ansible import *
logger = Logger(level=logging.INFO)
def escape(s):
s = s.replace('\n',r'\n').replace('"',r'\"')
return s
class SyncSlivers(OpenStackSyncStep):
provides=[Sliver]
requested_interval=0
observes=Sliver
def get_userdata(self, sliver):
userdata = 'opencloud:\n slicename: "%s"\n hostname: "%s"\n' % (sliver.slice.name, sliver.node.name)
return userdata
def sync_record(self, sliver):
logger.info("sync'ing sliver:%s slice:%s controller:%s " % (sliver, sliver.slice.name, sliver.node.site_deployment.controller))
metadata_update = {}
if (sliver.numberCores):
metadata_update["cpu_cores"] = str(sliver.numberCores)
for tag in sliver.slice.tags.all():
if tag.name.startswith("sysctl-"):
metadata_update[tag.name] = tag.value
# public keys
slice_memberships = SlicePrivilege.objects.filter(slice=sliver.slice)
pubkeys = set([sm.user.public_key for sm in slice_memberships if sm.user.public_key])
if sliver.creator.public_key:
pubkeys.add(sliver.creator.public_key)
if sliver.slice.creator.public_key:
pubkeys.add(sliver.slice.creator.public_key)
nics = []
networks = [ns.network for ns in NetworkSlice.objects.filter(slice=sliver.slice)]
controller_networks = ControllerNetwork.objects.filter(network__in=networks,
controller=sliver.node.site_deployment.controller)
for controller_network in controller_networks:
if controller_network.network.template.visibility == 'private' and \
controller_network.network.template.translation == 'none' and controller_network.net_id:
nics.append(controller_network.net_id)
# now include network template
network_templates = [network.template.shared_network_name for network in networks \
if network.template.shared_network_name]
#driver = self.driver.client_driver(caller=sliver.creator, tenant=sliver.slice.name, controller=sliver.controllerNetwork)
driver = self.driver.admin_driver(tenant='admin', controller=sliver.node.site_deployment.controller)
nets = driver.shell.quantum.list_networks()['networks']
for net in nets:
if net['name'] in network_templates:
nics.append(net['id'])
if (not nics):
for net in nets:
if net['name']=='public':
nics.append(net['id'])
# look up image id
controller_driver = self.driver.admin_driver(controller=sliver.node.site_deployment.controller)
image_id = None
images = controller_driver.shell.glanceclient.images.list()
for image in images:
if image.name == sliver.image.name or not image_id:
image_id = image.id
# look up key name at the controller
# create/fetch keypair
keyname = None
keyname = sliver.creator.email.lower().replace('@', 'AT').replace('.', '') +\
sliver.slice.name
key_fields = {'name': keyname,
'public_key': sliver.creator.public_key}
try:
legacy = Config().observer_legacy
except:
legacy = False
if (legacy):
host_filter = sliver.node.name.split('.',1)[0]
else:
| host_filter = sliver.node.name.strip()
availability_zone_filter = 'nova:%s'%host_filter
sliver_name = '%s-%d'%(sliver.slice.name,sliver.id)
userData = self.get_userdata(sliver)
if sliver.userData:
userData = sliver.userData
controller = sliver.node.site_deployment.controller
tenant_fields = {'endpoint':controller.auth_url,
| 'admin_user': sliver.creator.email,
'admin_password': sliver.creator.remote_password,
'admin_tenant': sliver.slice.name,
'tenant': sliver.slice.name,
'tenant_description': sliver.slice.description,
'name':sliver_name,
'ansible_tag':sliver_name,
'availability_zone': availability_zone_filter,
'image_id':image_id,
'key_name':keyname,
'flavor_id':sliver.flavor.id,
'nics':nics,
'meta':metadata_update,
'key':key_fields,
'user_data':r'%s'%escape(userData)}
res = run_template('sync_slivers.yaml', tenant_fields,path='slivers', expected_num=2)
sliver_id = res[1]['info']['OS-EXT-SRV-ATTR:instance_name'] # 0 is for the key
sliver_uuid = res[1]['id'] # 0 is for the key
try:
hostname = res[1]['info']['OS-EXT-SRV-ATTR:hypervisor_hostname']
ip = socket.gethostbyname(hostname)
sliver.ip = ip
except:
pass
sliver.instance_id = sliver_id
sliver.instance_uuid = sliver_uuid
sliver.instance_name = sliver_name
sliver.save()
def delete_record(self, sliver):
sliver_name = '%s-%d'%(sliver.slice.name,sliver.id)
controller = sliver.node.site_deployment.controller
tenant_fields = {'endpoint':controller.auth_url,
'admin_user': sliver.creator.email,
'admin_password': sliver.creator.remote_password,
'admin_tenant': sliver.slice.name,
'tenant': sliver.slice.name,
'tenant_description': sliver.slice.description,
'name':sliver_name,
'ansible_tag':sliver_name,
'delete': True}
res = run_template('sync_slivers.yaml', tenant_fields,path='slivers')
if (len(res)!=1):
raise Exception('Could not delete sliver %s'%sliver.slice.name)
|
Tesi-Luca-Davide/ryu | ryu/ofproto/oxm_fields.py | Python | apache-2.0 | 13,622 | 0.00022 | # Copyright (C) 2013-2015 Nippon Telegraph and Telephone Corporation.
# Copyright (C) 2013-2015 YAMAMOTO Takashi <yamamoto at valinux co jp>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# there are two representations of value and mask this module deal with.
#
# "user"
# (value, mask) or value. the latter means no mask.
# value and mask are strings.
#
# "internal"
# value and mask are on-wire bytes.
# mask is None if no mask.
# There are two types of OXM/NXM headers.
#
# 32-bit OXM/NXM header
# +-------------------------------+-------------+-+---------------+
# | class | field |m| length |
# +-------------------------------+-------------+-+---------------+
#
# 64-bit experimenter OXM header
# +-------------------------------+-------------+-+---------------+
# | class (OFPXMC_EXPERIMENTER) | field |m| length |
# +-------------------------------+-------------+-+------- | --------+
# | experimenter ID |
# +---------------------------------------------------------------+
# NOTE: EXT-256 had a variation of experimenter OXM header.
# It has been rectified since then. Currentl | y this implementation
# supports only the old version.
#
# ONF EXT-256 (old, exp_type = 2560)
# +-------------------------------+-------------+-+---------------+
# | class (OFPXMC_EXPERIMENTER) | ????? |m| length |
# +-------------------------------+-------------+-+---------------+
# | experimenter ID (ONF_EXPERIMENTER_ID) |
# +-------------------------------+---------------+---------------+
# | exp_type (PBB_UCA=2560) | pbb_uca |
# +-------------------------------+---------------+
#
# ONF EXT-256 (new, oxm_field = 41)
# +-------------------------------+-------------+-+---------------+
# | class (OFPXMC_EXPERIMENTER) | PBB_UCA=41 |m| length |
# +-------------------------------+-------------+-+---------------+
# | experimenter ID (ONF_EXPERIMENTER_ID) |
# +-------------------------------+---------------+---------------+
# | reserved, should be zero | pbb_uca |
# +-------------------------------+---------------+
import itertools
import struct
from ryu.ofproto import ofproto_common
from ryu.lib.pack_utils import msg_pack_into
from ryu.lib import type_desc
OFPXMC_NXM_0 = 0 # Nicira Extended Match (NXM_OF_)
OFPXMC_NXM_1 = 1 # Nicira Extended Match (NXM_NX_)
OFPXMC_OPENFLOW_BASIC = 0x8000
OFPXMC_PACKET_REGS = 0x8001
OFPXMC_EXPERIMENTER = 0xffff
class _OxmClass(object):
def __init__(self, name, num, type_):
self.name = name
self.oxm_type = num | (self._class << 7)
# TODO(yamamoto): Clean this up later.
# Probably when we drop EXT-256 style experimenter OXMs.
self.num = self.oxm_type
self.type = type_
class OpenFlowBasic(_OxmClass):
_class = OFPXMC_OPENFLOW_BASIC
class PacketRegs(_OxmClass):
_class = OFPXMC_PACKET_REGS
class _Experimenter(_OxmClass):
_class = OFPXMC_EXPERIMENTER
def __init__(self, name, num, type_):
super(_Experimenter, self).__init__(name, num, type_)
self.num = (self.experimenter_id, self.oxm_type)
class ONFExperimenter(_Experimenter):
experimenter_id = ofproto_common.ONF_EXPERIMENTER_ID
class OldONFExperimenter(_Experimenter):
# This class is for the old version of EXT-256
experimenter_id = ofproto_common.ONF_EXPERIMENTER_ID
def __init__(self, name, num, type_):
super(OldONFExperimenter, self).__init__(name, 0, type_)
self.num = (self.experimenter_id, num)
self.exp_type = num
class OpenStateExperimenter(_Experimenter):
experimenter_id = ofproto_common.OPENSTATE_EXPERIMENTER_ID
class NiciraExperimenter(_Experimenter):
experimenter_id = ofproto_common.NX_EXPERIMENTER_ID
class NiciraExtended0(_OxmClass):
"""Nicira Extended Match (NXM_0)
NXM header format is same as 32-bit (non-experimenter) OXMs.
"""
_class = OFPXMC_NXM_0
class NiciraExtended1(_OxmClass):
"""Nicira Extended Match (NXM_1)
NXM header format is same as 32-bit (non-experimenter) OXMs.
"""
_class = OFPXMC_NXM_1
def generate(modname):
import sys
import functools
mod = sys.modules[modname]
def add_attr(k, v):
setattr(mod, k, v)
for i in mod.oxm_types:
uk = i.name.upper()
if isinstance(i.num, tuple):
continue
oxm_class = i.num >> 7
if oxm_class != OFPXMC_OPENFLOW_BASIC:
continue
ofpxmt = i.num & 0x3f
td = i.type
add_attr('OFPXMT_OFB_' + uk, ofpxmt)
add_attr('OXM_OF_' + uk, mod.oxm_tlv_header(ofpxmt, td.size))
add_attr('OXM_OF_' + uk + '_W', mod.oxm_tlv_header_w(ofpxmt, td.size))
name_to_field = dict((f.name, f) for f in mod.oxm_types)
num_to_field = dict((f.num, f) for f in mod.oxm_types)
add_attr('oxm_from_user', functools.partial(_from_user, name_to_field))
add_attr('oxm_from_user_header',
functools.partial(_from_user_header, name_to_field))
add_attr('oxm_to_user', functools.partial(_to_user, num_to_field))
add_attr('oxm_to_user_header',
functools.partial(_to_user_header, num_to_field))
add_attr('_oxm_field_desc', functools.partial(_field_desc, num_to_field))
add_attr('oxm_normalize_user', functools.partial(_normalize_user, mod))
add_attr('oxm_parse', functools.partial(_parse, mod))
add_attr('oxm_parse_header', functools.partial(_parse_header, mod))
add_attr('oxm_serialize', functools.partial(_serialize, mod))
add_attr('oxm_serialize_header', functools.partial(_serialize_header, mod))
add_attr('oxm_to_jsondict', _to_jsondict)
add_attr('oxm_from_jsondict', _from_jsondict)
def _get_field_info_by_name(name_to_field, name):
try:
f = name_to_field[name]
t = f.type
num = f.num
except KeyError:
t = type_desc.UnknownType
if name.startswith('field_'):
num = int(name.split('_')[1])
else:
raise KeyError('unknown match field ' + name)
return num, t
def _from_user_header(name_to_field, name):
(num, t) = _get_field_info_by_name(name_to_field, name)
return num
def _from_user(name_to_field, name, user_value):
(num, t) = _get_field_info_by_name(name_to_field, name)
# the 'list' case below is a bit hack; json.dumps silently maps
# python tuples into json lists.
if isinstance(user_value, (tuple, list)):
(value, mask) = user_value
else:
value = user_value
mask = None
if value is not None:
value = t.from_user(value)
if mask is not None:
mask = t.from_user(mask)
return num, value, mask
def _get_field_info_by_number(num_to_field, n):
try:
f = num_to_field[n]
t = f.type
name = f.name
except KeyError:
t = type_desc.UnknownType
name = 'field_%d' % (n,)
return name, t
def _to_user_header(num_to_field, n):
(name, t) = _get_field_info_by_number(num_to_field, n)
return name
def _to_user(num_to_field, n, v, m):
(name, t) = _get_field_info_by_number(num_to_field, n)
if v is not None:
if hasattr(t, 'size') and t.size != len(v):
raise Exception(
'Unexpected OXM payload length %d for %s (expected %d)'
% (len(v), name, t.size))
value = t.to_user(v)
else:
value = None
if m is None:
user_value = value
else:
user_value = (value, t.to_user(m))
return name, user_value
def _field_desc(num_to_field, n):
|
apporc/neutron | neutron/extensions/quotasv2.py | Python | apache-2.0 | 5,333 | 0 | # Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_utils import importutils
import webob
from neutron._i18n import _
from neutron.api import extensions
from neutron.api.v2 import attributes
from neutron.api.v2 import base
from neutron.api.v2 import resource
from neutron.common import constants as const
from neutron.common import exceptions as n_exc
from neutron import manager
from neutron import quota
from neutron.quota import resource_registry
from neutron import wsgi
RESOURCE_NAME = 'quota'
RESOURCE_COLLECTION = RESOURCE_NAME + "s"
QUOTAS = quota.QUOTAS
DB_QUOTA_DRIVER = 'neutron.db.quota.driver.DbQuotaDriver'
EXTENDED_ATTRIBUTES_2_0 = {
RESOURCE_COLLECTION: {}
}
class QuotaSetsController(wsgi.Controller):
def __init__(self, plugin):
self._resource_name = RESOURCE_NAME
self._plugin = plugin
self._driver = importutils.import_class(
cfg.CONF.QUOTAS.quota_driver
)
self._update_extended_attributes = True
def _update_attributes(self):
for quota_resource in resource_registry.get_all_resources().keys():
attr_dict = EXTENDED_ATTRIBUTES_2_0[RESOURCE_COLLECTION]
attr_dict[quota_resource] = {
'allow_post': False,
'allow_put': True,
'convert_to': attributes.convert_to_int,
'validate': {'type:range': [-1, const.DB_INTEGER_MAX_VALUE]},
'is_visible': True}
self._update_extended_attributes = False
def _get_quotas(self, request, tenant_id):
return self._driver.get_tenant_quotas(
request.context,
resource_registry.get_all_resources(),
tenant_id)
def create(self, request, body=None):
msg = _( | 'POST requests are not supported on this resource.')
| raise webob.exc.HTTPNotImplemented(msg)
def index(self, request):
context = request.context
self._check_admin(context)
return {self._resource_name + "s":
self._driver.get_all_quotas(
context, resource_registry.get_all_resources())}
def tenant(self, request):
"""Retrieve the tenant info in context."""
context = request.context
if not context.tenant_id:
raise n_exc.QuotaMissingTenant()
return {'tenant': {'tenant_id': context.tenant_id}}
def show(self, request, id):
if id != request.context.tenant_id:
self._check_admin(request.context,
reason=_("Only admin is authorized "
"to access quotas for another tenant"))
return {self._resource_name: self._get_quotas(request, id)}
def _check_admin(self, context,
reason=_("Only admin can view or configure quota")):
if not context.is_admin:
raise n_exc.AdminRequired(reason=reason)
def delete(self, request, id):
self._check_admin(request.context)
self._driver.delete_tenant_quota(request.context, id)
def update(self, request, id, body=None):
self._check_admin(request.context)
if self._update_extended_attributes:
self._update_attributes()
body = base.Controller.prepare_request_body(
request.context, body, False, self._resource_name,
EXTENDED_ATTRIBUTES_2_0[RESOURCE_COLLECTION])
for key, value in body[self._resource_name].items():
self._driver.update_quota_limit(request.context, id, key, value)
return {self._resource_name: self._get_quotas(request, id)}
class Quotasv2(extensions.ExtensionDescriptor):
"""Quotas management support."""
@classmethod
def get_name(cls):
return "Quota management support"
@classmethod
def get_alias(cls):
return RESOURCE_COLLECTION
@classmethod
def get_description(cls):
description = 'Expose functions for quotas management'
if cfg.CONF.QUOTAS.quota_driver == DB_QUOTA_DRIVER:
description += ' per tenant'
return description
@classmethod
def get_updated(cls):
return "2012-07-29T10:00:00-00:00"
@classmethod
def get_resources(cls):
"""Returns Ext Resources."""
controller = resource.Resource(
QuotaSetsController(manager.NeutronManager.get_plugin()),
faults=base.FAULT_MAP)
return [extensions.ResourceExtension(
Quotasv2.get_alias(),
controller,
collection_actions={'tenant': 'GET'})]
def get_extended_resources(self, version):
if version == "2.0":
return EXTENDED_ATTRIBUTES_2_0
else:
return {}
|
eloquence/unisubs | apps/subtitles/workflows.py | Python | agpl-3.0 | 18,336 | 0.000873 | # Amara, universalsubtitles.org
#
# Copyright (C) 2014 Participatory Culture Foundation
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see http://www.gnu.org/licenses/agpl-3.0.html.
"""
Subtitle Workflows
==================
Subtitle workflows control how subtitle sets get edited and published. In
particular they control:
- Work Modes -- Tweak the subtitle editor behavior (for example review mode)
- Actions -- User actions that can be done to subtitle sets (Publish,
Approve, Send back, etc).
- Permissions -- Who can edit subtitles, who can view private subtitles
Workflows
--------_
.. autoclass:: Workflow
:members: get_work_mode, get_actions, action_for_add_subtitles,
get_editor_notes, extra_tabs, get_add_language_mode,
user_can_view_video, user_can_edit_subtitles,
user_can_view_private_subtitles
.. autofunction:: get_workflow(video)
Editor Notes
------------
.. autoclass:: EditorNotes
Work Modes
----------
.. autoclass:: WorkMode
Actions
-------
Actions are things things that users can do to a subtitle set other than
changing the actual subtitles. They correspond to the buttons in the editor
at the bottom of the workflow session (publish, endorse, send back, etc).
Actions can occur alongside changes to the subtitle lines or independent of
them.
.. autoclass:: Action
:members:
.. autoclass:: Publish
"""
from collections import namedtuple
from datetime import datetime, timedelta
from django.utils.translation import ugettext_lazy
from django.utils.translation import ugettext as _
from subtitles import signals
from subtitles.exceptions import ActionError
from subtitles.models import SubtitleNote
from utils.behaviors import behavior
class Workflow(object):
"""
A workflow class controls the overall workflow for editing and publishing
subtitles. Workflows control the work modes, actions, and permissions for
a set of subtitles.
By default, we use a workflow that makes sense for public videos -- Anyone
can edit, the only action is Publish, etc. However, other components can
create custom workflows for specific videos by:
- Creating a Workflow subclass
- Overriding :func:`get_workflow` and returning a custom workflow object
"""
    def __init__(self, video):
        # Video whose subtitle editing/publishing this workflow governs.
        self.video = video
    def get_work_mode(self, user, language_code):
        """Get the work mode to use for an editing session
        Args:
            user (User): user who is editing
            language_code (str): language being edited
        Returns:
            :class:`WorkMode` object to use
        """
        # Abstract hook -- concrete workflow subclasses must override.
        raise NotImplementedError()
    def get_actions(self, user, language_code):
        """Get available actions for a user
        Args:
            user (User): user who is editing
            language_code (str): language being edited
        Returns:
            list of :class:`Action` objects that are available to the user.
        """
        # Abstract hook -- concrete workflow subclasses must override.
        raise NotImplementedError()
def action_for_add_subtitles(self, user, language_code, complete):
"""Get an action to use for add_subtitles()
This is used when pipeline.add_subtitles() is called, but not passed
an action. This happens for a couple reasons:
- User saves a draft (in which case complete will be None)
- User is adding subtitles via the API (complete can be True, False,
or None)
Subclasses can override this method if they want to use different
actions to handle this case.
Args:
user (User): user adding subtitles
language_code (str): language being edited
complete (bool or None): complete arg from add_subtitles()
Returns:
Action object or None.
"""
if complete is None:
return None
elif complete:
return APIComplete()
else:
return Unpublish()
    def extra_tabs(self, user):
        """Get extra tabs for the videos page
        Returns:
            list of (name, title) tuples. name is used for the tab id, title
            is a human friendly title. For each tab name you should create a
            video-<name>.html and video-<name>-tab.html templates. If you
            need to pass variables to those templates, create a
            setup_tab_<name> method that inputs the same args as the methods
            from VideoPageContext and returns a dict of variables for the
            template.
        """
        # Default workflow contributes no extra tabs.
        return []
    def get_add_language_mode(self, user):
        """Control the add new language section of the video page
        Args:
            user (User): user viewing the page
        Returns:
            - None/False: Don't display anything
            - "<standard>": Use the standard behavior -- a link that opens
              the create subtitles dialog.
            - any other string: Render this in the section. You probably want
              to send the string through mark_safe() to avoid escaping HTML
              tags.
        """
        # Default: show the standard "create subtitles" link.
        return "<standard>"
    def get_editor_notes(self, language_code):
        """Get notes to display in the editor
        Returns:
            :class:`EditorNotes` object
        """
        # Notes are scoped to a (video, language) pair.
        return EditorNotes(self.video, language_code)
def lookup_action(self, user, language_code, action_name):
for action in self.get_actions(user, language_code):
if action.name == | action_name:
retu | rn action
raise LookupError("No action: %s" % action_name)
    def perform_action(self, user, language_code, action_name):
        """Perform an action on a subtitle set
        This method is used to perform an action by itself, without new
        subtitles being added.
        """
        action = self.lookup_action(user, language_code, action_name)
        subtitle_language = self.video.subtitle_language(language_code)
        # None is passed for the version argument: no new subtitle version
        # accompanies a standalone action.
        action.validate(user, self.video, subtitle_language, None)
        action.update_language(user, self.video, subtitle_language, None)
        action.perform(user, self.video, subtitle_language, None)
    def user_can_view_private_subtitles(self, user, language_code):
        """Check if a user can view private subtitles
        Private subtitles are subtitles with visibility or visibility_override
        set to "private". A typical use is to limit viewing of the subtitles
        to members of a team.
        Returns:
            True/False
        """
        # Abstract hook -- concrete workflow subclasses must override.
        raise NotImplementedError()
    def user_can_view_video(self, user):
        """Check if a user can view the video
        Returns:
            True/False
        """
        # Abstract hook -- concrete workflow subclasses must override.
        raise NotImplementedError()
    def user_can_edit_subtitles(self, user, language_code):
        """Check if a user can edit subtitles
        Returns:
            True/False
        """
        # Abstract hook -- concrete workflow subclasses must override.
        raise NotImplementedError()
    def editor_data(self, user, language_code):
        """Get data to pass to the editor for this workflow."""
        editor_notes = self.get_editor_notes(language_code)
        return {
            # WorkMode/Action objects serialize themselves for the editor UI.
            'work_mode': self.get_work_mode(user, language_code).editor_data(),
            'actions': [action.editor_data() for action in
                        self.get_actions(user, language_code)],
            'notesHeading': editor_notes.heading,
            'notes': editor_notes.note_editor_data(),
        }
def editor_video_urls(self, language_code):
"""Get video URLs to send to the editor."""
video_urls = list(self.video.get_video_urls())
video_url |
agraubert/agutil | agutil/security/__init__.py | Python | mit | 452 | 0 | from .src.securesocket import SecureSocket
| from .src.connection import SecureConnection
from .src.files import encryptFile, decryptFile, encryptFileObj, decryptFileObj
from .src.server import | SecureServer
from .src.cipher import (
configure_cipher,
EncryptionCipher,
DecryptionCipher,
CipherHeader,
Bitmask,
CipherError,
HeaderError,
HeaderLengthError,
InvalidHeaderError,
EncryptionError,
DecryptionError
)
|
andreagrandi/toshl-python | toshl/category.py | Python | mit | 427 | 0 | class | Category(object):
def __init__(self, client):
self.client = client
def list(self):
response = self.client._make_request('/categories')
response = response.json()
return self.client._list_response(response)
def search(self, category_name):
categories = self.list()
for c in categories:
if c['name'] == category_name:
return c['i | d']
|
grg2rsr/xyt_movement_correction | lsm2tiff_batch_converter.py | Python | gpl-2.0 | 441 | 0.011338 | # -*- coding: utf-8 -*-
"""
Created on Wed Jan 14 13:13:02 2015
@author: georg
"""
import IOtools as io
import sys
import os
filelist_file = sys.argv[1]
with open(filelist_file,'r') as fH:
paths = [line.strip() for line in fH.readlines()]
|
for path in paths:
print "processing file: " + path
| outpath = os.path.splitext(path)[0] + '.tif'
io.lsm2tiff(path)
#sys.stdout.write("\n") # move the cursor to the next line |
djscheuf/Machikoro-Simulator | MachikoroSimulator/MachikoroSimulator/Simulator/BatchSimulator.py | Python | mit | 1,597 | 0.001252 | import concurrent.futures
from copy import deepcopy
from .SimulationResult import SimulationResult
class BatchSimulator:
    """Runs many copies of a game concurrently and tallies the winners.

    Games are executed in ``count // batch_size`` batches, each batch
    running ``batch_size`` games on a thread pool.  ``run()`` returns a
    SimulationResult aggregating games played, win counts and total turns.
    """

    def __init__(self, game, logger, count=1000, batch_size=5):
        self._game = game
        self._max = count
        self._batch_size = batch_size
        # NOTE(review): integer division drops count % batch_size games,
        # so fewer than `count` games run unless batch_size divides count.
        self._batches = self._max // self._batch_size
        self._count = 0
        self._winners = {}
        self._init_winners()
        self._turns = 0
        self._logger = logger

    def _init_winners(self):
        # Start every player at zero wins.
        for player in self._game.get_players():
            self._winners[player] = 0

    def run(self):
        """Run all batches and return the aggregated SimulationResult."""
        for _ in range(self._batches):
            self._run_a_batch()
        return SimulationResult(self._count, self._winners, self._turns)

    def _run_a_batch(self):
        # Each game runs on a private deep copy so concurrent games cannot
        # share mutable state.
        with concurrent.futures.ThreadPoolExecutor(max_workers=self._batch_size) as executor:
            batch = [executor.submit(self._run_a_game, deepcopy(self._game))
                     for _ in range(self._batch_size)]
            for future in concurrent.futures.as_completed(batch):
                game_result = future.result()
                self._increment_win_count(game_result.winner)
                self._turns += game_result.turns
                self._count += 1

    @staticmethod
    def _run_a_game(game):
        # Reset, play to completion, and report the game's outcome.
        game.reset()
        game.run()
        return game.get_result()

    def _increment_win_count(self, winner):
        name = winner.name
        self._logger.debug("\tWinner: {0}".format(winner.name))
        self._winners[name] += 1
|
MiroK/dolfin | demo/undocumented/extrapolation/python/demo_extrapolation.py | Python | gpl-3.0 | 1,227 | 0 | # Copyright (C) 2010 Anders Logg
#
# This file is part of DOLFIN.
#
# DOLFIN is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# DOLFIN is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with DOLFIN. If not, see <http://www.gnu.org/licenses/>.
# |
# First added: 2010-02-08
# Last changed: 2010-02-09
from dolfin import *

# Demonstrates extrapolating a P1 approximation to a P2 function space.

# Create mesh and function spaces
mesh = UnitSquareMesh(8, 8)
P1 = FunctionSpace(mesh, "CG", 1)
P2 = FunctionSpace(mesh, "CG", 2)

# Create exact dual
dual = Expression("sin(5.0*x[0])*sin(5.0*x[1])")

# Create P1 approximation of exact dual
z1 = Function(P1)
z1.interpolate(dual)

# Create P2 approximation from P1 approximation
z2 = Function(P2)
z2.extrapolate(z1)

# Plot approximations
plot(z1, title="z1")
plot(z2, title="z2")
interactive()
|
jdemel/gnuradio | gnuradio-runtime/python/gnuradio/gru/seq_with_cursor.py | Python | gpl-3.0 | 1,933 | 0.008795 | #
# Copyright 2003,2004 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
#
# misc utilities
fro | m __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import types
class seq_with_cursor (object):
    """A non-empty sequence plus a movable cursor into it.

    next()/prev() move the cursor one step and return (item, moved)
    where ``moved`` is False when the cursor was already at that end.
    """
    __slots__ = ['items', 'index']

    def __init__(self, items, initial_index=None, initial_value=None):
        # Kept as an assert for interface compatibility with existing
        # callers that may rely on AssertionError.
        assert len(items) > 0, "seq_with_cursor: len (items) == 0"
        self.items = items
        self.set_index(initial_index)
        if initial_value is not None:
            self.set_index_by_value(initial_value)

    def set_index(self, initial_index):
        if initial_index is None:
            # Default to the middle item.  Floor division is required:
            # this module imports true division from __future__, so `/`
            # would produce a float and break indexing below.
            self.index = len(self.items) // 2
        elif 0 <= initial_index < len(self.items):
            self.index = initial_index
        else:
            raise ValueError

    def set_index_by_value(self, v):
        """
        Set index to the smallest value such that items[index] >= v.
        If there is no such item, set index to the maximum value.
        """
        self.set_index(0)  # side effect!
        cv = self.current()
        more = True
        while cv < v and more:
            cv, more = next(self)  # side effect!

    def __next__(self):
        new_index = self.index + 1
        if new_index < len(self.items):
            self.index = new_index
            return self.items[new_index], True
        else:
            # Already at the right end: stay put, report no movement.
            return self.items[self.index], False

    def prev(self):
        new_index = self.index - 1
        if new_index >= 0:
            self.index = new_index
            return self.items[new_index], True
        else:
            # Already at the left end: stay put, report no movement.
            return self.items[self.index], False

    def current(self):
        return self.items[self.index]

    def get_seq(self):
        return self.items[:]  # copy of items
|
SickGear/SickGear | lib/enzyme/language.py | Python | gpl-3.0 | 15,177 | 0 | # -*- coding: utf-8 -*-
# enzyme - Video metadata parser
# Copyright 2011-2012 Antoine Bertin <diaoulael@gmail.com>
# Copyright 2003-2006 Dirk Meyer <dischi@freevo.org>
#
# This file is part of enzyme.
#
# enzyme is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# enzyme is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANT | Y; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License f | or more details.
#
# You should have received a copy of the GNU General Public License
# along with enzyme. If not, see <http://www.gnu.org/licenses/>.
import re
from six import string_types
__all__ = ['resolve']
def resolve(code):
    """
    Transform the given (2- or 3-letter) language code to a human readable
    language name. The return value is a 2-tuple containing the given
    language code and the language name. If the language code cannot be
    resolved, name will be 'Unknown (<code>)'.
    """
    if not code:
        return None, None
    if not isinstance(code, string_types):
        raise ValueError('Invalid language code specified by parser')
    # Normalize: lower-case, keep only the leading run of letters,
    # truncated to at most 3 characters.
    code = re.split(r'[^a-z]', code.lower())[0][:3]
    # Each entry in `codes` is (iso639-2 code[, iso639-1 code], name).
    name = next((entry[-1] for entry in codes if code in entry[:-1]), None)
    if name is not None:
        return code, name
    return code, u'Unknown (%r)' % code
# Parsed from http://www.loc.gov/standards/iso639-2/ISO-639-2_utf-8.txt
codes = (
('aar', 'aa', u'Afar'),
('abk', 'ab', u'Abkhazian'),
('ace', u'Achinese'),
('ach', u'Acoli'),
('ada', u'Adangme'),
('ady', u'Adyghe'),
('afa', u'Afro-Asiatic '),
('afh', u'Afrihili'),
('afr', 'af', u'Afrikaans'),
('ain', u'Ainu'),
('aka', 'ak', u'Akan'),
('akk', u'Akkadian'),
('alb', 'sq', u'Albanian'),
('ale', u'Aleut'),
('alg', u'Algonquian languages'),
('alt', u'Southern Altai'),
('amh', 'am', u'Amharic'),
('ang', u'English, Old '),
('anp', u'Angika'),
('apa', u'Apache languages'),
('ara', 'ar', u'Arabic'),
('arc', u'Official Aramaic '),
('arg', 'an', u'Aragonese'),
('arm', 'hy', u'Armenian'),
('arn', u'Mapudungun'),
('arp', u'Arapaho'),
('art', u'Artificial '),
('arw', u'Arawak'),
('asm', 'as', u'Assamese'),
('ast', u'Asturian'),
('ath', u'Athapascan languages'),
('aus', u'Australian languages'),
('ava', 'av', u'Avaric'),
('ave', 'ae', u'Avestan'),
('awa', u'Awadhi'),
('aym', 'ay', u'Aymara'),
('aze', 'az', u'Azerbaijani'),
('bad', u'Banda languages'),
('bai', u'Bamileke languages'),
('bak', 'ba', u'Bashkir'),
('bal', u'Baluchi'),
('bam', 'bm', u'Bambara'),
('ban', u'Balinese'),
('baq', 'eu', u'Basque'),
('bas', u'Basa'),
('bat', u'Baltic '),
('bej', u'Beja'),
('bel', 'be', u'Belarusian'),
('bem', u'Bemba'),
('ben', 'bn', u'Bengali'),
('ber', u'Berber '),
('bho', u'Bhojpuri'),
('bih', 'bh', u'Bihari'),
('bik', u'Bikol'),
('bin', u'Bini'),
('bis', 'bi', u'Bislama'),
('bla', u'Siksika'),
('bnt', u'Bantu '),
('bos', 'bs', u'Bosnian'),
('bra', u'Braj'),
('bre', 'br', u'Breton'),
('btk', u'Batak languages'),
('bua', u'Buriat'),
('bug', u'Buginese'),
('bul', 'bg', u'Bulgarian'),
('bur', 'my', u'Burmese'),
('byn', u'Blin'),
('cad', u'Caddo'),
('cai', u'Central American Indian '),
('car', u'Galibi Carib'),
('cat', 'ca', u'Catalan'),
('cau', u'Caucasian '),
('ceb', u'Cebuano'),
('cel', u'Celtic '),
('cha', 'ch', u'Chamorro'),
('chb', u'Chibcha'),
('che', 'ce', u'Chechen'),
('chg', u'Chagatai'),
('chi', 'zh', u'Chinese'),
('chk', u'Chuukese'),
('chm', u'Mari'),
('chn', u'Chinook jargon'),
('cho', u'Choctaw'),
('chp', u'Chipewyan'),
('chr', u'Cherokee'),
('chu', 'cu', u'Church Slavic'),
('chv', 'cv', u'Chuvash'),
('chy', u'Cheyenne'),
('cmc', u'Chamic languages'),
('cop', u'Coptic'),
('cor', 'kw', u'Cornish'),
('cos', 'co', u'Corsican'),
('cpe', u'Creoles and pidgins, English based '),
('cpf', u'Creoles and pidgins, French-based '),
('cpp', u'Creoles and pidgins, Portuguese-based '),
('cre', 'cr', u'Cree'),
('crh', u'Crimean Tatar'),
('crp', u'Creoles and pidgins '),
('csb', u'Kashubian'),
('cus', u'Cushitic '),
('cze', 'cs', u'Czech'),
('dak', u'Dakota'),
('dan', 'da', u'Danish'),
('dar', u'Dargwa'),
('day', u'Land Dayak languages'),
('del', u'Delaware'),
('den', u'Slave '),
('dgr', u'Dogrib'),
('din', u'Dinka'),
('div', 'dv', u'Divehi'),
('doi', u'Dogri'),
('dra', u'Dravidian '),
('dsb', u'Lower Sorbian'),
('dua', u'Duala'),
('dum', u'Dutch, Middle '),
('dut', 'nl', u'Dutch'),
('dyu', u'Dyula'),
('dzo', 'dz', u'Dzongkha'),
('efi', u'Efik'),
('egy', u'Egyptian '),
('eka', u'Ekajuk'),
('elx', u'Elamite'),
('eng', 'en', u'English'),
('enm', u'English, Middle '),
('epo', 'eo', u'Esperanto'),
('est', 'et', u'Estonian'),
('ewe', 'ee', u'Ewe'),
('ewo', u'Ewondo'),
('fan', u'Fang'),
('fao', 'fo', u'Faroese'),
('fat', u'Fanti'),
('fij', 'fj', u'Fijian'),
('fil', u'Filipino'),
('fin', 'fi', u'Finnish'),
('fiu', u'Finno-Ugrian '),
('fon', u'Fon'),
('fre', 'fr', u'French'),
('frm', u'French, Middle '),
('fro', u'French, Old '),
('frr', u'Northern Frisian'),
('frs', u'Eastern Frisian'),
('fry', 'fy', u'Western Frisian'),
('ful', 'ff', u'Fulah'),
('fur', u'Friulian'),
('gaa', u'Ga'),
('gay', u'Gayo'),
('gba', u'Gbaya'),
('gem', u'Germanic '),
('geo', 'ka', u'Georgian'),
('ger', 'de', u'German'),
('gez', u'Geez'),
('gil', u'Gilbertese'),
('gla', 'gd', u'Gaelic'),
('gle', 'ga', u'Irish'),
('glg', 'gl', u'Galician'),
('glv', 'gv', u'Manx'),
('gmh', u'German, Middle High '),
('goh', u'German, Old High '),
('gon', u'Gondi'),
('gor', u'Gorontalo'),
('got', u'Gothic'),
('grb', u'Grebo'),
('grc', u'Greek, Ancient '),
('gre', 'el', u'Greek, Modern '),
('grn', 'gn', u'Guarani'),
('gsw', u'Swiss German'),
('guj', 'gu', u'Gujarati'),
('gwi', u"Gwich'in"),
('hai', u'Haida'),
('hat', 'ht', u'Haitian'),
('hau', 'ha', u'Hausa'),
('haw', u'Hawaiian'),
('heb', 'he', u'Hebrew'),
('her', 'hz', u'Herero'),
('hil', u'Hiligaynon'),
('him', u'Himachali'),
('hin', 'hi', u'Hindi'),
('hit', u'Hittite'),
('hmn', u'Hmong'),
('hmo', 'ho', u'Hiri Motu'),
('hsb', u'Upper Sorbian'),
('hun', 'hu', u'Hungarian'),
('hup', u'Hupa'),
('iba', u'Iban'),
('ibo', 'ig', u'Igbo'),
('ice', 'is', u'Icelandic'),
('ido', 'io', u'Ido'),
('iii', 'ii', u'Sichuan Yi'),
('ijo', u'Ijo languages'),
('iku', 'iu', u'Inuktitut'),
('ile', 'ie', u'Interlingue'),
('ilo', u'Iloko'),
('ina', 'ia', u'Interlingua '),
('inc', u'Indic '),
('ind', 'id', u'Indonesian'),
('ine', u'Indo-European '),
('inh', u'Ingush'),
('ipk', 'ik', u'Inupiaq'),
('ira', u'Iranian '),
('iro', u'Iroquoian languages'),
('ita', 'it', u'Italian'),
('jav', 'jv', u'Javanese'),
('jbo', u'Lojban'),
('jpn', 'ja', u'Japanese'),
('jpr', u'Judeo-Persian'),
('jrb', u'Judeo-Arabic'),
('kaa', u'Kara-Kalpak'),
('kab', u'Kabyle'),
('kac', u'Kachin'),
('kal', 'kl', u'Kalaallisut'),
('kam', u'Kamba'),
('kan', 'kn', u'Kannada'),
('kar', u'Karen languages'),
('kas', 'ks', u'Kashmiri'),
('kau', 'kr', u'Kanuri'),
('kaw', u'Kawi'),
('kaz', 'kk', u'Kazakh'),
('kbd', u'Kabardian'),
('kha', u'Khasi'),
('khi', u'Khoisan '),
('khm', 'km', u'Central Khmer'),
('kho', u'Khotanese'),
('kik', 'ki', u'Kikuyu'),
('kin', 'rw', u'Kinyarwanda'),
('kir', 'ky', u'Kirghiz'),
('kmb', u'Kimbundu'),
('kok', u'Konkani'),
('kom', 'kv', u'Komi'),
('kon', 'kg', u'Kongo'),
('kor', 'ko', u'Korean'),
('kos', u'Kosraean'),
('kpe', u'Kpelle'),
('krc', u'Karachay-Balkar'),
('krl' |
redhat-openstack/heat | heat/openstack/common/loopingcall.py | Python | apache-2.0 | 4,501 | 0 | # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
from eventlet import event
from eventlet import greenthread
from heat.openstack.common.gettextutils import _LE, _LW
from heat.openstack.common import log as logging
from heat.openstack.common import timeutils
LOG = logging.getLogger(__name__)
class LoopingCallDone(Exception):
    """Raised by a poll function to cleanly stop a LoopingCallBase.

    Somewhat analogous to StopIteration: raising it from the looped
    function breaks out of the loop normally.  The optional constructor
    argument becomes the value returned by LoopingCallBase.wait().
    """
    def __init__(self, retvalue=True):
        """:param retvalue: Value that LoopingCallBase.wait() should return."""
        # Deliberately no super().__init__() call -- Exception.__new__
        # already records the constructor arguments in self.args.
        self.retvalue = retvalue
class LoopingCallBase(object):
    """Shared state for looping-call helpers.

    :param f: callable invoked on each loop iteration
    :param args: positional arguments passed to *f*
    :param kw: keyword arguments passed to *f*
    """
    def __init__(self, f=None, *args, **kw):
        self.args = args
        self.kw = kw
        self.f = f
        self._running = False
        # Event set by subclasses when the loop terminates.
        self.done = None

    def stop(self):
        # Request the loop exit after the current iteration.
        self._running = False

    def wait(self):
        """Block until the loop finishes and return its result value."""
        return self.done.wait()
class FixedIntervalLoopingCall(LoopingCallBase):
    """A looping call that invokes f on a fixed cadence.

    The sleep time is reduced by how long f itself took, keeping
    iterations `interval` seconds apart; overruns are logged as warnings.
    """
    def start(self, interval, initial_delay=None):
        self._running = True
        done = event.Event()

        def _inner():
            if initial_delay:
                greenthread.sleep(initial_delay)
            try:
                while self._running:
                    start = timeutils.utcnow()
                    self.f(*self.args, **self.kw)
                    end = timeutils.utcnow()
                    if not self._running:
                        break
                    # Subtract f's runtime so the cadence stays fixed.
                    delay = interval - timeutils.delta_seconds(start, end)
                    if delay <= 0:
                        LOG.warn(_LW('task run outlasted interval by %s sec') %
                                 -delay)
                    greenthread.sleep(delay if delay > 0 else 0)
            except LoopingCallDone as e:
                # Normal termination requested by f.
                self.stop()
                done.send(e.retvalue)
            except Exception:
                LOG.exception(_LE('in fixed duration looping call'))
                done.send_exception(*sys.exc_info())
                return
            else:
                done.send(True)

        self.done = done
        greenthread.spawn_n(_inner)
        return self.done
class DynamicLoopingCall(LoopingCallBase):
    """A looping call which sleeps until the next known event.
    The function called should return how long to sleep for before being
    called again.
    """
    def start(self, initial_delay=None, periodic_interval_max=None):
        self._running = True
        done = event.Event()
        def _inner():
            if initial_delay:
                greenthread.sleep(initial_delay)
            try:
                while self._running:
                    # f returns the number of seconds to sleep before the
                    # next iteration; it is capped at periodic_interval_max.
                    idle = self.f(*self.args, **self.kw)
                    if not self._running:
                        break
                    if periodic_interval_max is not None:
                        idle = min(idle, periodic_interval_max)
                    LOG.debug('Dynamic looping call sleeping for %.02f '
                              'seconds', idle)
                    greenthread.sleep(idle)
            except LoopingCallDone as e:
                # Normal termination requested by f; deliver its value to
                # anyone blocked in wait().
                self.stop()
                done.send(e.retvalue)
            except Exception:
                LOG.exception(_LE('in dynamic looping call'))
                done.send_exception(*sys.exc_info())
                return
            else:
                done.send(True)
        self.done = done
        greenthread.spawn(_inner)
        return self.done
|
dperconti/hexwick_python | hexwick/migrations/0001_initial.py | Python | agpl-3.0 | 3,992 | 0.002004 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
    """Initial schema for the hexwick app: Plaid accounts, objects,
    transactions, Plaid users and user profiles."""

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='PlaidAccount',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('institution_type', models.CharField(max_length=100)),
                ('account_type', models.CharField(max_length=15)),
                ('account_name', models.CharField(max_length=200)),
                ('account_last_four_number', models.CharField(max_length=5)),
                ('current_balance', models.CharField(max_length=200)),
                ('available_balance', models.CharField(max_length=200)),
                ('_user', models.CharField(max_length=200)),
                ('_item', models.CharField(max_length=200)),
                ('_id', models.CharField(max_length=200)),
                ('access_token', models.CharField(max_length=200)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='PlaidObject',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('plaid_type', models.CharField(default=None, max_length=10)),
                ('plaid_ID', models.CharField(default=None, max_length=10)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='PlaidTransaction',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('_account', models.CharField(max_length=200)),
                ('_id', models.CharField(max_length=100)),
                ('amount', models.FloatField()),
                ('date', models.DateTimeField(verbose_name=b'Transaction Date')),
                ('name', models.CharField(max_length=50)),
                ('is_pending', models.BooleanField(default=None)),
                ('category', models.CharField(default=b'Uncategorized', max_length=200)),
                ('category_id', models.CharField(default=b'Uncategorized', max_length=200)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='PlaidUser',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('user', models.OneToOneField(to=settings.AUTH_USER_MODEL)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='UserProfile',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('user', models.OneToOneField(to=settings.AUTH_USER_MODEL)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.AddField(
            model_name='plaidtransaction',
            name='_plaid_user',
            field=models.ForeignKey(default=None, to='hexwick.PlaidUser'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='plaidobject',
            name='user',
            field=models.ForeignKey(default=None, to='hexwick.UserProfile'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='plaidaccount',
            name='plaid_user',
            field=models.ForeignKey(to='hexwick.PlaidUser'),
            preserve_default=True,
        ),
    ]
|
lxy235/lserver | src/Common/Lang/zh_cn.py | Python | apache-2.0 | 1,569 | 0.026793 | # !/usr/bin/python
# coding=utf-8
#
# @Author: LiXiaoYu
# @Time: 2013-10-17
# @Info: lang
# Mapping of framework message keys to Simplified-Chinese UI text.
# Values are runtime strings and must not be translated or altered.
_lang = {
    '_MODULE_NOT_EXIST_': '无法加载模块',
    '_ERROR_ACTION_': '非法操作',
    '_LANGUAGE_NOT_LOAD_': '无法加载语言包',
    '_TEMPLATE_NOT_EXIST_': '模板不存在',
    '_MODULE_': '模块',
    '_ACTION_': '操作',
    '_ACTION_NOT_EXIST_': '控制器不存在或者没有定义',
    '_MODEL_NOT_EXIST_': '模型不存在或者没有定义',
    '_VALID_ACCESS_': '没有权限',
    '_XML_TAG_ERROR_': 'XML标签语法错误',
    '_DATA_TYPE_INVALID_': '非法数据对象!',
    '_OPERATION_WRONG_': '操作出现错误',
    '_NOT_LOAD_DB_': '无法加载数据库',
    '_NOT_SUPPORT_DB_': '系统暂时不支持数据库',
    '_NO_DB_CONFIG_': '没有定义数据库配置',
    '_NOT_SUPPERT_': '系统不支持',
    '_CACHE_TYPE_INVALID_': '无法加载缓存类型',
    '_FILE_NOT_WRITEABLE_': '目录(文件)不可写',
    '_METHOD_NOT_EXIST_': '您所请求的方法不存在!',
    '_CLASS_NOT_EXIST_': '实例化一个不存在的类!',
    '_CLASS_CONFLICT_': '类名冲突',
    '_TEMPLATE_ERROR_': '模板引擎错误',
    '_CACHE_WRITE_ERROR_': '缓存文件写入失败!',
    '_TAGLIB_NOT_EXIST_': '标签库未定义',
    '_OPERATION_FAIL_': '操作失败!',
    '_OPERATION_SUCCESS_': '操作成功!',
    '_SELECT_NOT_EXIST_': '记录不存在!',
    '_EXPRESS_ERROR_': '表达式错误',
    '_TOKEN_ERROR_': '表单令牌错误',
    '_RECORD_HAS_UPDATE_': '记录已经更新',
    '_NOT_ALLOW_PHP_': '模板禁用PHP代码',
}
|
rbaumg/trac | trac/web/tests/api.py | Python | bsd-3-clause | 31,977 | 0.000125 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2005-2019 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
import io
import os.path
import textwrap
import unittest
from trac import perm
from trac.core import TracError
from trac.test import EnvironmentStub, MockPerm, mkdtemp, rmtree
from trac.util import create_file
from trac.util.datefmt import utc
from trac.util.html import tag
from trac.web.api import HTTPBadRequest, HTTPInternalServerError, Request, \
RequestDone, parse_arg_list
from trac.web.main import FakeSession
from tracopt.perm.authz_policy import AuthzPolicy
class RequestHandlerPermissionsTestCaseBase(unittest.TestCase):
    """Base class for request-handler permission tests.

    Subclasses set `authz_policy` to the contents of an authz policy file
    (or leave it None to use the default permission policies) and pass
    their handler class to setUp().
    """
    authz_policy = None

    def setUp(self, module_class):
        self.path = mkdtemp()
        if self.authz_policy is not None:
            # Write the policy file and wire AuthzPolicy in front of the
            # default permission policy.
            self.authz_file = os.path.join(self.path, 'authz_policy.conf')
            create_file(self.authz_file, self.authz_policy)
            self.env = EnvironmentStub(enable=['trac.*', AuthzPolicy],
                                       path=self.path)
            self.env.config.set('authz_policy', 'authz_file', self.authz_file)
            self.env.config.set('trac', 'permission_policies',
                                'AuthzPolicy, DefaultPermissionPolicy')
        else:
            self.env = EnvironmentStub(path=self.path)
        self.req_handler = module_class(self.env)

    def tearDown(self):
        self.env.reset_db_and_disk()

    def get_navigation_items(self, req):
        return self.req_handler.get_navigation_items(req)

    def grant_perm(self, username, *actions):
        # Grant each listed action to username via the permission system.
        permsys = perm.PermissionSystem(self.env)
        for action in actions:
            permsys.grant_permission(username, action)

    def process_request(self, req):
        """Assert the handler matches req, then dispatch it."""
        self.assertTrue(self.req_handler.match_request(req))
        return self.req_handler.process_request(req)
def _make_environ(scheme='http', server_name='example.org',
                  server_port=80, method='GET', script_name='/trac',
                  **kwargs):
    """Build a minimal WSGI environ dict for tests.

    Extra keyword arguments (e.g. PATH_INFO, QUERY_STRING) are merged in
    verbatim and override the defaults on key collision.
    """
    environ = {'wsgi.url_scheme': scheme, 'wsgi.input': io.BytesIO(),
               'REQUEST_METHOD': method, 'SERVER_NAME': server_name,
               'SERVER_PORT': server_port, 'SCRIPT_NAME': script_name}
    environ.update(kwargs)
    return environ
def _make_req(environ, authname='admin', chrome=None, form_token='A' * 40,
              locale=None, perm=MockPerm(), tz=utc, use_xsendfile=False,
              xsendfile_header='X-Sendfile'):
    # Build a Request over `environ` that records everything it sends
    # (status, headers, body) instead of writing to a real client.
    # NOTE(review): MockPerm() as a default argument is created once at
    # import time and shared across calls -- fine for a stateless mock.
    if chrome is None:
        chrome = {
            'links': {},
            'scripts': [],
            'theme': 'theme.html',
            'logo': '',
            'nav': ''
        }

    class RequestWithSentAttrs(Request):
        """Subclass of `Request` with "sent" attributes."""
        def __init__(self, environ):
            self.status_sent = []
            self.headers_sent = {}
            self._response_sent = io.BytesIO()

            # WSGI write callable: captures the body in memory.
            def write(data):
                self._response_sent.write(data)

            # WSGI start_response: captures status and headers.
            def start_response(status, headers, exc_info=None):
                self.status_sent.append(status)
                self.headers_sent.update(dict(headers))
                return write
            super(RequestWithSentAttrs, self).__init__(environ, start_response)

        @property
        def response_sent(self):
            return self._response_sent.getvalue()

    req = RequestWithSentAttrs(environ)
    # Setup default callbacks.
    req.authname = authname
    req.chrome = chrome
    req.form_token = form_token
    req.locale = locale
    req.perm = perm
    req.session = FakeSession()
    req.tz = tz
    req.use_xsendfile = use_xsendfile
    req.xsendfile_header = xsendfile_header
    return req
class RequestTestCase(unittest.TestCase):
def test_repr_with_path(self):
environ = _make_environ(PATH_INFO='/path')
req = Request(environ, None)
self.assertEqual(repr(req), """<Request "GET '/path'">""")
def test_repr_with_path_and_query_string(self):
environ = _make_environ(QUERY_STRING='A=B', PATH_INFO='/path')
req = Request(environ, None)
self.assertEqual(repr(req), """<Request "GET '/path?A=B'">""")
def test_get(self):
qs = 'arg1=0&arg2=1&arg1=abc&arg3=def&arg3=1'
environ = _make_environ(method='GET', QUERY_STRING=qs)
req = Request(environ, None)
self.assertEqual('0', req.args.get('arg1'))
self.assertEqual('def', req.args.get('arg3'))
def test_getfirst(self):
qs = 'arg1=0&arg2=1&arg1=abc&arg3=def&arg3=1'
environ = _make_environ(method='GET', QUERY_STRING=qs)
req = Request(environ, None)
self.assertEqual('0', req.args.getfirst('arg1'))
self.assertEqual('def', req.args.getfirst('arg3'))
def test_get_list(self):
qs = 'arg1=0&arg2=1&arg1=abc&arg3=def&arg3=1'
environ = _make_environ(method='GET', QUERY_STRING=qs)
req = Request(environ, None)
self.assertEqual(['0', 'abc'], req.args.getlist('arg1'))
self.assertEqual(['def', '1'], req.args.getlist('arg3'))
def test_as_bool(self):
qs = 'arg1=0&arg2=1&arg3=yes&arg4=a&arg5=1&arg5=0'
environ = _make_environ(method='GET', QUERY_STRING=qs)
req = Request(environ, None)
self.assertIsNone(req.args.as_bool('arg0'))
self.assertTrue(req.args.as_bool('arg0', True))
self.assertFalse(req.args.as_bool('arg1'))
self.assertFalse(req.args.as_bool('arg1', True))
self.assertTrue(req.args.as_bool('arg2'))
self.assertTrue(req.args.as_bool('arg3'))
self.assertFalse(req.args.as_bool('arg4'))
self.assertTrue(req.args.as_bool('arg4', True))
self.assertTrue(req.args.as_bool('arg5'))
def test_as_int(self):
qs = 'arg1=1&arg2=a&arg3=3&arg3=4'
environ = _make_environ(method='GET', QUERY_STRING=qs)
req = Request(environ, None)
self.assertIsNone(req.args.as_int('arg0'))
self.assertEqual(2, req.args.as_int('arg0', 2))
self.assertEqual(1, req.args.as_int('arg1'))
self.assertEqual(1, req.args.as_int('arg1', 2))
self.assertEqual(2, req.args.as_int('arg1', min=2))
self.assertEqual(2, req.args.as_int('arg1', None, 2))
self.assertEqual(0, req.args.as_int('arg1', max=0))
self.assertEqual(0, req.args.as_int('arg1', None, max=0))
self.assertEqual(0, req.args.as_int('arg1', None, -1, 0))
self.assertIsNone(req.args.as_int('arg2'))
self.assertEqual(2, req.args.as_int('arg2', 2))
self.assertEqual(3, req.args.as_int('arg3'))
def test_getbool(self):
qs = 'arg1=0&arg2=1&arg3=yes&arg4=a&arg5=1&arg5=0'
environ = _make_environ(method='GET', QUERY_STRING=qs)
req = Request(environ, None)
self.assertIsNone(req.args.getbool('arg0'))
self.assertTrue(req.args.getbool('arg0', True))
self.assertFalse(req.args.getbool('arg1'))
self.assertFalse(req.args.getbool('arg1', True))
self.assertTrue(req.args.getbool('arg2'))
self.assertTrue(req.args.getbool('arg3'))
with self.assertRaises(HTTPBadRequest):
req.args.getbool('arg4')
with self.assertRaises(HTTPBadRequest):
req.args.getbool('arg4', True)
with self.assertRaises(HTTPBadRequest):
req.args.getbool('arg5')
with self.assertRaises(HTTPBadRequest):
req.args.getbool('arg5', True)
def test_getint(self):
qs = 'arg1=1&arg2=a&arg3=3&arg3=4'
environ = _make_environ(method='GET', QUERY_STRING=qs)
req = Request(environ, None)
self.assertIsNone(req.args.getint('arg0'))
self.assertEqual(2, req.args.getint('arg0', 2))
self.a |
shoopio/shoop | shuup/discounts/migrations/0007_delete_fields.py | Python | agpl-3.0 | 2,742 | 0.001459 | # Generated by Django 2.2.18 on 2021-07-28 20:41
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    """Drop the legacy coupon/availability schema.

    The preceding migration (0006_migrate_discounts) moved the data these
    models and fields carried; this migration removes the now-unused schema.
    Auto-generated by Django — operation order matters, edit with care.
    """
    dependencies = [
        ('discounts', '0006_migrate_discounts'),
    ]
    operations = [
        # Detach relations from CouponCode/CouponUsage before the models
        # themselves are deleted at the end of this migration.
        migrations.RemoveField(
            model_name='couponcode',
            name='created_by',
        ),
        migrations.RemoveField(
            model_name='couponcode',
            name='modified_by',
        ),
        migrations.RemoveField(
            model_name='couponcode',
            name='shops',
        ),
        migrations.RemoveField(
            model_name='couponusage',
            name='coupon',
        ),
        migrations.RemoveField(
            model_name='couponusage',
            name='order',
        ),
        # Discount/HappyHour lose their many-to-many 'shops' in favour of the
        # single 'shop' FK altered below.
        migrations.RemoveField(
            model_name='discount',
            name='availability_exceptions',
        ),
        migrations.RemoveField(
            model_name='discount',
            name='coupon_code',
        ),
        migrations.RemoveField(
            model_name='discount',
            name='exclude_selected_contact_group',
        ),
        migrations.RemoveField(
            model_name='discount',
            name='shops',
        ),
        migrations.RemoveField(
            model_name='happyhour',
            name='shops',
        ),
        migrations.AlterField(
            model_name='discount',
            name='created_by',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='discounts_created_by', to=settings.AUTH_USER_MODEL, verbose_name='created by'),
        ),
        migrations.AlterField(
            model_name='discount',
            name='modified_by',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='discounts_modified_by', to=settings.AUTH_USER_MODEL, verbose_name='modified by'),
        ),
        migrations.AlterField(
            model_name='discount',
            name='shop',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='shop_discounts', to='shuup.Shop', verbose_name='shop'),
        ),
        migrations.AlterField(
            model_name='happyhour',
            name='shop',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='shuup.Shop', verbose_name='shop'),
        ),
        # Finally drop the emptied legacy models.
        migrations.DeleteModel(
            name='AvailabilityException',
        ),
        migrations.DeleteModel(
            name='CouponCode',
        ),
        migrations.DeleteModel(
            name='CouponUsage',
        ),
    ]
|
all-of-us/raw-data-repository | rdr_service/lib_fhir/fhirclient_3_0_0/models/testscript.py | Python | bsd-3-clause | 50,708 | 0.008381 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated from FHIR 3.0.0.11832 (http://hl7.org/fhir/StructureDefinition/TestScript) on 2017-03-22.
# 2017, SMART Health IT.
from . import domainresource
# NOTE: generated code (see the "Generated from FHIR 3.0.0.11832" header at
# the top of this file); prefer regenerating over hand-editing.
class TestScript(domainresource.DomainResource):
    """ Describes a set of tests.
    A structured set of tests against a FHIR server implementation to determine
    compliance against the FHIR specification.
    """
    resource_type = "TestScript"
    def __init__(self, jsondict=None, strict=True):
        """ Initialize all valid properties.
        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        # Every FHIR element is declared here with a default of None; the
        # base-class __init__ then populates them from jsondict.
        self.contact = None
        """ Contact details for the publisher.
        List of `ContactDetail` items (represented as `dict` in JSON). """
        self.copyright = None
        """ Use and/or publishing restrictions.
        Type `str`. """
        self.date = None
        """ Date this was last changed.
        Type `FHIRDate` (represented as `str` in JSON). """
        self.description = None
        """ Natural language description of the test script.
        Type `str`. """
        self.destination = None
        """ An abstract server representing a destination or receiver in a
        message exchange.
        List of `TestScriptDestination` items (represented as `dict` in JSON). """
        self.experimental = None
        """ For testing purposes, not real usage.
        Type `bool`. """
        self.fixture = None
        """ Fixture in the test script - by reference (uri).
        List of `TestScriptFixture` items (represented as `dict` in JSON). """
        self.identifier = None
        """ Additional identifier for the test script.
        Type `Identifier` (represented as `dict` in JSON). """
        self.jurisdiction = None
        """ Intended jurisdiction for test script (if applicable).
        List of `CodeableConcept` items (represented as `dict` in JSON). """
        self.metadata = None
        """ Required capability that is assumed to function correctly on the
        FHIR server being tested.
        Type `TestScriptMetadata` (represented as `dict` in JSON). """
        self.name = None
        """ Name for this test script (computer friendly).
        Type `str`. """
        self.origin = None
        """ An abstract server representing a client or sender in a message
        exchange.
        List of `TestScriptOrigin` items (represented as `dict` in JSON). """
        self.profile = None
        """ Reference of the validation profile.
        List of `FHIRReference` items referencing `Resource` (represented as `dict` in JSON). """
        self.publisher = None
        """ Name of the publisher (organization or individual).
        Type `str`. """
        self.purpose = None
        """ Why this test script is defined.
        Type `str`. """
        self.rule = None
        """ Assert rule used within the test script.
        List of `TestScriptRule` items (represented as `dict` in JSON). """
        self.ruleset = None
        """ Assert ruleset used within the test script.
        List of `TestScriptRuleset` items (represented as `dict` in JSON). """
        self.setup = None
        """ A series of required setup operations before tests are executed.
        Type `TestScriptSetup` (represented as `dict` in JSON). """
        self.status = None
        """ draft | active | retired | unknown.
        Type `str`. """
        self.teardown = None
        """ A series of required clean up steps.
        Type `TestScriptTeardown` (represented as `dict` in JSON). """
        self.test = None
        """ A test in this script.
        List of `TestScriptTest` items (represented as `dict` in JSON). """
        self.title = None
        """ Name for this test script (human friendly).
        Type `str`. """
        self.url = None
        """ Logical URI to reference this test script (globally unique).
        Type `str`. """
        self.useContext = None
        """ Context the content is intended to support.
        List of `UsageContext` items (represented as `dict` in JSON). """
        self.variable = None
        """ Placeholder for evaluated elements.
        List of `TestScriptVariable` items (represented as `dict` in JSON). """
        self.version = None
        """ Business version of the test script.
        Type `str`. """
        super(TestScript, self).__init__(jsondict=jsondict, strict=strict)
    def elementProperties(self):
        # Property table consumed by the serialization machinery. Each tuple
        # appears to be (attribute, json name, type, is_list, of_many,
        # required) — NOTE(review): confirm against fhirabstractbase before
        # editing.
        js = super(TestScript, self).elementProperties()
        js.extend([
            ("contact", "contact", contactdetail.ContactDetail, True, None, False),
            ("copyright", "copyright", str, False, None, False),
            ("date", "date", fhirdate.FHIRDate, False, None, False),
            ("description", "description", str, False, None, False),
            ("destination", "destination", TestScriptDestination, True, None, False),
            ("experimental", "experimental", bool, False, None, False),
            ("fixture", "fixture", TestScriptFixture, True, None, False),
            ("identifier", "identifier", identifier.Identifier, False, None, False),
            ("jurisdiction", "jurisdiction", codeableconcept.CodeableConcept, True, None, False),
            ("metadata", "metadata", TestScriptMetadata, False, None, False),
            ("name", "name", str, False, None, True),
            ("origin", "origin", TestScriptOrigin, True, None, False),
            ("profile", "profile", fhirreference.FHIRReference, True, None, False),
            ("publisher", "publisher", str, False, None, False),
            ("purpose", "purpose", str, False, None, False),
            ("rule", "rule", TestScriptRule, True, None, False),
            ("ruleset", "ruleset", TestScriptRuleset, True, None, False),
            ("setup", "setup", TestScriptSetup, False, None, False),
            ("status", "status", str, False, None, True),
            ("teardown", "teardown", TestScriptTeardown, False, None, False),
            ("test", "test", TestScriptTest, True, None, False),
            ("title", "title", str, False, None, False),
            ("url", "url", str, False, None, True),
            ("useContext", "useContext", usagecontext.UsageContext, True, None, False),
            ("variable", "variable", TestScriptVariable, True, None, False),
            ("version", "version", str, False, None, False),
        ])
        return js
from . import backboneelement
class TestScriptDestination(backboneelement.BackboneElement):
""" An abstract server representing a destination or receiver in a message
exchange.
An abstract server used in operations within this test script in the
destination element.
"""
resource_type = "TestScriptDestination"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.index = None
""" The index of the abstract destination server starting at 1.
Type `int`. """
self.profile = None
""" FHIR-Server | FHIR-SDC-FormManager | FHIR-SDC-FormReceiver | FHIR-
SDC-FormProcessor.
Type `Coding` (represented as `dict` in JSON). """
super(TestScriptDestination, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(TestScriptDestination, self).elementProperties()
js.extend([
|
reschly/cryptopals | prob3.py | Python | apache-2.0 | 2,592 | 0.021991 | #!/usr/bin/env python
# Written against python 3.3.1
# Matasano Problem 3
# The hex encoded string:
# 1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736
# has been XOR'd against a single character. Find the key, decrypt
# the message.
# Some of the 'magic' in this comes from a college crypto course
# That course used Stinson's Cryptography book, 3rd edition
from prob1 import hexToRaw, rawToHexLUT
from prob2 import hex_xor
import string
# Expected English letter (and space) frequencies, per Stinson's
# Cryptography, 3rd edition; used to score candidate plaintexts.
letterFrequency = {
    'A': .082, 'B': .015, 'C': .028, 'D': .043, 'E': .127, 'F': .022,
    'G': .020, 'H': .061, 'I': .070, 'J': .002, 'K': .008, 'L': .040,
    'M': .024, 'N': .067, 'O': .075, 'P': .019, 'Q': .001, 'R': .060,
    'S': .063, 'T': .091, 'U': .028, 'V': .010, 'W': .023, 'X': .001,
    'Y': .020, 'Z': .001, ' ': .200,
}
# See page 35, Stinson
# See page 35, Stinson
def calculateMG(plain):
    """Score *plain* (bytes) against expected English letter frequencies.

    Higher scores mean the byte string looks more like English text.
    """
    tally = [0] * 256
    for byte in plain:
        # Only count 7-bit bytes; anything else is certainly not English.
        if byte < 128:
            tally[ord(chr(byte).upper())] += 1
    score = 0.0
    for letter in string.ascii_uppercase:
        score += letterFrequency[letter] * tally[ord(letter)]
    score += letterFrequency[' '] * tally[ord(' ')]
    return score / len(plain)
def tryKey(cipher, key):
    """XOR *cipher* with *key* repeated to full length; return (score, hex plaintext)."""
    repeated_key = (key * len(cipher))[:len(cipher)]
    candidate = hex_xor(cipher, repeated_key)
    return calculateMG(hexToRaw(candidate)), candidate
def findGoodKeys(cipher):
    """Brute-force every single-byte key and print the English-looking hits."""
    for key_byte in range(256):
        score, candidate = tryKey(cipher, rawToHexLUT[key_byte])
        #print(str(key_byte) + ": " + str(score));
        if score > .050:
            # Strip the b'...' wrapper from the bytes repr for display.
            decoded = str(candidate).lstrip("b'").rstrip("'")
            print("potential key: 0x" + rawToHexLUT[key_byte])
            print("Potential hex(plain): " + decoded)
            print("potential plaintext: " + str(hexToRaw(decoded)).lstrip("b'").rstrip("'"))
if __name__ == "__main__":
    # Challenge ciphertext: a hex string XOR'd against one unknown byte.
    cip = b'1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736';
    findGoodKeys(cip);
|
the13fools/Bokeh_Examples | plotting/file/relative_paths.py | Python | bsd-3-clause | 1,023 | 0.00391 | # All of the other examples directly embed the Javascript and CSS code for
# Bokeh's client-side runtime into the HTML. This leads to the HTML files
# being rather large. An alternative is to ask Bokeh to produce HTML that
# has a relative link to the Bokeh Javascript and CSS. This is easy to
# do; you just pass in a few extra arguments to the output_file() command.
import numpy as np
from bokeh.plotting import *
# Plot one sine wave and emit HTML that links to Bokeh's JS/CSS by
# relative path (mode="relative") instead of inlining them.
num_points = 100
xs = np.linspace(0, 4 * np.pi, num_points)
ys = np.sin(xs)
output_file("relative_paths.html", title="Relative path example", mode="relative")
scatter(xs, ys, color="#FF00FF", tools="pan,wheel_zoom,box_zoom,reset,previewsave")
show()
# By default, the URLs for the Javascript and CSS will be relative to
# the current directory, i.e. th | e directory in which the HTML file is
# generated. You can provide a different "root" directory from which
# the relative paths will be computed:
#
# output_file("scatter.html", title="scatter.py example",
# resources="relative", rootdir="some/other/path")
|
ckclark/leetcode | py/valid-triangle-number.py | Python | apache-2.0 | 931 | 0.003222 | from collections import Counter
class Solution(object):
    def triangleNumber(self, nums):
        """
        Count index triples whose values can form a triangle with
        non-zero area. Python 2 code (iteritems/xrange, integer '/').
        :type nums: List[int]
        :rtype: int
        """
        # Drop zero-length sides; in Python 2 filter() returns a list.
        nums = filter(None, nums)
        if not nums:
            return 0
        c = Counter(nums)
        N = max(nums)
        # buckets[v] = number of sides with value <= v (prefix sums).
        buckets = [0] * (N + 1)
        for k, cnt in c.iteritems():
            buckets[k] += cnt
        for i in xrange(1, N + 1):
            buckets[i] += buckets[i - 1]
        s = sorted(c)  # distinct side values, ascending
        ans = 0
        for i, n1 in enumerate(s):
            for j in xrange(i):
                n2 = s[j]  # n2 < n1
                n1_n2 = n1 + n2
                # One side n2, one side n1, third side t with n1 < t < n1+n2.
                ans += c[n1] * c[n2] * (buckets[min(n1_n2 - 1, N)] - buckets[n1])
                # One side n2 plus two equal sides n1 (always valid: n2+n1 > n1).
                ans += c[n2] * (c[n1] - 1) * c[n1] / 2
            # Three equal sides n1: C(count, 3).
            ans += c[n1] * (c[n1] - 1) * (c[n1] - 2) / 6
            # Two equal sides n1 plus a larger third t with n1 < t < 2*n1.
            ans += c[n1] * (c[n1] - 1) / 2 * (buckets[min(n1 * 2 - 1, N)] - buckets[n1])
        return ans
|
ChristopherGS/sensor_readings | app/sensors/models.py | Python | bsd-3-clause | 3,951 | 0.004303 | from datetime import datetime
from app.data import CRUDMixin, db
class Experiment(CRUDMixin, db.Model):
    # One recording session; parent of the Sensor rows captured during it.
    id = db.Column(db.Integer, primary_key=True)
    hardware = db.Column(db.Text)  # free-text description of the device used
    t_stamp = db.Column(db.DateTime)  # timestamp of the experiment -- timezone unspecified, TODO confirm
    label = db.Column(db.Text)  # free-text label for the session
    sensors = db.relationship('Sensor', backref='experiment', lazy='dynamic')
    def __repr__(self):
        return '<Experiment %r>, <id %r>' % (self.hardware, self.id)
class Sensor(CRUDMixin, db.Model):
    # One row of raw sensor readings belonging to an Experiment. Column
    # names are upper-case presumably to mirror uploaded file headers --
    # TODO confirm against the upload code.
    id = db.Column(db.Integer, primary_key=True)
    SENSOR_TYPE = db.Column(db.Text)
    ACCELEROMETER_X = db.Column(db.Text)
    ACCELEROMETER_Y = db.Column(db.Text)
    ACCELEROMETER_Z = db.Column(db.Text)
    X_AXIS = db.Column(db.Text)
    Y_AXIS = db.Column(db.Text)
    Z_AXIS = db.Column(db.Text)
    LINEAR_ACCELERATION_X = db.Column(db.Text)
    LINEAR_ACCELERATION_Y = db.Column(db.Text)
    LINEAR_ACCELERATION_Z = db.Column(db.Text)
    GYROSCOPE_X = db.Column(db.Text)
    GYROSCOPE_Y = db.Column(db.Text)
    GYROSCOPE_Z = db.Column(db.Text)
    ORIENTATION_Z = db.Column(db.Text)
    ORIENTATION_X = db.Column(db.Text)
    ORIENTATION_Y = db.Column(db.Text)
    Time_since_start = db.Column(db.Text)
    state = db.Column(db.Text)
    timestamp = db.Column(db.DateTime)
    prediction = db.Column(db.Text)
    experiment_id = db.Column(db.Integer, db.ForeignKey('experiment.id'))
    def __init__(self, experiment, SENSOR_TYPE=None, ACCELEROMETER_X=None,
                 ACCELEROMETER_Y=None, ACCELEROMETER_Z=None,
                 X_AXIS=None, Y_AXIS=None,
                 Z_AXIS=None, LINEAR_ACCELERATION_X=None,
                 LINEAR_ACCELERATION_Y=None, LINEAR_ACCELERATION_Z=None,
                 GYROSCOPE_X=None, GYROSCOPE_Y=None,
                 GYROSCOPE_Z=None,
                 ORIENTATION_Z=None, ORIENTATION_X=None,
                 ORIENTATION_Y=None,
                 Time_since_start=None, state=None,
                 timestamp=None, prediction=None
                 ):
        """Create a reading tied to *experiment*; all data fields optional."""
        self.SENSOR_TYPE = SENSOR_TYPE
        self.ACCELEROMETER_X = ACCELEROMETER_X
        self.ACCELEROMETER_Y = ACCELEROMETER_Y
        self.ACCELEROMETER_Z = ACCELEROMETER_Z
        self.X_AXIS = X_AXIS
        self.Y_AXIS = Y_AXIS
        self.Z_AXIS = Z_AXIS
        self.LINEAR_ACCELERATION_X = LINEAR_ACCELERATION_X
        self.LINEAR_ACCELERATION_Y = LINEAR_ACCELERATION_Y
        self.LINEAR_ACCELERATION_Z = LINEAR_ACCELERATION_Z
        self.GYROSCOPE_X = GYROSCOPE_X
        self.GYROSCOPE_Y = GYROSCOPE_Y
        self.GYROSCOPE_Z = GYROSCOPE_Z
        self.ORIENTATION_Z = ORIENTATION_Z
        self.ORIENTATION_X = ORIENTATION_X
        self.ORIENTATION_Y = ORIENTATION_Y
        self.Time_since_start = Time_since_start
        self.state = state
        self.timestamp = timestamp
        self.prediction = prediction  # not in uploaded files
        self.experiment_id = experiment.id  # not in uploaded files
    def __repr__(self):
        # BUG FIX: self.timestamp is a datetime (or None), so the old
        # '{:d}' integer format spec raised at repr time; use plain '{}'.
        return '<Timestamp {}>'.format(self.timestamp)
class Site(CRUDMixin, db.Model):
    # A tracked website; Visit rows point back here via tracking_site.id.
    __tablename__ = 'tracking_site'
    base_url = db.Column(db.String)
    visits = db.relationship('Visit', backref='site', lazy='select')
    user_id = db.Column(db.Integer, db.ForeignKey('users_user.id'))  # owning user
    def __repr__(self):
        return '<Site {:d} {}>'.format(self.id, self.base_url)
    def __str__(self):
        return self.base_url
class Visit(CRUDMixin, db.Model):
    # One tracked page visit/event on a Site.
    __tablename__ = 'tracking_visit'
    browser = db.Column(db.String)
    date = db.Column(db.DateTime)
    event = db.Column(db.String)
    url = db.Column(db.String)
    ip_address = db.Column(db.String)
    location = db.Column(db.String)
    latitude = db.Column(db.Numeric)
    longitude = db.Column(db.Numeric)
    site_id = db.Column(db.Integer, db.ForeignKey('tracking_site.id'))
    def __repr__(self):
        # Assumes self.date is a datetime (strftime-style spec below).
        r = '<Visit for site ID {:d}: {} - {:%Y-%m-%d %H:%M:%S}>'
        return r.format(self.site_id, self.url, self.date)
|
azverkan/scons | test/Deprecated/SourceCode/CVS/CVS.py | Python | mit | 11,399 | 0.005527 | #!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Test fetching source files from CVS.
"""
import os
import TestSCons
# Regex matching: CVS output embeds variable paths and revision noise.
test = TestSCons.TestSCons(match = TestSCons.match_re_dotall)
test.write('SConscript', """
Environment(tools = ['CVS']).CVS('')
""")
# Expected deprecation warnings for the CVS() factory and SourceCode().
msg_cvs = """The CVS() factory is deprecated and there is no replacement."""
warn_cvs = test.deprecated_fatal('deprecated-build-dir', msg_cvs)
msg_sc = """SourceCode() has been deprecated and there is no replacement.
\tIf you need this function, please contact dev@scons.tigris.org."""
warn_sc = test.deprecated_wrap(msg_sc)
# A real cvs binary is required for everything past this point.
cvs = test.where_is('cvs')
if not cvs:
    test.skip_test("Could not find 'cvs'; skipping remaining tests.\n")
test.subdir('CVS', 'import', ['import', 'sub'], 'work1', 'work2')
# Platform-independent path strings used in the expected-output templates.
foo_aaa_in = os.path.join('foo', 'aaa.in')
foo_bbb_in = os.path.join('foo', 'bbb.in')
foo_ccc_in = os.path.join('foo', 'ccc.in')
foo_sub_ddd_in = os.path.join('foo', 'sub', 'ddd.in')
foo_sub_ddd_out = os.path.join('foo', 'sub', 'ddd.out')
foo_sub_eee_in = os.path.join('foo', 'sub', 'eee.in')
foo_sub_eee_out = os.path.join('foo', 'sub', 'eee.out')
foo_sub_fff_in = os.path.join('foo', 'sub', 'fff.in')
foo_sub_fff_out = os.path.join('foo', 'sub', 'fff.out')
foo_sub_all = os.path.join('foo', 'sub', 'all')
sub_SConscript = os.path.join('sub', 'SConscript')
sub_ddd_in = os.path.join('sub', 'ddd.in')
sub_ddd_out = os.path.join('sub', 'ddd.out')
sub_eee_in = os.path.join('sub', 'eee.in')
sub_eee_out = os.path.join('sub', 'eee.out')
sub_fff_in = os.path.join('sub', 'fff.in')
sub_fff_out = os.path.join('sub', 'fff.out')
sub_all = os.path.join('sub', 'all')
# Set up the CVS repository.
cvsroot = test.workpath('CVS')
os.environ['CVSROOT'] = cvsroot
test.run(program = cvs, arguments = 'init')
# Populate an 'import' tree and import it as module "foo".
test.write(['import', 'aaa.in'], "import/aaa.in\n")
test.write(['import', 'bbb.in'], "import/bbb.in\n")
test.write(['import', 'ccc.in'], "import/ccc.in\n")
test.write(['import', 'sub', 'SConscript'], """\
Import("env")
env.Cat('ddd.out', 'ddd.in')
env.Cat('eee.out', 'eee.in')
env.Cat('fff.out', 'fff.in')
env.Cat('all', ['ddd.out', 'eee.out', 'fff.out'])
""")
test.write(['import', 'sub', 'ddd.in'], "import/sub/ddd.in\n")
test.write(['import', 'sub', 'eee.in'], "import/sub/eee.in\n")
test.write(['import', 'sub', 'fff.in'], "import/sub/fff.in\n")
test.run(chdir = 'import',
         program = cvs,
         arguments = '-q import -m import foo v v-r')
# Test the most straightforward CVS checkouts, using the module name.
test.write(['work1', 'SConstruct'], """
SetOption('warn', 'deprecated-source-code')
import os
def cat(env, source, target):
target = str(target[0])
f = open(target, "wb")
for src in source:
f.write(open(str(src), "rb").read())
f.close()
env = Environment(ENV = { 'PATH' : os.environ['PATH'],
'EDITOR' : os.environ.get('EDITOR', 'ed') },
BUILDERS={'Cat':Builder(action=cat)})
env.Prepend(CVSFLAGS='-Q')
env.Cat('aaa.out', 'foo/aaa.in')
env.Cat('bbb.out', 'foo/bbb.in')
env.Cat('ccc.out', 'foo/ccc.in')
env.Cat('all', ['aaa.out', 'bbb.out', 'ccc.out'])
env.SourceCode('.', env.CVS(r'%(cvsroot)s'))
SConscript('foo/sub/SConscript', "env")
""" % locals())
test.subdir(['work1', 'foo'])
test.write(['work1', 'foo', 'bbb.in'], "work1/foo/bbb.in\n")
test.subdir(['work1', 'foo', 'sub',])
test.write(['work1', 'foo', 'sub', 'eee.in'], "work1/foo/sub/eee.in\n")
read_str = """\
cvs -Q -d %(cvsroot)s co foo/sub/SConscript
""" % locals()
build_str = """\
cvs -Q -d %(cvsroot)s co foo/aaa.in
cat(["aaa.out"], ["%(foo_aaa_in)s"])
cat(["bbb.out"], ["%(foo_bbb_in)s"])
cvs -Q -d %(cvsroot)s co foo/ccc.in
cat(["ccc.out"], ["%(foo_ccc_in)s"])
cat(["all"], ["aaa.out", "bbb.out", "ccc.out"])
cvs -Q -d %(cvsroot)s co foo/sub/ddd.in
cat(["%(foo_sub_ddd_out)s"], ["%(foo_sub_ddd_in)s"])
cat(["%(foo_sub_eee_out)s"], ["%(foo_sub_eee_in)s"])
cvs -Q -d %(cvsroot)s co foo/sub/fff.in
cat(["%(foo_sub_fff_out)s"], ["%(foo_sub_fff_in)s"])
cat(["%(foo_sub_all)s"], ["%(foo_sub_ddd_out)s", "%(foo_sub_eee_out)s", "%(foo_sub_fff_out)s"])
""" % locals()
stdout = test.wrap_stdout(read_str = read_str, build_str = build_str)
test.run(chdir = 'work1',
arguments = '.',
stdout = TestSCons.re_escape(stdout),
stderr = warn_cvs + warn_sc)
# Checking things back out of CVS apparently messes with the line
# endings, so read the result files in non-binary mode.
test.must_match(['work1', 'all'],
"import/aaa.in\nwork1/foo/bbb.in\nimport/ccc.in\n",
mode='r')
test.must_match(['work1', 'foo', 'sub', 'all'],
"import/sub/ddd.in\nwork1/foo/sub/eee.in\nimport/sub/fff.in\n",
mode='r')
test.must_be_writable(test.workpath('work1', 'foo', 'sub', 'SConscript'))
test.must_be_writable(test.workpath('work1', 'foo', 'aaa.in'))
test.must_be_writable(test.workpath('work1', 'foo', 'ccc.in'))
test.must_be_writable(test.workpath('work1', 'foo', 'sub', 'ddd.in'))
test.must_be_writable(test.workpath('work1', 'foo', 'sub', 'fff.in'))
# Test CVS checkouts when the module name is specified.
test.write(['work2', 'SConstruct'], """
SetOption('warn', 'deprecated-source-code')
import os
def cat(env, source, target):
target = str(target[0])
f = open(target, "wb")
for src in source:
f.write(open(str(src), "rb").read())
f.close()
env = Environment(ENV = { 'PATH' : os.environ['PATH'],
'EDITOR' : os.environ.get('EDITOR', 'ed') },
BUILDERS={'Cat':Builder(action=cat)})
env.Prepend(CVSFLAGS='-q')
env.Cat('aaa.out', 'aaa.in')
env.Cat('bbb.out', 'bbb.in')
env.Cat('ccc.out', 'ccc.in')
env.Cat('all', ['aaa.out', 'bbb.out', 'ccc.out'])
env.SourceCode('.', env.CVS(r'%(cvsroot)s', 'foo'))
SConscript('sub/SConscript', "env")
""" % locals())
test.write(['work2', 'bbb.in'], "work2/bbb.in\n")
test.subdir(['work2', 'sub'])
test.write(['work2', 'sub', 'eee.in'], "work2/sub/eee.in\n")
read_str = """\
cvs -q -d %(cvsroot)s co -d sub foo/sub/SConscript
U sub/SConscript
""" % locals()
build_str = """\
cvs -q -d %(cvsroot)s co -d . foo/aaa.in
U ./aaa.in
cat(["aaa.out"], ["aaa.in"])
cat(["bbb.out"], ["bbb.in"])
cvs -q -d %(cvsroot)s co -d . foo/ccc.in
U ./ccc.in
cat(["ccc.out"], ["ccc.in"])
cat(["all"], ["aaa.out", "bbb.out", "ccc.out"])
cvs -q -d %(cvsroot)s co -d sub foo/sub/ddd.in
U sub/ddd.in
cat(["%(sub_ddd_out)s"], ["%(sub_ddd_in)s"])
cat(["%(sub_eee_out)s"], ["%(sub_eee_in)s"])
cvs -q -d %(cvsroot)s co -d sub foo/sub/fff.in
U sub/fff.in
cat(["%(sub_fff_out)s"], ["%(sub_fff_in)s"])
cat(["%(sub_all)s"], ["%(sub_ddd_out)s", "%(sub_eee_out)s", "%(sub_fff_out)s"])
""" % locals()
stdout = test.wrap_stdout(read_str = read_str, build_str = build_str)
test.run(chdir = 'work2',
arguments = '.',
stdout = TestSCons.re_escape(stdout),
stderr = warn_cvs + warn_sc)
# Checking things back out of CVS apparently messes with the line
# endings, so read the result files in non-binary mode.
test.must_match(['work2', 'all'],
"import/aaa.in\nwo |
tylertian/Openstack | openstack F/cinder/cinder/api/openstack/volume/views/limits.py | Python | apache-2.0 | 3,478 | 0.000288 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010-2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from cinder.openstack.common import timeutils
class ViewBuilder(object):
    """OpenStack API base limits view builder."""

    def build(self, rate_limits, absolute_limits):
        """Assemble the full "limits" response body.

        :param rate_limits: list of per-URI rate-limit dicts
                            (see _build_rate_limits for the expected keys)
        :param absolute_limits: dict mapping quota name -> value
        :returns: dict shaped as {"limits": {"rate": ..., "absolute": ...}}
        """
        rate_limits = self._build_rate_limits(rate_limits)
        absolute_limits = self._build_absolute_limits(absolute_limits)

        output = {
            "limits": {
                "rate": rate_limits,
                "absolute": absolute_limits,
            },
        }

        return output

    def _build_absolute_limits(self, absolute_limits):
        """Builder for absolute limits

        absolute_limits should be given as a dict of limits.
        For example: {"ram": 512, "gigabytes": 1024}.

        Unknown quota names and None values are skipped; one quota may map
        to several API-facing limit names (e.g. metadata_items).
        """
        limit_names = {
            "ram": ["maxTotalRAMSize"],
            "instances": ["maxTotalInstances"],
            "cores": ["maxTotalCores"],
            "gigabytes": ["maxTotalVolumeGigabytes"],
            "volumes": ["maxTotalVolumes"],
            "key_pairs": ["maxTotalKeypairs"],
            "floating_ips": ["maxTotalFloatingIps"],
            "metadata_items": ["maxServerMeta", "maxImageMeta"],
            "injected_files": ["maxPersonality"],
            "injected_file_content_bytes": ["maxPersonalitySize"],
        }
        limits = {}
        # .items() (rather than Python-2-only iteritems()) works on both
        # Python 2 and 3; the inner loop no longer shadows the quota name.
        for quota_name, value in absolute_limits.items():
            if quota_name in limit_names and value is not None:
                for limit_name in limit_names[quota_name]:
                    limits[limit_name] = value
        return limits

    def _build_rate_limits(self, rate_limits):
        # Group individual limit entries by their (URI, regex) pair.
        limits = []
        for rate_limit in rate_limits:
            _rate_limit_key = None
            _rate_limit = self._build_rate_limit(rate_limit)

            # check for existing key
            for limit in limits:
                if (limit["uri"] == rate_limit["URI"] and
                        limit["regex"] == rate_limit["regex"]):
                    _rate_limit_key = limit
                    break

            # ensure we have a key if we didn't find one
            if not _rate_limit_key:
                _rate_limit_key = {
                    "uri": rate_limit["URI"],
                    "regex": rate_limit["regex"],
                    "limit": [],
                }
                limits.append(_rate_limit_key)

            _rate_limit_key["limit"].append(_rate_limit)

        return limits

    def _build_rate_limit(self, rate_limit):
        # Convert the epoch "resetTime" into an ISO 8601 "next-available".
        _get_utc = datetime.datetime.utcfromtimestamp
        next_avail = _get_utc(rate_limit["resetTime"])
        return {
            "verb": rate_limit["verb"],
            "value": rate_limit["value"],
            "remaining": int(rate_limit["remaining"]),
            "unit": rate_limit["unit"],
            "next-available": timeutils.isotime(at=next_avail),
        }
|
zero-rp/miniblink49 | third_party/WebKit/Tools/Scripts/webkitpy/layout_tests/port/mock_drt.py | Python | apache-2.0 | 12,040 | 0.002243 | # Copyright (c) 2012 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the Google name nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
This is an implementation of the Port interface that overrides other
ports and changes the Driver binary to "MockDRT".
The MockDRT objects emulate what a real DRT would do. In particular, they
return the output a real DRT would return for a given test, assuming that
test actually passes (except for reftests, which currently cause the
MockDRT to crash).
"""
import base64
import logging
import optparse
import os
import sys
import types
# Since we execute this script directly as part of the unit tests, we need to ensure
# that Tools/Scripts is in sys.path for the next imports to work correctly.
script_dir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
if script_dir not in sys.path:
sys.path.append(script_dir)
from webkitpy.common import read_checksum_from_png
from webkitpy.common.system.systemhost import SystemHost
from webkitpy.layout_tests.port.driver import DriverInput, DriverOutput
from webkitpy.layout_tests.port.factory import PortFactory
_log = logging.getLogger(__name__)
class MockDRTPort(object):
    """Port that wraps a real platform port but swaps its driver for MockDRT.

    All unknown attribute access is delegated to the wrapped port; only the
    driver construction and environment-dependent steps are overridden.
    """
    port_name = 'mock'
    @classmethod
    def determine_full_port_name(cls, host, options, port_name):
        return port_name
    def __init__(self, host, port_name, **kwargs):
        # Build the real port for the underlying platform ("mock-<name>")
        # and monkeypatch its _driver_class so drivers launch this file.
        self.__delegate = PortFactory(host).get(port_name.replace('mock-', ''), **kwargs)
        self.__delegate_driver_class = self.__delegate._driver_class
        self.__delegate._driver_class = types.MethodType(self._driver_class, self.__delegate)
    def __getattr__(self, name):
        # Everything not explicitly overridden is forwarded to the real port.
        return getattr(self.__delegate, name)
    def check_build(self, needs_http, printer):
        # No real build to check for the mock port.
        return True
    def check_sys_deps(self, needs_http):
        return True
    def _driver_class(self, delegate):
        return self._mocked_driver_maker
    def _mocked_driver_maker(self, port, worker_number, pixel_tests, no_timeout=False):
        # Create a real driver, then rewrite its command line so the DRT
        # binary is replaced by "python <this file> --platform <name>".
        path_to_this_file = self.host.filesystem.abspath(__file__.replace('.pyc', '.py'))
        driver = self.__delegate_driver_class()(self, worker_number, pixel_tests, no_timeout)
        driver.cmd_line = self._overriding_cmd_line(driver.cmd_line,
                                                    self.__delegate._path_to_driver(),
                                                    sys.executable,
                                                    path_to_this_file,
                                                    self.__delegate.name())
        return driver
    @staticmethod
    def _overriding_cmd_line(original_cmd_line, driver_path, python_exe, this_file, port_name):
        # Wrap the original cmd_line callable; splice the mock invocation in
        # place of the real driver binary.
        def new_cmd_line(pixel_tests, per_test_args):
            cmd_line = original_cmd_line(pixel_tests, per_test_args)
            index = cmd_line.index(driver_path)
            cmd_line[index:index + 1] = [python_exe, this_file, '--platform', port_name]
            return cmd_line
        return new_cmd_line
    # The mock port needs no helper processes or servers; these are no-ops.
    def start_helper(self):
        pass
    def start_http_server(self, additional_dirs, number_of_servers):
        pass
    def start_websocket_server(self):
        pass
    def acquire_http_lock(self):
        pass
    def stop_helper(self):
        pass
    def stop_http_server(self):
        pass
    def stop_websocket_server(self):
        pass
    def release_http_lock(self):
        pass
    def _make_wdiff_available(self):
        self.__delegate._wdiff_available = True
    def setup_environ_for_server(self, server_name):
        env = self.__delegate.setup_environ_for_server()
        # We need to propagate PATH down so the python code can find the checkout.
        env['PATH'] = os.environ['PATH']
        return env
    def lookup_virtual_test_args(self, test_name):
        suite = self.__delegate.lookup_virtual_suite(test_name)
        return suite.args + ['--virtual-test-suite-name', suite.name, '--virtual-test-suite-base', suite.base]
    def lookup_virtual_reference_args(self, test_name):
        suite = self.__delegate.lookup_virtual_suite(test_name)
        return suite.reference_args + ['--virtual-test-suite-name', suite.name, '--virtual-test-suite-base', suite.base]
def main(argv, host, stdin, stdout, stderr):
    """Entry point: parse the DRT-style command line and run the mock loop."""
    opts, remaining_args = parse_options(argv)
    mock_drt = MockDRT(opts, remaining_args, host, stdin, stdout, stderr)
    return mock_drt.run()
def parse_options(argv):
    """Extract the MockDRT-specific flags from a DRT command line.

    We do custom arg parsing instead of using the optparse module
    because we don't want to have to list every command line flag DRT
    accepts, and optparse complains about unrecognized flags.

    Returns (options, argv): the four recognized option values (None when
    absent) and the untouched argument list.
    """
    def get_arg(arg_name):
        # Value of a "--flag value" pair; None when the flag is absent or
        # (bug fix) when the flag is the last token and has no value,
        # which previously raised IndexError.
        if arg_name in argv:
            index = argv.index(arg_name)
            if index + 1 < len(argv):
                return argv[index + 1]
        return None

    options = optparse.Values({
        'actual_directory': get_arg('--actual-directory'),
        'platform': get_arg('--platform'),
        'virtual_test_suite_base': get_arg('--virtual-test-suite-base'),
        'virtual_test_suite_name': get_arg('--virtual-test-suite-name'),
    })
    return (options, argv)
class MockDRT(object):
def __init__(self, options, args, host, stdin, stdout, stderr):
self._options = options
self._args = args
self._host = host
self._stdout = stdout
self._stdin = stdin
self._stderr = stderr
port_name = None
if options.platform:
port_name = options.platform
self._port = PortFactory(host).get(port_name=port_name, options=options)
self._driver = self._port.create_driver(0)
def run(self):
while True:
line = self._stdin.readline()
if not line:
return 0
driver_input = self.input_from_line(line)
dirname, basename = self._port.split_test(driver_input.test_name)
is_reftest = (self._port.reference_files(driver_input.test_name) or
self._port.is_reference_html_file(self._port._filesystem, dirname, basename))
output = self.output_for_test(driver_input, is_reftest)
self.write_test_output(driver_input, output, is_reftest)
def input_from_line(self, line):
vals = line.strip().split("'")
uri = vals[0]
checksum = None
should_run_pixel_tests = False
if len(vals) == 2 and vals[1] == '--pixel-test':
should_run_pixel_tests = True
elif len(vals) == 3 and vals[1] == '--pixel-test':
should_run_pixel_tests = True
checksum = vals[2]
elif len(vals) != 1:
raise NotImplementedError
if uri.startswith('http://') or uri.startswith('https://'):
|
IEEEDTU/CMS | Course/migrations/0006_auto_20160410_1530.py | Python | mit | 572 | 0.001748 | # -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-04-10 10:00
from __future__ import unicode_liter | als
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration: redefine Department.hod as a ForeignKey
    to Profiler.Faculty (reverse accessor 'head_of_dept').

    NOTE: this block was damaged by a dataset-dump separator artifact on
    the 'field=' line; the tokens have been reconstructed.
    """

    dependencies = [
        ('Course', '0005_coursegroup'),
    ]

    operations = [
        migrations.AlterField(
            model_name='department',
            name='hod',
            # NOTE(review): default=False on a ForeignKey looks suspicious,
            # but it is kept verbatim — migrations must not be re-written.
            field=models.ForeignKey(default=False, on_delete=django.db.models.deletion.CASCADE, related_name='head_of_dept', to='Profiler.Faculty'),
        ),
    ]
skitazaki/python-school-ja | src/datestring_convert.py | Python | mit | 783 | 0 | #!/usr/bin/env python
# | -*- coding: utf-8 -*-
import datetime
def datestring_convert(s):
    """Parse a Subversion commit-log timestamp into a datetime.

    Expects "YYYY-MM-DD HH:MM:SS"; any trailing timezone offset
    (e.g. "+0900") is silently ignored.
    """
    assert type(s) == str, "Argument must be string"
    parts = s.split()
    year, month, day = (int(v) for v in parts[0].split("-"))
    hour, minute, second = (int(v) for v in parts[1].split(":"))
    return datetime.datetime(year, month, day, hour, minute, second)
if __name__ == '__main__':
    # Demo: compare two timestamps. TEST_2 carries a "+0900" offset that
    # datestring_convert silently drops, so the reported DIFF is naive.
    # (A stray dump artifact after TEST_2 has been removed.)
    TEST_1 = "2012-01-14 07:56:02"
    TEST_2 = "2012-01-14 04:46:30 +0900"
    d1 = datestring_convert(TEST_1)
    d2 = datestring_convert(TEST_2)
    diff = d1 - d2
    print("{} ==> {}".format(TEST_1, TEST_2))
    print("DIFF: days={}, seconds={}".format(diff.days, diff.seconds))
# vim: set et ts=4 sw=4 cindent fileencoding=utf-8 :
|
Bekt/tweetement | src/tqueue.py | Python | mit | 4,586 | 0 | import logging
import service
import webapp2
from models import (Query, Status, Method)
from collections import Counter
from google.appengine.api import mail
from google.appengine.ext import ndb
class QueueHandler(webapp2.RequestHandler):
    """Task-queue webhook that runs queued query-expansion jobs."""
    def pop(self):
        """Pops a query from the queue and performs query expansion.

        Reads 'qid' and the user's OAuth token pair from the request,
        then drives the Query entity through
        Pending -> Working -> (Done | Cancelled).
        """
        qid = int(self.request.get('qid'))
        auth_info = (self.request.get('oauth_token'),
                     self.request.get('oauth_token_secret'))
        result = Query.get_by_id(qid)
        # Only jobs still marked Pending are processed; a missing or
        # already-claimed entity is logged and dropped.
        if not result or result.status != Status.Pending:
            logging.warning('Query not pending. qid={}'.format(qid))
            return
        logging.info('Queue pop: {}'.format(qid))
        result.status = Status.Working
        result.put()
        try:
            expand_query(qid, auth_info)
            result.status = Status.Done
        except Exception as e:
            # NOTE(review): e.message is Python 2 only.
            logging.exception(e.message)
            result.status = Status.Cancelled
            result.status_msg = e.message
        # Persist the final status (Done or Cancelled) in either case.
        result.put()
        # Email the requester only on success.
        if result.status == Status.Done:
            notify(qid)
def expand_query(qid, auth_info):
    """Expand the given query and store the candidate result Methods.

    Fetches tweets for the original query, derives its top hashtags and
    keywords, then builds up to seven ranked query variants
    (versions 0-6) and saves them as Method entities.

    Fixes in this revision:
    - reconstructed tokens damaged by dataset-dump separator artifacts;
    - version 6 previously stored the literal string 'q6' as its query
      instead of the actual expanded query (matching versions 0-5);
    - methods 1-5 are built through one shared loop instead of five
      copy-pasted stanzas.
    """
    result = Query.get_by_id(qid)
    q = result.query
    serv = service.Service(auth_info[0], auth_info[1])
    tweets = serv.fetch(q, limit=195)
    hashtags = serv.top_hashtags(tweets, limit=10)
    keywords = serv.top_keywords(tweets, limit=10, exclude=set(q.split()))

    methods = []
    # Method-0: the original query itself (first ten results).
    if tweets:
        methods.append(Method(qid=qid, version=0, query=q,
                              status_ids=_extract_ids(tweets[:10])))

    # Methods 1-5: append the top hashtags/keywords in fixed combinations.
    candidates = []
    if hashtags:
        candidates.append((1, q + ' ' + hashtags[0]))
    if keywords:
        candidates.append((2, q + ' ' + keywords[0]))
    if hashtags and keywords:
        candidates.append((3, q + ' ' + hashtags[0] + ' ' + keywords[0]))
    if len(hashtags) > 1:
        candidates.append((4, q + ' ' + hashtags[0] + ' ' + hashtags[1]))
    if len(keywords) > 1:
        candidates.append((5, q + ' ' + keywords[0] + ' ' + keywords[1]))
    for version, query in candidates:
        matches = serv.fetch(query, limit=10)
        if matches:
            methods.append(Method(qid=qid, version=version, query=query,
                                  status_ids=_extract_ids(matches)))

    # Method-6: broad OR-query, re-ranked by how many of the top
    # hashtags/keywords each tweet contains; keep the ten best.
    if hashtags and keywords:
        q6 = '{} {} OR {} {}'.format(q, hashtags[0], q, keywords[0])
        counter = Counter()
        for tweet in serv.fetch(q6, limit=195):
            for token in set(tweet.text.split()):
                ok, text = service._token_okay(token)
                if ok and (text in hashtags or text in keywords):
                    counter[tweet.id] += 1
        best = counter.most_common(10)
        if best:
            methods.append(Method(qid=qid, version=6, query=q6,
                                  status_ids=[tweet_id for tweet_id, _ in best]))

    method_keys = ndb.put_multi(methods)
    result.hashtags = hashtags
    result.keywords = keywords
    result.methods = method_keys
    # NOTE(review): `result` is not put() here; this appears to rely on the
    # caller's put() hitting the same in-context-cached ndb entity — confirm.
def _extract_ids(tweets):
    """Return the status id of each tweet, preserving input order."""
    return [tweet.id for tweet in tweets]
def notify(qid):
    """Email the requester that results for their query are ready.

    Does nothing when no address was recorded; delivery failures are
    logged and otherwise ignored (best effort).
    """
    record = Query.get_by_id(qid)
    if not record.email:
        return
    template = """
    Expanded search results for "{query}" are available at {url}
    Don't forget to provide feedback at the above URL.
    """
    try:
        result_url = 'http://tweetement.com/#/result/' + str(qid)
        mail.send_mail(sender='Tweetement <bekt17@gmail.com>',
                       to=record.email,
                       subject='Results for %s are ready' % record.query,
                       body=template.format(query=record.query, url=result_url))
    except Exception as err:
        # Best effort: a mail failure must never break the pipeline.
        logging.exception(err)
|
Rastagong/A-Scholar-In-The-Woods | Releases/Post Compo/Windows build/play.py | Python | mit | 507 | 0.00789 | # -*-coding:iso-8859-1 -*
try: #If needed, the _path module must add the path to the Narro directory to sys.path so that the Narro Engine can be imported as a package
import _path
except:
pass
from constantes import *
from narro.main import *
from narro.constantes import *
from gestionnaireEvenements import *
if __name__ == "__main__":
    # Boot the Narro engine, plug in this game's event handler, and run.
    # (Tokens "je | u" / "execu | ter" were dump artifacts; reconstructed.)
    jeu = Narro()
    jeu.inclureGestionnaire(MonGestionnaireEvenements(jeu))
    jeu.executer()
    if REDIRECTION_FICHIER_ERREURS:
        # REDIRECTION_FICHIER_ERREURS presumably means stderr was redirected
        # to a file earlier — close it on exit. TODO confirm in constantes.
        sys.stderr.close()
|
laurentb/weboob | modules/bred/module.py | Python | lgpl-3.0 | 3,023 | 0.000662 | # -*- coding: utf-8 -*-
# Copyright(C) 2012-2014 Romain Bignon
#
# This file is part of a weboob module.
#
# This weboob module is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This weboob module is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this weboob module. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from weboob.capabilities.bank import CapBankWealth, AccountNotFound, Account
from weboob.capabilities.base import find_object
from weboob.capabilities.profile import CapProfile
from weboob.tools.backend import Module, BackendConfig
from weboob.tools.value import ValueBackendPassword, Value
from .bred import BredBrowser
from .dispobank import DispoBankBrowser
__all__ = ['BredModule']
class BredModule(Module, CapBankWealth, CapProfile):
    """Weboob banking backend for BRED, with a DispoBank variant.

    NOTE: two lines of this block were damaged by dataset-dump separator
    artifacts; the tokens have been reconstructed.
    """
    NAME = 'bred'
    MAINTAINER = u'Romain Bignon'
    EMAIL = 'romain@weboob.org'
    VERSION = '2.1'
    DESCRIPTION = u'Bred'
    LICENSE = 'LGPLv3+'
    CONFIG = BackendConfig(
        ValueBackendPassword('login', label='Identifiant', masked=False),
        ValueBackendPassword('password', label='Mot de passe'),
        Value('website', label="Site d'accès", default='bred',
              choices={'bred': 'BRED', 'dispobank': 'DispoBank'}),
        Value('accnum', label='Numéro du compte bancaire (optionnel)', default='', masked=False),
    )

    # Maps the 'website' config value to its browser implementation.
    BROWSERS = {
        'bred': BredBrowser,
        'dispobank': DispoBankBrowser,
    }

    def create_default_browser(self):
        """Instantiate the browser matching the configured website."""
        self.BROWSER = self.BROWSERS[self.config['website'].get()]
        return self.create_browser(
            # Account number: strip spaces, left-pad with zeros to 11 digits.
            self.config['accnum'].get().replace(' ', '').zfill(11),
            self.config['login'].get(),
            self.config['password'].get(),
            weboob=self.weboob,
        )

    def iter_accounts(self):
        return self.browser.get_accounts_list()

    def get_account(self, _id):
        return find_object(self.browser.get_accounts_list(), id=_id, error=AccountNotFound)

    def iter_history(self, account):
        return self.browser.get_history(account)

    def iter_coming(self, account):
        # Coming = scheduled/not-yet-booked operations.
        return self.browser.get_history(account, coming=True)

    def iter_investment(self, account):
        return self.browser.get_investment(account)

    def get_profile(self):
        return self.browser.get_profile()

    def fill_account(self, account, fields):
        # Lazy field filling is only implemented for the BRED website.
        if self.config['website'].get() != 'bred':
            return
        self.browser.fill_account(account, fields)

    OBJECTS = {
        Account: fill_account,
    }
|
etingof/talks | pyvo-optional-static-typing/code/12-add-annotations.py | Python | gpl-3.0 | 331 | 0 | """
This code will fail at runtime...
Could you help `mypy` catching the problem at compile time?
"""
def sum_numbers(*n: float) -> float:
    """Sum up any number of numeric arguments.

    Annotating the variadic parameter as ``*n: float`` lets mypy reject
    calls that pass non-numeric arguments (e.g. ``sum_numbers('4', 5)``),
    which is exactly the failure this exercise asks to catch statically.
    """
    return sum(n)
if __name__ == '__main__':
    sum_numbers(1, 2.0)  # this is not a bug: both arguments are numbers
    sum_numbers('4', 5)  # this is a bug — mypy catches it once *n is annotated
|
TwilioDevEd/api-snippets | rest/messages/sms-handle-callback/sms-handle-callback.7.x.py | Python | mit | 452 | 0 | from flask import Flask, request
import logging
logging.basicConfig(level=logging.INFO)
app = Flask(__name__)
@app.route("/MessageStatus", methods=['POST'])
def incoming_sms():
    """Twilio message-status callback: log the SID and delivery status.

    (Tokens "@app | .route" / "message_si | d" were dump artifacts;
    reconstructed.)
    """
    message_sid = request.values.get('MessageSid', None)
    message_status = request.values.get('MessageStatus', None)
    logging.info('SID: {}, Status: {}'.format(message_sid, message_status))
    # 204 No Content: acknowledge the callback without a response body.
    return ('', 204)
if __name__ == "__main__":
    # Run the Flask development server in debug mode (local testing only).
    app.run(debug=True)
|
sknepneklab/SAMoS | configurations/MakeConfigurations/TissueWithObstacle/circle.py | Python | gpl-3.0 | 826 | 0.021792 | from obstacle import *
class Circle(Obstacle):
    """Circular obstacle: a ring of equally spaced boundary particles.

    NOTE: two lines of this block were damaged by dataset-dump separator
    artifacts ("s | elf.dens", "p.v = | np.array"); reconstructed here.
    """

    def __init__(self, Rc, dens, R):
        """Circle centred at Rc with perimeter line density dens, radius R."""
        Obstacle.__init__(self, Rc, dens)
        if R <= 0:
            # ValueError is more precise than bare Exception and is still
            # caught by any existing 'except Exception' handler.
            raise ValueError('Circle radius has to be positive')
        self.R = R

    def make(self, offset=0):
        """Populate self.pos with ring particles; ids start at *offset*."""
        # Particle count needed to realise the requested line density.
        N = int(np.round(2*np.pi*self.R*self.dens))
        # Angles 0 .. 2*pi*(N-1)/N. Explicit float division: with integer
        # 1/N (Python 2) the endpoint collapsed to 2*pi, duplicating the
        # first point.
        phi = np.linspace(0, 2*np.pi*(1.0 - 1.0/N), N)
        x = self.Rc[0] + self.R*np.cos(phi)
        y = self.Rc[1] + self.R*np.sin(phi)
        z = np.zeros(N)
        for i in range(N):
            p = Particle(i + offset)
            p.radius = 1.0
            p.type = self.obstacle_type
            p.r = np.array([x[i], y[i], z[i]])
            p.v = np.array([0, 0, 0])
            p.n = np.array([0, 0, 0])
            p.in_tissue = 0
            p.boundary = 0
            self.pos.append(p)
|
usrix/alfred-verbizr-workflow | verbizr.py | Python | gpl-2.0 | 1,716 | 0.020979 | # Library from https://github.com/nikipore/alfred-python
import alfred
import re
import unicodedata
import xml.dom
import xml.dom.minidom
import time
# XML verbs db
conjFR = xml.dom.minidom.parse('conjugation-fr.xml')
verbsFR = xml.dom.minidom.parse('verbs-fr.xml')
conjList = conjFR.getElementsByTagName('template')
verbList = verbsFR.getElementsByTagName('v')
usrInput = u'{query}'
pronoms =["je","tu","il/elle","nous","vous","ils/elles"]
results = []
# function to get template
def getTemplate(verb):
    """Return the conjugation template name for *verb*, or None.

    Scans the module-level ``verbList`` (parsed from verbs-fr.xml) for
    the entry whose <i> infinitive matches. The parameter was renamed
    from ``str`` (shadowed the builtin); the only visible call site is
    positional.
    """
    for v in verbList:
        infinitive = v.getElementsByTagName('i')[0].firstChild.nodeValue
        if infinitive.encode('utf-8') == verb.encode('utf-8'):
            # Read <t> only for the matching entry instead of on every row.
            return v.getElementsByTagName('t')[0].firstChild.nodeValue
    return None
#go for template
template = getTemplate(usrInput)
# check template value
if (template != None) :
templateSplit = template.split(':')
radical = usrInput.replace(templateSplit[1], "")
termina = templateSplit[1]
# loop to check conj list
for v in conjList :
line = v.attributes | ['name'].value
if template == line:
# we are in the verb
# we need to extract the verb
# from
momentoNode = v.getElementsByTagName('indicative')
# get each conj
for tiempoNode in momentoNode :
#for present
personas = tiempoNode.getElementsByTagName('present')
for personaNode in personas :
persona = personaNode.getElementsByTagName('i')
idx = 0
# let s add the verbs to an arrat for alfred use
for i in persona :
item = alfred.Item({'uid': idx, 'arg': radical + i.firstChild.nodeValue}, radical + i.firstChild.nodeValue, pronoms[idx])
results.append(item)
idx += 1
xml = alfred.xml(results)
alfred.write(xml) |
virtuald/exaile | plugins/ipconsole/ipconsoleprefs.py | Python | gpl-2.0 | 1,656 | 0.006039 | # | Copyright (C) 2012 Brian Parma
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 1, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from xlgui.preferences import widgets
from xl import xdg
from xl.nls import gettext as _
import os
name = _('IPython Console')
basedir = os.path.dirname(os.path.realpath(__file__))
ui = os.path.join(basedir, 'ipconsole_prefs.ui')
icon = 'utilities-terminal'
class OpacityPreference(widgets.ScalePreference):
    """Slider preference for the console window opacity (default 80.0)."""
    default = 80.0
    name = 'plugin/ipconsole/opacity'
class FontPreference(widgets.FontButtonPreference):
    """Font used by the console (default 'Monospace 10')."""
    default = 'Monospace 10'
    name = 'plugin/ipconsole/font'
class TextColor(widgets.ColorButtonPreference):
    """Console text (foreground) color."""
    default = 'lavender'
    name = 'plugin/ipconsole/text_color'
class BgColor(widgets.ColorButtonPreference):
    """Console background color."""
    default = 'black'
    name = 'plugin/ipconsole/background_color'
class Theme(widgets.ComboPreference):
    """IPython color theme chosen from a combo box (default 'Linux')."""
    default = 'Linux'
    name = 'plugin/ipconsole/iptheme'
class AutoStart(widgets.CheckPreference):
    """Whether to open the console automatically (default off)."""
    default = False
    name = 'plugin/ipconsole/autostart'
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.