| repo_name (stringlengths 5-100) | path (stringlengths 4-231) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (stringlengths 0-8.16k) | middle (stringlengths 3-512) | suffix (stringlengths 0-8.17k) |
|---|---|---|---|---|---|---|---|---|
| oliver-sanders/cylc | cylc/flow/scripts/play.py | Python | gpl-3.0 | 999 | 0 |
#!/usr/bin/env python3
# THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE.
# Copyright (C) NIWA & British Crown (Met Office) & Contributors.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# (hack to import the __doc__ from another file)
# type: ignore
from cylc.flow.scheduler_cli import ( # noqa: F401
play as main,
PLAY_DOC as __doc__
)
# CLI of "cylc play". See cylc.flow.scheduler_cli for details.
| mozilla/bedrock | tests/pages/regions/download_button.py | Python | mpl-2.0 | 947 | 0.003168 |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
from selenium.webdriver.common.by import By
from pages.base import BaseRegion
class DownloadButton(BaseRegion):
_download_link_locator = (By.CSS_SELECTOR, ".download-link")
@property
def platform_link(self):
els = [el for el in self.find_elements(*self._download_link_locator) if el.is_displayed()]
assert len(els) == 1, "Expected one platform link to be displayed"
return els[0]
@property
def is_displayed(self):
return self.root.is_displayed() and self.platform_link.is_displayed() or False
@property
def is_transitional_link(self):
return "/firefox/download/thanks/" in self.platform_link.get_attribute("href")
def click(self):
self.platform_link.click()
| Vito2015/pyextend | pyextend/core/math.py | Python | gpl-2.0 | 643 | 0.001715 |
# coding: utf-8
"""
pyextend.core.math
~~~~~~~~~~~~~~~~~~
pyextend core math tools.
:copyright: (c) 2016 by Vito.
:license: GNU, see LICENSE for more details.
"""
def isprime(n):
"""Check the number is prime value. if prime value returns True, not False."""
n = abs(int(n))
if n < 2:
return False
if n == 2:
return True
if not n & 1:
return False
# In general, a positive integer n is prime if no integer from 2 to sqrt(n) divides it evenly.
for x in range(3, int(n ** 0.5)+1, 2):
if n % x == 0:
return False
return True
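# Example values, for illustration: isprime(2) -> True, isprime(9) -> False,
# isprime(97) -> True.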
| SorenOlegnowicz/tracker | tracker/api/views.py | Python | agpl-3.0 | 739 | 0.005413 |
from django.shortcuts import render
from rest_framework import generics, serializers
from beacon.models import Inquiry, Reply
class InquirySerializer(serializers.ModelSerializer):
class Meta:
model = Inquiry
class ReplySerializer(serializers.ModelSerializer):
class Meta:
model = Reply
class InquiryUpdateAPIView(generics.RetrieveUpdateAPIView):
serializer_class = InquirySerializer
queryset = Inquiry.objects.all()
def dispatch(self, request, *args, **kwargs):
print(request)
print(request.body)
return super().dispatch(request,*args,**kwargs)
class ReplyListAPIView(generics.RetrieveAPIView):
serializer_class = ReplySerializer
queryset = Reply.objects.all()
| francisrod01/wrangling_mongodb | lesson 9/using_push.py | Python | mit | 2,530 | 0.004348 |
#!~/envs/udacity_python3_mongodb
"""
$push is similar to $addToSet. The difference is that rather than accumulating only unique values
it aggregates all values into an array.
Using an aggregation query, count the number of tweets for each user. In the same $group stage,
use $push to accumulate all the tweet texts for each user. Limit your output to the 5 users
with the most tweets.
Your result documents should include only the fields:
"_id" (screen name of user),
"count" (number of tweets found for the user),
"tweet_texts" (a list of the tweet texts found for the user).
Please modify only the 'make_pipeline' function so that it creates and returns an aggregation
pipeline that can be passed to the MongoDB aggregate function. As in our examples in this lesson,
the aggregation pipeline should be a list of one or more dictionary objects.
Please review the lesson examples if you are unsure of the syntax.
Your code will be run against a MongoDB instance that we have provided. If you want to run this code
locally on your machine, you have to install MongoDB, download and insert the dataset.
For instructions related to MongoDB setup and datasets please see Course Materials.
Please note that the dataset you are using here is a smaller version of the twitter dataset used in
examples in this lesson. If you attempt some of the same queries that we looked at in the lesson
examples, your results will be different.
"""
def get_db(db_name):
from pymongo import MongoClient
client = MongoClient('localhost:27017')
db = client[db_name]
return db
def make_pipeline():
pipeline = [
{
"$match": {
"user.statuses_count": {"$gte": 0}
}
},
{
"$group": {
"_id": "$user.screen_name",
"count": {"$sum": 1},
"tweet_texts": {"$push": "$text"}
}
},
{
"$sort": {"count": -1}
},
{
"$limit": 5
}
]
return pipeline
def aggregate(db, pipeline):
return [doc for doc in db.twitter.aggregate(pipeline)]
if __name__ == '__main__':
db = get_db('twitter')
pipeline = make_pipeline()
result = aggregate(db, pipeline)
import pprint
pprint.pprint(result)
assert len(result) == 5
assert result[0]["count"] > result[4]["count"]
sample_tweet_text = u'Take my money! #liesguystell http://movie.sras2.ayorganes.com'
assert result[4]["tweet_texts"][0] == sample_tweet_text
| jim-easterbrook/python-gphoto2 | examples/read-exif-exifread.py | Python | gpl-3.0 | 3,776 | 0.001589 |
#!/usr/bin/env python
# python-gphoto2 - Python interface to libgphoto2
# http://github.com/jim-easterbrook/python-gphoto2
# Copyright (C) 2015-19 Jim Easterbrook jim@jim-easterbrook.me.uk
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# read just enough of an image to decode the Exif data
from __future__ import print_function
from datetime import datetime
import io
import logging
import os
import sys
import exifread
import gphoto2 as gp
def list_files(camera, path='/'):
result = []
# get files
for name, value in camera.folder_list_files(path):
result.append(os.path.join(path, name))
# read folders
folders = []
for name, value in camera.folder_list_folders(path):
folders.append(name)
# recurse over subfolders
for name in folders:
result.extend(list_files(camera, os.path.join(path, name)))
return result
class PseudoFile(object):
def __init__(self, camera, path):
self._camera = camera
self._folder, self._file_name = os.path.split(path)
info = self._camera.file_get_info(
self._folder, self._file_name)
self._size = info.file.size
self._ptr = 0
self._buf = bytearray(16 * 1024)
self._buf_ptr = 0
self._buf_len = 0
def read(self, size=None):
if size is None or size < 0:
size = self._size - self._ptr
if (self._ptr < self._buf_ptr or
self._ptr >= self._buf_ptr + self._buf_len):
self._buf_ptr = self._ptr - (self._ptr % len(self._buf))
self._buf_len = self._camera.file_read(
self._folder, self._file_name, gp.GP_FILE_TYPE_NORMAL,
self._buf_ptr, self._buf)
offset = self._ptr - self._buf_ptr
size = min(size, self._buf_len - offset)
self._ptr += size
return self._buf[offset:offset + size]
def seek(self, offset, whence=0):
if whence == 0:
self._ptr = offset
elif whence == 1:
self._ptr += offset
else:
self._ptr = self._size + offset
def tell(self):
return self._ptr
def get_file_exif(camera, path):
pf = PseudoFile(camera, path)
return exifread.process_file(pf)
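# Note (general exifread behavior): process_file only needs an object with
# read/seek/tell, which is exactly what the minimal PseudoFile above provides.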
def main():
logging.basicConfig(
format='%(levelname)s: %(name)s: %(message)s', level=logging.WARNING)
callback_obj = gp.check_result(gp.use_python_logging())
camera = gp.Camera()
camera.init()
files = list_files(camera)
if not files:
print('No files found')
return 1
print('File list')
print('=========')
for path in files[:10]:
print(path)
print('...')
for path in files[-10:]:
print(path)
print()
print('Exif data')
print('=========')
for path in files:
if os.path.splitext(path)[1].lower() != '.jpg':
continue
exif = get_file_exif(camera, path)
for key in ('EXIF DateTimeOriginal', 'EXIF LensModel', 'Image Copyright'):
if key in exif:
print(key, ':', exif[key])
break
print()
camera.exit()
return 0
if __name__ == "__main__":
sys.exit(main())
| sander76/home-assistant | homeassistant/components/xiaomi_aqara/__init__.py | Python | apache-2.0 | 12,713 | 0.000551 |
"""Support for Xiaomi Gateways."""
from datetime import timedelta
import logging
import voluptuous as vol
from xiaomi_gateway import XiaomiGateway, XiaomiGatewayDiscovery
from homeassistant import config_entries, core
from homeassistant.const import (
ATTR_BATTERY_LEVEL,
ATTR_DEVICE_ID,
ATTR_VOLTAGE,
CONF_HOST,
CONF_MAC,
CONF_PORT,
CONF_PROTOCOL,
EVENT_HOMEASSISTANT_STOP,
)
from homeassistant.core import callback
from homeassistant.helpers import device_registry as dr
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.device_registry import format_mac
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import async_track_point_in_utc_time
from homeassistant.util.dt import utcnow
from .const import (
CONF_INTERFACE,
CONF_KEY,
CONF_SID,
DEFAULT_DISCOVERY_RETRY,
DOMAIN,
GATEWAYS_KEY,
LISTENER_KEY,
)
_LOGGER = logging.getLogger(__name__)
GATEWAY_PLATFORMS = ["binary_sensor", "sensor", "switch", "light", "cover", "lock"]
GATEWAY_PLATFORMS_NO_KEY = ["binary_sensor", "sensor"]
ATTR_GW_MAC = "gw_mac"
ATTR_RINGTONE_ID = "ringtone_id"
ATTR_RINGTONE_VOL = "ringtone_vol"
TIME_TILL_UNAVAILABLE = timedelta(minutes=150)
SERVICE_PLAY_RINGTONE = "play_ringtone"
SERVICE_STOP_RINGTONE = "stop_ringtone"
SERVICE_ADD_DEVICE = "add_device"
SERVICE_REMOVE_DEVICE = "remove_device"
SERVICE_SCHEMA_PLAY_RINGTONE = vol.Schema(
{
vol.Required(ATTR_RINGTONE_ID): vol.All(
vol.Coerce(int), vol.NotIn([9, 14, 15, 16, 17, 18, 19])
),
vol.Optional(ATTR_RINGTONE_VOL): vol.All(
vol.Coerce(int), vol.Clamp(min=0, max=100)
),
}
)
SERVICE_SCHEMA_REMOVE_DEVICE = vol.Schema(
{vol.Required(ATTR_DEVICE_ID): vol.All(cv.string, vol.Length(min=14, max=14))}
)
def setup(hass, config):
"""Set up the Xiaomi component."""
def play_ringtone_service(call):
"""Service to play ringtone through Gateway."""
ring_id = call.data.get(ATTR_RINGTONE_ID)
gateway = call.data.get(ATTR_GW_MAC)
kwargs = {"mid": ring_id}
ring_vol = call.data.get(ATTR_RINGTONE_VOL)
if ring_vol is not None:
kwargs["vol"] = ring_vol
gateway.write_to_hub(gateway.sid, **kwargs)
def stop_ringtone_service(call):
"""Service to stop playing ringtone on Gateway."""
gateway = call.data.get(ATTR_GW_MAC)
gateway.write_to_hub(gateway.sid, mid=10000)
def add_device_service(call):
"""Service to add a new sub-device within the next 30 seconds."""
gateway = call.data.get(ATTR_GW_MAC)
gateway.write_to_hub(gateway.sid, join_permission="yes")
hass.components.persistent_notification.async_create(
"Join permission enabled for 30 seconds! "
"Please press the pairing button of the new device once.",
title="Xiaomi Aqara Gateway",
)
def remove_device_service(call):
"""Service to remove a sub-device from the gateway."""
device_id = call.data.get(ATTR_DEVICE_ID)
gateway = call.data.get(ATTR_GW_MAC)
gateway.write_to_hub(gateway.sid, remove_device=device_id)
gateway_only_schema = _add_gateway_to_schema(hass, vol.Schema({}))
hass.services.register(
DOMAIN,
SERVICE_PLAY_RINGTONE,
play_ringtone_service,
schema=_add_gateway_to_schema(hass, SERVICE_SCHEMA_PLAY_RINGTONE),
)
hass.services.register(
DOMAIN, SERVICE_STOP_RINGTONE, stop_ringtone_service, schema=gateway_only_schema
)
hass.services.register(
DOMAIN, SERVICE_ADD_DEVICE, add_device_service, schema=gateway_only_schema
)
hass.services.register(
DOMAIN,
SERVICE_REMOVE_DEVICE,
remove_device_service,
schema=_add_gateway_to_schema(hass, SERVICE_SCHEMA_REMOVE_DEVICE),
)
return True
async def async_setup_entry(
hass: core.HomeAssistant, entry: config_entries.ConfigEntry
):
"""Set up the xiaomi aqara components from a config entry."""
hass.data.setdefault(DOMAIN, {})
hass.data[DOMAIN].setdefault(GATEWAYS_KEY, {})
# Connect to Xiaomi Aqara Gateway
xiaomi_gateway = await hass.async_add_executor_job(
XiaomiGateway,
entry.data[CONF_HOST],
entry.data[CONF_SID],
entry.data[CONF_KEY],
DEFAULT_DISCOVERY_RETRY,
entry.data[CONF_INTERFACE],
entry.data[CONF_PORT],
entry.data[CONF_PROTOCOL],
)
hass.data[DOMAIN][GATEWAYS_KEY][entry.entry_id] = xiaomi_gateway
gateway_discovery = hass.data[DOMAIN].setdefault(
LISTENER_KEY,
XiaomiGatewayDiscovery(hass.add_job, [], entry.data[CONF_INTERFACE]),
)
if len(hass.data[DOMAIN][GATEWAYS_KEY]) == 1:
# start listening for local pushes (only once)
await hass.async_add_executor_job(gateway_discovery.listen)
# register stop callback to shut down listening for local pushes
def stop_xiaomi(event):
"""Stop Xiaomi Socket."""
_LOGGER.debug("Shutting down Xiaomi Gateway Listener")
gateway_discovery.stop_listen()
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, stop_xiaomi)
gateway_discovery.gateways[entry.data[CONF_HOST]] = xiaomi_gateway
_LOGGER.debug(
"Gateway with host '%s' connected, listening for broadcasts",
entry.data[CONF_HOST],
)
device_registry = await dr.async_get_registry(hass)
device_registry.async_get_or_create(
config_entry_id=entry.entry_id,
identifiers={(DOMAIN, entry.unique_id)},
manufacturer="Xiaomi Aqara",
name=entry.title,
sw_version=entry.data[CONF_PROTOCOL],
)
if entry.data[CONF_KEY] is not None:
platforms = GATEWAY_PLATFORMS
else:
platforms = GATEWAY_PLATFORMS_NO_KEY
hass.config_entries.async_setup_platforms(entry, platforms)
return True
async def async_unload_entry(
hass: core.HomeAssistant, entry: config_entries.ConfigEntry
):
"""Unload a config entry."""
if entry.data[CONF_KEY] is not None:
platforms = GATEWAY_PLATFORMS
else:
platforms = GATEWAY_PLATFORMS_NO_KEY
unload_ok = await hass.config_entries.async_unload_platforms(entry, platforms)
if unload_ok:
hass.data[DOMAIN][GATEWAYS_KEY].pop(entry.entry_id)
if len(hass.data[DOMAIN][GATEWAYS_KEY]) == 0:
# No gateways left, stop Xiaomi socket
hass.data[DOMAIN].pop(GATEWAYS_KEY)
_LOGGER.debug("Shutting down Xiaomi Gateway Listener")
gateway_discovery = hass.data[DOMAIN].pop(LISTENER_KEY)
await hass.async_add_executor_job(gateway_discovery.stop_listen)
return unload_ok
class XiaomiDevice(Entity):
"""Representation a base Xiaomi device."""
def __init__(self, device, device_type, xiaomi_hub, config_entry):
"""Initialize the Xiaomi device."""
self._state = None
self._is_available = True
self._sid = device["sid"]
self._model = device["model"]
self._protocol = device["proto"]
self._name = f"{device_type}_{self._sid}"
self._device_name = f"{self._model}_{self._sid}"
self._type = device_type
self._write_to_hub = xiaomi_hub.write_to_hub
self._get_from_hub = xiaomi_hub.get_from_hub
self._extra_state_attributes = {}
self._remove_unavailability_tracker = None
self._xiaomi_hub = xiaomi_hub
self.parse_data(device["data"], device["raw_data"])
self.parse_voltage(device["data"])
if hasattr(self, "_data_key") and self._data_key: # pylint: disable=no-member
self._unique_id = (
f"{self._data_key}{self._sid}" # pylint: disable=no-member
)
else:
self._unique_id = f"{self._type}{self._sid}"
self._gateway_id = config_entry.unique_id
if config_entry.data[CONF_MAC] == format_mac(self._sid):
# this entity belongs to the gateway itself
self._is_gateway = True
self._device_id = config_entry.unique_id
else:
| clinton-hall/nzbToMedia | core/auto_process/managers/pymedusa.py | Python | gpl-3.0 | 6,117 | 0.00327 |
import time
from core import logger
from core.auto_process.common import ProcessResult
from core.auto_process.managers.sickbeard import SickBeard
import requests
class PyMedusa(SickBeard):
"""PyMedusa class."""
def __init__(self, sb_init):
super(PyMedusa, self).__init__(sb_init)
def _create_url(self):
return '{0}{1}:{2}{3}/home/postprocess/processEpisode'.format(self.sb_init.protocol, self.sb_init.host, self.sb_init.port, self.sb_init.web_root)
class PyMedusaApiV1(SickBeard):
"""PyMedusa apiv1 class."""
def __init__(self, sb_init):
super(PyMedusaApiV1, self).__init__(sb_init)
def _create_url(self):
return '{0}{1}:{2}{3}/api/{4}/'.format(self.sb_init.protocol, self.sb_init.host, self.sb_init.port, self.sb_init.web_root, self.sb_init.apikey)
def api_call(self):
self._process_fork_prarams()
url = self._create_url()
logger.debug('Opening URL: {0} with params: {1}'.format(url, self.sb_init.fork_params), self.sb_init.section)
try:
response = self.session.get(url, auth=(self.sb_init.username, self.sb_init.password), params=self.sb_init.fork_params, stream=True, verify=False, timeout=(30, 1800))
except requests.ConnectionError:
logger.error('Unable to open URL: {0}'.format(url), self.sb_init.section)
return ProcessResult(
message='{0}: Failed to post-process - Unable to connect to {0}'.format(self.sb_init.section),
status_code=1,
)
if response.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]:
logger.error('Server returned status {0}'.format(response.status_code), self.sb_init.section)
return ProcessResult(
message='{0}: Failed to post-process - Server returned status {1}'.format(self.sb_init.section, response.status_code),
status_code=1,
)
if response.json()['result'] == 'success':
return ProcessResult(
message='{0}: Successfully post-processed {1}'.format(self.sb_init.section, self.input_name),
status_code=0,
)
return ProcessResult(
message='{0}: Failed to post-process - Returned log from {0} was not as expected.'.format(self.sb_init.section),
status_code=1, # We did not receive Success confirmation.
)
class PyMedusaApiV2(SickBeard):
"""PyMedusa apiv2 class."""
def __init__(self, sb_init):
super(PyMedusaApiV2, self).__init__(sb_init)
# Check for an apikey, as this is required with using fork = medusa-apiv2
if not sb_init.apikey:
raise Exception('For the section SickBeard `fork = medusa-apiv2` you also need to configure an `apikey`')
def _create_url(self):
return '{0}{1}:{2}{3}/api/v2/postprocess'.format(self.sb_init.protocol, self.sb_init.host, self.sb_init.port, self.sb_init.web_root)
def _get_identifier_status(self, url):
# Loop through requesting medusa for the status on the queueitem.
try:
response = self.session.get(url, verify=False, timeout=(30, 1800))
except requests.ConnectionError:
logger.error('Unable to get postprocess identifier status', self.sb_init.section)
return False
try:
jdata = response.json()
except ValueError:
return False
return jdata
def api_call(self):
self._process_fork_prarams()
url = self._create_url()
logger.debug('Opening URL: {0}'.format(url), self.sb_init.section)
payload = self.sb_init.fork_params
payload['resource'] = self.sb_init.fork_params['nzbName']
del payload['nzbName']
# Update the session with the x-api-key
self.session.headers.update({
'x-api-key': self.sb_init.apikey,
'Content-type': 'application/json'
})
# Send postprocess request
try:
response = self.session.post(url, json=payload, verify=False, timeout=(30, 1800))
except requests.ConnectionError:
logger.error('Unable to send postprocess request', self.sb_init.section)
return ProcessResult(
message='{0}: Unable to send postprocess request to PyMedusa'.format(self.sb_init.section),
status_code=1,
)
# Get UUID
if response:
try:
jdata = response.json()
except ValueError:
logger.debug('No data returned from provider')
return False
if not jdata.get('status') or not jdata['status'] == 'success':
return False
queueitem_identifier = jdata['queueItem']['identifier']
wait_for = int(self.sb_init.config.get('wait_for', 2))
n = 0
response = {}
url = '{0}/{1}'.format(url, queueitem_identifier)
while n < 12: # set up wait_for minutes to see if command completes..
time.sleep(5 * wait_for)
response = self._get_identifier_status(url)
if response and response.get('success'):
break
if 'error' in response:
break
n += 1
# Log Medusa's PP logs here.
if response.get('output'):
for line in response['output']:
logger.postprocess('{0}'.format(line), self.sb_init.section)
# For now this will most likely always be True. But in the future we could return an exit state
# for when the PP in medusa didn't yield an expected result.
if response.get('success'):
return ProcessResult(
message='{0}: Successfully post-processed {1}'.format(self.sb_init.section, self.input_name),
status_code=0,
)
return ProcessResult(
message='{0}: Failed to post-process - Returned log from {0} was not as expected.'.format(self.sb_init.section),
status_code=1, # We did not receive Success confirmation.
)
| andredalton/bcc | 2014/MAC0242/Projeto/battle.py | Python | apache-2.0 | 6,126 | 0.004255 |
# For handling the command line
import os, sys, getopt, re
# For manipulating and validating XML files
from lxml import etree
from lxml.etree import XMLSyntaxError, Element
# Who's that pokemon?
from pokemon.pokemon import Pokemon
# Battle state schema file
bss = 'battle_state.xsd'
def usage():
"""Prints the program's usage instructions."""
uso = """
This program loads a pokemon for use as a client or a server.
-h --help Prints this message
-a --auto Starts with the pokemon in automatic mode
-f --file Loads the information from a line-separated file.
-x --xml Loads the information from an xml file.
-p --port Allows passing the server access port on the command line
-H --host Allows passing the program's main URL on the command line
"""
print(uso)
def command_line(argv, host="0.0.0.0", port=5000):
""" Handles the command-line input. """
pk = Pokemon()
flag = False
p = port
h = host
if len(argv) > 0:
try:
opts, args = getopt.getopt(argv, "haf:x:p:H:", ["help", "auto", "file=", "xml=", "port=", "host="])
except getopt.GetoptError as err:
print(err)
usage()
sys.exit(2)
# Avoid errors when arguments with required content are passed empty.
args = ["-h", "--help", "-a", "--auto", "-f", "--file", "-x", "--xml", "-p", "--port", "-H", "--host"]
for o, a in opts:
if o in ("-h", "--help"):
usage()
sys.exit()
elif o in ("-a", "--auto"):
pk.set_auto()
elif o in ("-f", "--file"):
if a in args:
print("option -f requires argument")
usage()
sys.exit()
try:
f = open(a, "r")
except FileNotFoundError:
print("Arquivo nao encontrado!")
sys.exit()
pk.load(f)
flag = True
elif o in ("-x", "--xml"):
if a in args:
print("option -x requires argument")
usage()
sys.exit()
try:
s = open(a, "r").read()
except FileNotFoundError:
print("Arquivo nao encontrado!")
sys.exit()
try:
pk.load_xml(validate(s)[0])
except TypeError:
sys.exit()
flag = True
elif o in ("-p", "--port"):
if a in args:
print("option -p requires argument")
usage()
sys.exit()
try:
p = int(a)
except ValueError:
print("Por favor passe uma porta valida!")
sys.exit()
elif o in ("-H", "--host"):
if a in args:
print("option -H requires argument")
usage()
sys.exit()
h = a
else:
assert False, "opcao nao tratada"
else:
pk.load()
if flag:
return (pk, p, h)
return None
def validate(s):
""" Faz a validação de um battle_state. """
bsv = open(bss, "r").read()
xml = re.sub("encoding=['\"].*['\"]", "", s)
schema_root = etree.XML(bsv)
schema = etree.XMLSchema(schema_root)
parser = etree.XMLParser(schema = schema)
try:
root = etree.fromstring(xml, parser)
except XMLSyntaxError:
print("Formato XML incorreto!")
return None
return root.findall("pokemon")
def make_battle_state(pk1, pk2=None):
""" Gera um battle_state. """
top = Element('battle_state')
x1 = pk1.make_xml()
top.append(x1)
if pk2 is not None:
x2 = pk2.make_xml()
top.append(x2)
return etree.tostring(top, xml_declaration=True, pretty_print=True, encoding="UTF-8").decode("utf-8")
def simple_duel(patt, pdef, n=None, run=True, ppm=None):
""" Resolve um duelo simples.
patt: Pokemon atacante
pdef: Pokemon defensor
n: Número do ataque
run: Para realizar o ataque
ppm: Memória dos pps, usado quando o usuário está comandando o pokemon
"""
an = None
if patt.get_HP() > 0 and pdef.get_HP() > 0:
params = {
"name1":patt.get_name(),
"name2":pdef.get_name(),
"hp1":patt.get_HP(),
"hp2":pdef.get_HP(),
}
print("\n%(hp1)d\t- %(name1)s" % params)
print("%(hp2)s\t- %(name2)s\n" % params)
if patt.get_auto():
an = patt.on_my_own(pdef)
a = patt.select_attack(an)
elif n is not None:
an = int(n)
a = patt.select_attack(an)
else:
a = None
if patt.left_pp() > 0 and not patt.get_auto():
print("\nAtaques de", patt.get_name())
patt.print_attack(ppm)
while a is None:
try:
an = int(input("Selecione um ataque para " + patt.get_name() + ": "))
a = patt.select_attack(an)
except ValueError:
print("Digite um número entre 1 e", patt.get_nattack())
if a is None:
print("Digite um número entre 1 e", patt.get_nattack())
else:
print("%(name)s has no moves left!" % {"name": patt.get_name()})
an = 0
a = patt.select_attack(0)
if run:
a.prepare(pdef)
a.action()
if pdef.get_HP() == 0:
print("%s fainted!" % pdef.get_name())
if patt.get_HP() == 0:
print("%s fainted!" % patt.get_name())
if pdef.get_HP()==0 or patt.get_HP()==0:
print("\nBatalha encerrada!")
return an
| googleapis/python-kms | google/cloud/kms_v1/services/key_management_service/transports/grpc.py | Python | apache-2.0 | 50,118 | 0.001576 |
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import grpc_helpers
from google.api_core import gapic_v1
import google.auth # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import grpc # type: ignore
from google.cloud.kms_v1.types import resources
from google.cloud.kms_v1.types import service
from google.iam.v1 import iam_policy_pb2 # type: ignore
from google.iam.v1 import policy_pb2 # type: ignore
from .base import KeyManagementServiceTransport, DEFAULT_CLIENT_INFO
class KeyManagementServiceGrpcTransport(KeyManagementServiceTransport):
"""gRPC backend transport for KeyManagementService.
Google Cloud Key Management Service
Manages cryptographic keys and operations using those keys.
Implements a REST model with the following objects:
- [KeyRing][google.cloud.kms.v1.KeyRing]
- [CryptoKey][google.cloud.kms.v1.CryptoKey]
- [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion]
- [ImportJob][google.cloud.kms.v1.ImportJob]
If you are using manual gRPC libraries, see `Using gRPC with Cloud
KMS <https://cloud.google.com/kms/docs/grpc>`__.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
_stubs: Dict[str, Callable]
def __init__(
self,
*,
host: str = "cloudkms.googleapis.com",
credentials: ga_credentials.Credentials = None,
credentials_file: str = None,
scopes: Sequence[str] = None,
channel: grpc.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional[Sequence[str]]): A list of scopes. This argument is
ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for the grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure a mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
self._stubs: Dict[str, Callable] = {}
if api_mtls_endpoint:
warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
# Ignore credentials if a channel was passed.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
else:
if api_mtls_endpoint:
host = api_mtls_endpoint
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
self._ssl_channel_credentials = SslCredentials().ssl_credentials
else:
if client_cert_source_for_mtls and not ssl_channel_credentials:
cert, key = client_cert_source_for_mtls()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
# The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
always_use_jwt_access=always_use_jwt_access,
)
if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
self._host,
# use the credentials which are saved
credentials=self._credentials,
# Set ``credentials_file`` to ``None`` here as
# the credentials that we saved earlier
| homma/terminal-dots | app.py | Python | mit | 2,068 | 0.024662 |
#!/usr/bin/env python
## app.py
import curses
import locale
import key_event
import key_move
import model
import edit
import copy
# global data holder
class App():
def __init__(self):
self.win = curses.initscr()
self.setup_window()
h, w = self.win.getmaxyx()
self.data = model.Model(w, h)
def setup_window(self):
curses.curs_set(False)
curses.start_color()
curses.use_default_colors()
curses.init_pair(1, -1, -1)
self.win.bkgd(' ', curses.color_pair(1))
# event loop
def loop(self):
while True:
key = self.win.getkey()
key_event.do_key_action(self, key)
def quit_app(self, args):
curses.nocbreak(); self.win.keypad(0); curses.echo()
curses.endwin()
self.data.dump()
quit()
def add_event(app):
# cursor moves
key_event.add_key_action('k', key_move.up)
key_event.add_key_action('j', key_move.down)
key_event.add_key_action('H', key_move.top)
key_event.add_key_action('L', key_move.bottom)
key_event.add_key_action('h', key_move.left)
key_event.add_key_action('l', key_move.right)
key_event.add_key_action('J', key_move.left_end)
key_event.add_key_action('a', key_move.left_end)
key_event.add_key_action('K', key_move.right_end)
key_event.add_key_action('e', key_move.right_end)
# data modification
key_event.add_key_action('0', edit.zero)
key_event.add_key_action('1', edit.one)
key_event.add_key_action('2', edit.two)
key_event.add_key_action('3', edit.three)
key_event.add_key_action('4', edit.four)
key_event.add_key_action('5', edit.five)
key_event.add_key_action('6', edit.six)
key_event.add_key_action('7', edit.seven)
key_event.add_key_action('8', edit.eight)
key_event.add_key_action('9', edit.nine)
# copy, paste
key_event.add_key_action('y', copy.copy)
key_event.add_key_action('p', copy.paste)
# quit
key_event.add_key_action('q', app.quit_app)
def main(args):
app = App()
add_event(app)
app.loop()
# set locale before initializing curses
locale.setlocale(locale.LC_ALL, "")
curses.wrapper(main)
| FreedomCoop/valuenetwork | work/__init__.py | Python | agpl-3.0 | 47 | 0 |
default_app_config = 'work.apps.WorkAppConfig'
| claudiordgz/GoodrichTamassiaGoldwasser | ch01/r112.py | Python | mit | 3,148 | 0.000953 |
""" Python's random module includes a function choice(data) that returns a
random element from a non-empty sequence. The random module includes
a more basic function randrange, with parametrization similar to
the built-in range function, that returns a random choice from the given
range. Using only the randrange function, implement your own version
of the choice function.
>>> data = [2,3,4,5,6,7,8,9,10,11,10,9,8,7,6,5,4,3,2,1]
>>> results = list()
>>> for x in range(len(data)*20):
... val = custom_choice(data)
... results.append(val in data)
>>> print(results)
[True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True, \
True, True, True, True, True, True, True, True, True, True]
"""
def custom_choice(data):
import random
return data[random.randrange(0,len(data))]
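# Equivalent single-argument form, noted for illustration:
# data[random.randrange(len(data))] behaves identically.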
| spilgames/mysql-statsd | mysql_statsd/mysql_statsd.py | Python | bsd-3-clause | 5,234 | 0.004776 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import Queue
import signal
import sys
import os
import threading
import time
from ConfigParser import ConfigParser
from thread_manager import ThreadManager
from thread_mysql import ThreadMySQL
from thread_statsd import ThreadStatsd, ThreadFakeStatsd
class MysqlStatsd():
"""Main program class"""
opt = None
config = None
def __init__(self):
"""Program entry point"""
op = argparse.ArgumentParser()
op.add_argument("-c", "--config", dest="cfile",
default="/etc/mysql-statsd.conf",
help="Configuration file"
)
op.add_argument("-d", "--debug", dest="debug",
help="Prints statsd metrics next to sending them",
default=False, action="store_true"
)
op.add_argument("--dry-run", dest="dry_run",
default=False,
action="store
|
_true",
help="Print the output that would be sent to statsd without actually sending data somewhere"
)
# TODO switch the default to True, and make it fork by default in init script.
op.add_argument("-f", "--foreground", dest="foreground", help="Dont fork main program", default=False, action="store_true")
opt = op.parse_args()
self.get_config(opt.cfile)
if not self.config:
sys.exit(op.print_help())
try:
logfile = self.config.get('daemon').get('logfile', '/tmp/daemon.log')
except AttributeError:
logfile = sys.stdout
pass
if not opt.foreground:
self.daemonize(stdin='/dev/null', stdout=logfile, stderr=logfile)
# Set up queue
self.queue = Queue.Queue()
# split off config for each thread
mysql_config = dict(mysql=self.config['mysql'])
mysql_config['metrics'] = self.config['metrics']
statsd_config = self.config['statsd']
# Spawn MySQL polling thread
mysql_thread = ThreadMySQL(queue=self.queue, **mysql_config)
# t1 = ThreadMySQL(config=self.config, queue=self.queue)
# Spawn Statsd flushing thread
statsd_thread = ThreadStatsd(queue=self.queue, **statsd_config)
if opt.dry_run:
statsd_thread = ThreadFakeStatsd(queue=self.queue, **statsd_config)
if opt.debug:
""" All debug settings go here """
statsd_thread.debug = True
# Get thread manager
tm = ThreadManager(threads=[mysql_thread, statsd_thread])
try:
tm.run()
except:
# Protects somewhat from needing to kill -9 if there is an exception
# within the thread manager by asking for a quit and joining.
try:
tm.stop_threads()
except:
pass
raise
def get_config(self, config_file):
cnf = ConfigParser()
try:
cnf.read(config_file)[0]
except IndexError:
# Return None so we can display help...
self.config = None # Just to be safe..
return None
self.config = {}
for section in cnf.sections():
self.config[section] = {}
for key, value in cnf.items(section):
self.config[section][key] = value
return self.config
def daemonize(self, stdin='/dev/null', stdout='/dev/null', stderr='/dev/null'):
'''This forks the current process into a daemon. The stdin, stdout, and
stderr arguments are file names that will be opened and be used to replace
the standard file descriptors in sys.stdin, sys.stdout, and sys.stderr.
These arguments are optional and default to /dev/null. Note that stderr is
opened unbuffered, so if it shares a file with stdout then interleaved
output may not appear in the order that you expect. '''
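# (Background on the double fork below, standard Unix practice: the first fork
# lets the parent exit so the child can call setsid() and start a new session;
# the second fork ensures the daemon is no longer a session leader and so
# cannot reacquire a controlling terminal.)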
# Do first fork.
try:
pid = os.fork()
if pid > 0:
sys.exit(0) # Exit first parent.
except OSError, e:
sys.stderr.write("fork #1 failed: (%d) %s\n" % (e.errno, e.strerror))
sys.exit(1)
# Decouple from parent environment.
# TODO: do we need to change to '/' or can we chdir to wherever __file__ is?
os.chdir("/")
os.umask(0)
os.setsid()
# Do second fork.
try:
pid = os.fork()
if pid > 0:
f = open(self.config.get('daemon').get('pidfile', '/var/run/mysql_statsd.pid'), 'w')
f.write(str(pid))
f.close()
sys.exit(0) # Exit second parent.
except OSError, e:
sys.stderr.write("fork #2 failed: (%d) %s\n" % (e.errno, e.strerror))
sys.exit(1)
# Now I am a daemon!
# Redirect standard file descriptors.
si = open(stdin, 'r')
so = open(stdout, 'a+')
se = open(stderr, 'a+', 0)
os.dup2(si.fileno(), sys.stdin.fileno())
os.dup2(so.fileno(), sys.stdout.fileno())
os.dup2(se.fileno(), sys.stderr.fileno())
if __name__ == "__main__":
program = MysqlStatsd()
| garyd203/flying-circus | src/flyingcircus/service/appsync.py | Python | lgpl-3.0 | 351 | 0.002849 |
"""General-use clas
|
ses to interact with the AppSync service through CloudFormation.
See Also:
`AWS developer guide for AppSync
<https://docs.aws.amazon.com/appsync/latest/devguide/welcome.html>`_
"""
# noinspection PyUnresolvedReferences
from .._raw import appsync as _raw
# noinspection PyUnresolvedReferences
from .._raw.appsync import *
| robocomp/robocomp-robolab | components/localization/UWBpublisher/src/specificworker.py | Python | gpl-3.0 | 6,357 | 0.002832 |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
#
# Copyright (C) 2020 by YOUR NAME HERE
#
# This file is part of RoboComp
#
# RoboComp is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# RoboComp is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with RoboComp. If not, see <http://www.gnu.org/licenses/>.
#
from genericworker import *
from PySide2.QtWidgets import QApplication
import decawave_ble as deca
# If RoboComp was compiled with Python bindings you can use InnerModel in Python
# sys.path.append('/opt/robocomp/lib')
# import librobocomp_qmat
# import librobocomp_osgviewer
# import librobocomp_innermodel
class SpecificWorker(GenericWorker):
def __init__(self, proxy_map):
super(SpecificWorker, self).__init__(proxy_map)
self.Period = 2000
self.timer.start(self.Period)
self.defaultMachine.start()
self.destroyed.connect(self.t_compute_to_finalize)
self.devicelist = []
def __del__(self):
print('SpecificWorker destructor')
self.t_compute_to_finalize.emit()
def setParams(self, params):
# try:
# self.innermodel = InnerModel(params["InnerModelPath"])
# except:
# traceback.print_exc()
# print("Error reading config params")
return True
@QtCore.Slot()
def compute(self):
data = None
for x in range(4):
try:
data = deca.get_data_multiple_devices(self.devicelist)
if data is not None:
break
except Ice.Exception as e:
traceback.print_exc()
print(e)
self.t_compute_to_finalize.emit()
break
except (KeyboardInterrupt, SystemExit, SystemError) as e:
traceback.print_exc()
print(e)
self.t_compute_to_finalize.emit()
break
except:
continue
taglist = []
if data is not None:
for key, device in data.items():
if device is not None and "location_data" in device:
position = PointUWB()
if device["location_data"] is None:
continue
if device["location_data"]["position_data"] is None:
continue
position.deviceName = key
position.x = float(device["location_data"]["position_data"]["x_position"])
position.y = float(device["location_data"]["position_data"]["y_position"])
position.z = float(device["location_data"]["position_data"]["z_position"])
position.tag = not bool(device["operation_mode_data"]["device_type"])
taglist.append(position)
try:
self.uwbsimple_proxy.getTagPositions(taglist)
except Ice.Exception as e:
print(e)
traceback.print_exc()
print(e)
except Exception as e:
print(e)
return True
# =============== Slots methods for State Machine ===================
# ===================================================================
#
# sm_initialize
#
@QtCore.Slot()
def sm_initialize(self):
print("Entered state initialize")
if os.getuid() != 0:
print("You need root privileges to run this component\nTry executing us
|
ing 'sudo' before the call")
self.t_initialize_to_finalize.emit()
else:
_original = deca.is_decawave_scan_entry
def is_decawave_scan_entry(scan_entry):
for (adtype, desc, value) in scan_entry.getScanData():
if adtype == 33 or desc == "128b Service Data" or "e72913c2a1" in value:
return True
continue
return _original(scan_entry)
deca.is_decawave_scan_entry = is_decawave_scan_entry
self.devicelist = deca.scan_for_decawave_devices() # scan_for_decawave_devices()
anchor_devices = {}
tag_devices = {}
for k, dev in self.devicelist.items():
if dev is not None:
for x in range(4):
try:
data = deca.get_data(dev)
if data["operation_mode_data"]["device_type"] == 0:
tag_devices[k] = dev
elif data["operation_mode_data"]["device_type"] == 1:
anchor_devices[k] = dev
break
except Ice.Exception as e:
traceback.print_exc()
print(e)
break
except (KeyboardInterrupt, SystemExit, SystemError) as e:
traceback.print_exc()
print(e)
break
except:
continue
if len(tag_devices) > 1:
self.devicelist = tag_devices
print("Found ", len(self.devicelist), " devices")
else:
print("There's no tag devices connected")
self.t_initialize_to_finalize.emit()
self.t_initialize_to_compute.emit()
print('SpecificWorker.compute...')
#
# sm_compute
#
@QtCore.Slot()
def sm_compute(self):
self.compute()
#
# sm_finalize
#
@QtCore.Slot()
def sm_finalize(self):
print("Entered state finalize")
# QApplication.quit()
# =================================================================
# =================================================================
| blc56/PlanetWoo | tiletree/multi.py | Python | gpl-3.0 | 4,546 | 0.031236 |
#Copyright (C) 2012 Excensus, LLC.
#
#This file is part of PlanetWoo.
#
#PlanetWoo is free software: you can redistribute it and/or modify
#it under the terms of the GNU General Public License as published by
#the Free Software Foundation, either version 3 of the License, or
#(at your option) any later version.
#
#PlanetWoo is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#
#You should have received a copy of the GNU General Public License
#along with PlanetWoo. If not, see <http://www.gnu.org/licenses/>.
##\file __init__.py Main classes for the tiletree.multi_render module.
import copy
import planetwoo.tiletree as tiletree
class TileInfoCache:
def __init__(self, name):
self.name = name
self.cache = {}
def clear(self):
self.cache = {}
def add_node_info(self, node_id, info_dict):
self.cache[node_id] = info_dict
def get_node_info(self, node_id):
return self.cache.get(node_id, None)
class MultiGeom:
def __init__(self, num_layers, parent_geom=None):
self.geoms = [None] * num_layers
self.leaf_reached = [False] * num_layers
if(parent_geom):
self.leaf_reached = copy.copy(parent_geom.leaf_reached)
class MultiCutter(tiletree.NullGeomCutter):
def __init__(self, cutters):
self.cutters = cutters
def cut(self, min_x, min_y, max_x, max_y, parent_geom=None):
if(parent_geom == None):
parent_geom = MultiGeom(len(self.cutters), parent_geom)
result = MultiGeom(len(self.cutters), parent_geom)
result.geoms = [c.cut(min_x, min_y, max_x, max_y, p) for c,p in zip(self.cutters, parent_geom.geoms)]
return result
class MultiRenderer:
def __init__(self, renderers):
self.renderers = renderers
self.tile_info_caches = {}
for renderer in self.renderers:
if(renderer.info_cache_name != None):
cache = self.tile_info_caches.setdefault(renderer.info_cache_name, TileInfoCache(renderer.info_cache_name))
renderer.set_info_cache(cache)
def tile_info(self, node, check_full=True):
is_blank = True
is_full = True
is_leaf = True
if(node.geom == None):
node.geom = MultiGeom(len(self.renderers))
r_iter = -1
for renderer in self.renderers:
r_iter += 1
if(node.geom.leaf_reached == 'blank'):
is_full = False
continue
elif(node.geom.leaf_reached == 'full'):
is_blank = False
continue
tmp_node = copy.copy(node)
tmp_node.geom = node.geom.geoms[r_iter]
renderer.tile_info(tmp_node, check_full)
if(not tmp_node.is_blank):
is_blank = False
if(not tmp_node.is_leaf):
is_leaf = False
if(not tmp_node.is_full):
is_full = False
node.label_geoms = tmp_node.label_geoms
node.is_blank = is_blank
node.is_full = is_full
node.is_leaf = is_leaf
def render(self, node):
is_blank = True
is_full = True
is_leaf = True
img_ids = []
img_bytes = []
if(node.geom == None):
node.geom = MultiGeom(len(self.renderers))
r_iter = -1
for renderer in self.renderers:
r_iter += 1
if(node.geom.leaf_reached[r_iter] != False):
img_ids.append(None)
img_bytes.append(None)
continue
tmp_node = copy.copy(node)
tmp_node.geom = node.geom.geoms[r_iter]
this_id, this_bytes = renderer.render(tmp_node)
img_ids.append(this_id)
img_bytes.append(this_bytes)
if(not tmp_node.is_blank):
is_blank = False
if(not tmp_node.is_leaf):
is_leaf = False
if(not tmp_node.is_full):
is_full = False
if(tmp_node.is_blank and tmp_node.is_leaf):
node.geom.leaf_reached[r_iter] = 'blank'
if(tmp_node.is_full and tmp_node.is_leaf):
node.geom.leaf_reached[r_iter] = 'full'
node.label_geoms = tmp_node.label_geoms
node.is_blank = is_blank
node.is_full = is_full
node.is_leaf = is_leaf
node.image_id = img_ids
#now that we have rendered this node, clear the tile info caches
for cache in self.tile_info_caches.values():
cache.clear()
return (node.image_id, img_bytes)
class MultiStorageManager:
def __init__(self, storage_managers):
self.storage_managers = storage_managers
def store(self, node, img_bytes):
s_iter = -1
for storage_manager in self.storage_managers:
s_iter += 1
if(img_bytes[s_iter] == None):
continue
tmp_node = copy.copy(node)
tmp_node.image_id = node.image_id[s_iter]
storage_manager.store(tmp_node, img_bytes[s_iter])
def flush(self):
for storage_manager in self.storage_managers:
storage_manager.flush()
| newera912/WeatherTransportationProject | target/classes/edu/albany/cs/transWeatherPy/plotMesonetOrgData.py | Python | gpl-2.0 | 43,328 | 0.03882 |
import matplotlib.pyplot as plt
import numpy as np
from matplotlib.lines import lineStyles
Light_cnames={'mistyrose':'#FFE4E1','navajowhite':'#FFDEAD','seashell':'#FFF5EE','papayawhip':'#FFEFD5','blanchedalmond':'#FFEBCD','white':'#FFFFFF','mintcream':'#F5FFFA','antiquewhite':'#FAEBD7','moccasin':'#FFE4B5','ivory':'#FFFFF0','lightgoldenrodyellow':'#FAFAD2','lightblue':'#ADD8E6','floralwhite':'#FFFAF0','ghostwhite':'#F8F8FF','honeydew':'#F0FFF0','linen':'#FAF0E6','snow':'#FFFAFA','lightcyan':'#E0FFFF','cornsilk':'#FFF8DC','bisque':'#FFE4C4','aliceblue':'#F0F8FF','gainsboro':'#DCDCDC','lemonchiffon':'#FFFACD','lightyellow':'#FFFFE0','lavenderblush':'#FFF0F5','whitesmoke':'#F5F5F5','beige':'#F5F5DC','azure':'#F0FFFF','oldlace':'#FDF5E6'}
def plot10seperate():
mons=["201603","201604","201605","201606","201607","201608","201609","201610","201611","201612","201701","201702","201703","201704","201705","201706"]
days=['01','02','03','04','05','06','07','08','09','10','11','12','13','14','15','16','17','18','19','20','21','22','23','24','25','26','27','28','29','30','31']
rootpath="F:/workspace/git/TranWeatherProject/data/mesonet_data/"
for mon in mons:
for day in days:
print mon+day
fileName=rootpath+mon+day+".txt"
day_data=[]
with open(fileName,"r") as df:
for line in df.readlines():
terms=line.strip().split()
sta_name=terms[0]
data=map(float,terms[1:])
day_data.append((sta_name,mon+day,data))
X=[(i*5.0/60.0) for i in range(1,len(day_data[0][2]),1)]
fig=plt.figure(1)
fig.add_subplot(10,1,1)
plt.plot(X,day_data[0][2],'b*-',linewidth='2.0', markersize=5,label='Temperature')
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=1, mode="expand", borderaxespad=0.)
#plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
plt.ylim([-20.0,60.0])
plt.xlabel('time Period')
#plt.xlim([0.2,0.0])
plt.legend(loc='best',fontsize=8)
plt.title(day_data[0][0]+" Station Date: "+mon+day +"Temperature")
fig.add_subplot(10,1,2)
plt.plot(X,day_data[1][2],'b*-',linewidth='2.0', markersize=5,label='Temperature')
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=1, mode="expand", borderaxespad=0.)
#plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
plt.ylim([-20.0,60.0])
plt.xlabel('time Period')
#plt.xlim([0.2,0.0])
plt.legend(loc='best',fontsize=8)
plt.title(day_data[1][0]+" Station Date: "+mon+day +"Temperature")
fig.add_subplot(10,1,3)
plt.plot(X,day_data[2][2],'b*-',linewidth='2.0', markersize=5,label='Temperature')
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=1, mode="expand", borderaxespad=0.)
#plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
plt.ylim([-20.0,60.0])
plt.xlabel('time Period')
#plt.xlim([0.2,0.0])
plt.legend(loc='best',fontsize=8)
plt.title(day_data[2][0]+" Station Date: "+mon+day +"Temperature")
fig.add_subplot(10,1,4)
plt.plot(X,day_data[3][2],'b*-',linewidth='2.0', markersize=5,label='Temperature')
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=1, mode="expand", borderaxespad=0.)
#plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
plt.ylim([-20.0,60.0])
plt.xlabel('time Period')
#plt.xlim([0.2,0.0])
plt.legend(loc='best',fontsize=8)
plt.title(day_data[3][0]+" Station Date: "+mon+day +"Temperature")
fig.add_subplot(10,1,5)
plt.plot(X,day_data[4][2],'b*-',linewidth='2.0', markersize=5,label='Temperature')
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=1, mode="expand", borderaxespad=0.)
#plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
plt.ylim([-20.0,60.0])
plt.xlabel('time Period')
#plt.xlim([0.2,0.0])
plt.legend(loc='best',fontsize=8)
plt.title(day_data[4][0]+" Station Date: "+mon+day +"Temperature")
fig.add_subplot(10,1,6)
plt.plot(X,day_data[5][2],'b*-',linewidth='2.0', markersize=5,label='Temperature')
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=1, mode="expand", borderaxespad=0.)
#plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
plt.ylim([-20.0,60.0])
plt.xlabel('time Period')
#plt.xlim([0.2,0.0])
plt.legend(loc='best',fontsize=8)
plt.title(day_data[5][0]+" Station Date: "+mon+day +"Temperature")
fig.add_subplot(10,1,7)
plt.plot(X,day_data[6][2],'b*-',linewidth='2.0', markersize=5,label='Temperature')
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=1, mode="expand", borderaxespad=0.)
#plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
plt.ylim([-20.0,60.0])
plt.xlabel('time Period')
#plt.xlim([0.2,0.0])
plt.legend(loc='best',fontsize=8)
plt.title(day_data[6][0]+" Station Date: "+mon+day +"Temperature")
fig.add_subplot(10,1,8)
plt.plot(X,day_data[7][2],'b*-',linewidth='2.0', markersize=5,label='Temperature')
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=1, mode="expand", borderaxespad=0.)
#plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
plt.ylim([-20.0,60.0])
plt.xlabel('time Period')
#plt.xlim([0.2,0.0])
plt.legend(loc='best',fontsize=8)
plt.title(day_data[7][0]+" Station Date: "+mon+day +"Temperature")
fig.add_subplot(10,1,9)
plt.plot(X,day_data[8][2],'b*-',linewidth='2.0', markersize=5,label='Temperature')
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=1, mode="expand", borderaxespad=0.)
#plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
plt.ylim([-20.0,60.0])
plt.xlabel('time Period From 00:00am ~23:59')
#plt.xlim([0.2,0.0])
plt.legend(loc='best',fontsize=8)
plt.title(day_data[8][0]+" Station Date: "+mon+day +"Temperature")
fig.add_subplot(10,1,10)
plt.plot(X,day_data[9][2],'b*-',linewidth='2.0', markersize=5,label='Temperature')
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=1, mode="expand", borderaxespad=0.)
#plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
plt.ylim([-20.0,60.0])
plt.xlabel('time Period')
#plt.xlim([0.2,0.0])
plt.legend(loc='best',fontsize=8)
plt.title(day_data[9][0]+" Station Date: "+mon+day +"Temperature")
plt.tight_layout(pad=0.4, w_pad=0.5, h_pad=1.0)
plt.show()
fig.savefig('F:/workspace/git/TranWeatherProject/outputs/mesonetPlots/'+str(mon+day)+'.png')
plt.close()
import os
def plotSignle():
mons=["201603","201604","201605","201606","201607","201608","201609"]
#mons=["201604"]
#mons=["201609"]
days=['01','02','03','04','05','06','07','08','09','10','11','12','13','14','15','16','17','18','19','20','21','22','23','24','25','26','27','28','29','30','31']
#days=[""]
sta_names={0:"BATA",1:"SBRI",2:"WATE",3:"JORD",4:"CSQR",5:"WEST",6:"COLD",7:"SPRA",8:"COBL",9:"STEP"}
var_type="precip"
rootpath="F:/workspace/git/Graph-MP/data/mesonet_data/"+var_type+"/"
for
|
sunqm/mpi4pyscf
|
mpi4pyscf/mp/__init__.py
|
Python
|
gpl-3.0
| 40
| 0
|
from . import mp2
from .mp2 import RMP2
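# Minimal usage sketch (assumed, not part of this file): RMP2 follows the
# usual pyscf pattern of wrapping a converged mean-field object, e.g.
#   from pyscf import gto, scf
#   mol = gto.M(atom='H 0 0 0; H 0 0 0.74', basis='cc-pvdz')
#   mf = scf.RHF(mol).run()
#   pt = RMP2(mf).run()  # MPI-parallel restricted MP2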
|
DailyActie/Surrogate-Model
|
01-codes/scikit-learn-master/examples/cluster/plot_kmeans_stability_low_dim_dense.py
|
Python
|
mit
| 4,324
| 0.001619
|
"""
============================================================
Empirical evaluation of the impact of k-means initialization
============================================================
Evaluate the ability of k-means initializations strategies to make
the algorithm convergence robust as measured by the relative standard
deviation of the inertia of the clustering (i.e. the sum of distances
to the nearest cluster center).
The first plot shows the best inertia reached for each combination
of the model (``KMeans`` or ``MiniBatchKMeans``) and the init method
(``init="random"`` or ``init="kmeans++"``) for increasing values of the
``n_init`` parameter that controls the number of initializations.
The second plot demonstrate one single run of the ``MiniBatchKMeans``
estimator using a ``init="random"`` and ``n_init=1``. This run leads to
a bad convergence (local optimum) with estimated centers stuck
between ground truth clusters.
The dataset used for evaluation is a 2D grid of isotropic Gaussian
clusters widely spaced.
"""
print(__doc__)
# Author: Olivier Grisel <olivier.grisel@ensta.org>
# License: BSD 3 clause
import matplotlib.cm as cm
import matplotlib.pyplot as plt
import numpy as np
from sklearn.cluster import KMeans
from sklearn.cluster import MiniBatchKMeans
from sklearn.utils import check_random_state
from sklearn.utils import shuffle
random_state = np.random.RandomState(0)
# Number of run (with randomly generated dataset) for each strategy so as
# to be able to compute an estimate of the standard deviation
n_runs = 5
# k-means models can do several random inits so as to be able to trade
# CPU time for convergence robustness
n_init_range = np.array([1, 5, 10, 15, 20])
# Datasets generation parameters
n_samples_per_center = 100
grid_size = 3
scale = 0.1
n_clusters = grid_size ** 2
def make_data(random_state, n_samples_per_center, grid_size, scale):
random_state = check_random_state(random_state)
centers = np.array([[i, j]
for i in range(grid_size)
for j in range(grid_size)])
n_clusters_true, n_features = centers.shape
noise = random_state.normal(
scale=scale, size=(n_samples_per_center, centers.shape[1]))
X = np.concatenate([c + noise for c in centers])
    y = np.concatenate([[i] * n_samples_per_center
for i in range(n_clusters_true)])
return shuffle(X, y, random_state=random_state)
# Part 1: Quantitative evaluation of various init methods
fig = plt.figure()
plots = []
legends = []
cases = [
(KMeans, 'k-means++', {}),
    (KMeans, 'random', {}),
(MiniBatchKMeans, 'k-means++', {'max_no_improvement': 3}),
(MiniBatchKMeans, 'random', {'max_no_improvement': 3, 'init_size': 500}),
]
for factory, init, params in cases:
print("Evaluation of %s with %s init" % (factory.__name__, init))
inertia = np.empty((len(n_init_range), n_runs))
for run_id in range(n_runs):
X, y = make_data(run_id, n_samples_per_center, grid_size, scale)
for i, n_init in enumerate(n_init_range):
km = factory(n_clusters=n_clusters, init=init, random_state=run_id,
n_init=n_init, **params).fit(X)
inertia[i, run_id] = km.inertia_
p = plt.errorbar(n_init_range, inertia.mean(axis=1), inertia.std(axis=1))
plots.append(p[0])
legends.append("%s with %s init" % (factory.__name__, init))
plt.xlabel('n_init')
plt.ylabel('inertia')
plt.legend(plots, legends)
plt.title("Mean inertia for various k-means init across %d runs" % n_runs)
# Part 2: Qualitative visual inspection of the convergence
X, y = make_data(random_state, n_samples_per_center, grid_size, scale)
km = MiniBatchKMeans(n_clusters=n_clusters, init='random', n_init=1,
random_state=random_state).fit(X)
fig = plt.figure()
for k in range(n_clusters):
my_members = km.labels_ == k
color = cm.spectral(float(k) / n_clusters, 1)
plt.plot(X[my_members, 0], X[my_members, 1], 'o', marker='.', c=color)
cluster_center = km.cluster_centers_[k]
plt.plot(cluster_center[0], cluster_center[1], 'o',
markerfacecolor=color, markeredgecolor='k', markersize=6)
plt.title("Example cluster allocation with a single random init\n"
"with MiniBatchKMeans")
plt.show()
|
mschmidt87/python-neo
|
setup.py
|
Python
|
bsd-3-clause
| 1,476
| 0.01355
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
import os
long_description = open("README.rst").read()
install_requires = ['numpy>=1.3.0',
'quantities>=0.9.0']
if os.environ.get('TRAVIS') == 'true' and \
os.environ.get('TRAVIS_PYTHON_VERSION').startswith('2.6'):
install_requires.append('unittest2>=0.5.1')
setup(
name = "neo",
version = '0.4.0dev',
packages = ['neo', 'neo.core', 'neo.io', 'neo.test', 'neo.test.iotest'],
install_requires=install_requires,
author = "Neo authors and contributors",
author_email = "sgarcia at olfac.univ-lyon1.fr",
description = "Neo is a package for representing electrophysiology data in Python, together with support for reading a wide range of neurophysiology file formats",
long_description = long_description,
license = "BSD-3-Clause",
url='http://neuralensemble.org/neo',
classifiers = [
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Topic :: Scientific/Engineering']
)
|
bigswitch/neutron
|
neutron/tests/unit/services/trunk/test_db.py
|
Python
|
apache-2.0
| 1,965
| 0
|
# Copyright 2016 Hewlett Packard Enterprise Development Company, LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from neutron import context
from neutron.db import models_v2
from neutron.services.trunk import db
from neutron.services.trunk import exceptions
from neutron.tests.unit import testlib_api
class TrunkDBTestCase(testlib_api.SqlTestCase):
def setUp(self):
super(TrunkDBTestCase, self).setUp()
self.ctx = context.get_admin_context()
def _add_network(self, net_id):
with self.ctx.session.begin(subtransactions=True):
            self.ctx.session.add(models_v2.Network(id=net_id))
def _add_port(self, net_id, port_id):
with self.ctx.session.begin(subtransactions=True):
port = models_v2.Port(id=port_id,
network_id=net_id,
mac_address='foo_mac_%s' % port_id,
admin_state_up=True,
status='DOWN',
device_id='',
device_owner='')
self.ctx.session.add(port)
def test_create_trunk_raise_port_in_use(self):
self._add_network('foo_net')
self._add_port('foo_net', 'foo_port')
db.create_trunk(self.ctx, 'foo_port')
self.assertRaises(exceptions.TrunkPortInUse,
db.create_trunk,
self.ctx, 'foo_port')
|
IBMStreams/streamsx.topology
|
test/python/spl/tk17/opt/.__splpy/packages/streamsx/scripts/extract.py
|
Python
|
apache-2.0
| 18,964
| 0.008437
|
from __future__ import print_function
# Licensed Materials - Property of IBM
# Copyright IBM Corp. 2016,2017
import sys
import sysconfig
import inspect
if sys.version_info.major == 2:
import funcsigs
import imp
import glob
import os
import shutil
import argparse
import subprocess
import xml.etree.ElementTree as ET
import html
from streamsx.spl.spl import _OperatorType
from streamsx.spl.spl import _valid_op_parameter
############################################
# setup for function inspection
if sys.version_info.major == 3:
_inspect = inspect
else:
raise ValueError("Python version not supported.")
############################################
# Return the root of the com.ibm.streamsx.topology toolkit
def _topology_tk_dir():
dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
for _ in range(5):
dir = os.path.dirname(dir)
return dir
def replaceTokenInFile(file, token, value):
f = open(file,'r')
contents = f.read()
f.close()
newcontents = contents.replace(token, value)
f = open(file,'w')
f.write(newcontents)
f.close()
def _optype(opobj):
if hasattr(opobj, '__splpy_optype'):
return opobj.__splpy_optype
return None
def _opfile(opobj):
return opobj.__splpy_file
def _opstyle(opobj):
return opobj.__splpy_style
def _opcallable(opobj):
return opobj.__splpy_callable
def _opdoc(opobj):
return opobj.__splpy_docpy
_INFO_XML_TEMPLATE="""<?xml version="1.0" encoding="UTF-8"?>
<toolkitInfoModel
xmlns="http://www.ibm.com/xmlns/prod/streams/spl/toolkitInfo"
xmlns:common="http://www.ibm.com/xmlns/prod/streams/spl/common">
<identity>
<name>__SPLPY_TOOLKIT_NAME__</name>
<description>Automatic generated toolkit description file.</description>
<version>1.0.0</version>
<requiredProductVersion>4.0.1.0</requiredProductVersion>
</identity>
<dependencies/>
<resources>
<messageSet name="TopologySplpyResource">
<lang default="true">en_US/TopologySplpyResource.xlf</lang>
<lang>de_DE/TopologySplpyResource.xlf</lang>
<lang>es_ES/TopologySplpyResource.xlf</lang>
<lang>fr_FR/TopologySplpyResource.xlf</lang>
<lang>it_IT/TopologySplpyResource.xlf</lang>
<lang>ja_JP/TopologySplpyResource.xlf</lang>
<lang>ko_KR/TopologySplpyResource.xlf</lang>
<lang>pt_BR/TopologySplpyResource.xlf</lang>
<lang>ru_RU/TopologySplpyResource.xlf</lang>
<lang>zh_CN/TopologySplpyResource.xlf</lang>
<lang>zh_TW/TopologySplpyResource.xlf</lang>
</messageSet>
</resources>
</toolkitInfoModel>
"""
# Create SPL operator parameters from the Python class
# (functions cannot have parameters)
# The parameters are taken from the signature of
# the __init__ method. In the spirit of Python
# the default for non-annotated function parameters
# is to map to operator parameters that take any type
# with a cardinality of 1. If the function parameter
# has a default value, then the operator parameter is optional
_OP_PARAM_TEMPLATE ="""
<parameter>
<name>__SPLPY__PARAM_NAME__SPLPY__</name>
<description></description>
<optional>__SPLPY__PARAM_OPT__SPLPY__</optional>
<rewriteAllowed>true</rewriteAllowed>
<expressionMode>AttributeFree</expressionMode>
<type></type>
<cardinality>1</cardinality>
</parameter>"""
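# Illustrative mapping sketch (hypothetical operator class, not from this
# toolkit):
#   class Sample(object):
#       def __init__(self, threshold, label='x'):
#           ...
# 'threshold' (no default) becomes a required SPL parameter and 'label'
# (defaulted) an optional one; each fills the template above with
# cardinality 1 and no fixed type.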
class _Extractor(object):
def __init__(self):
self._cmd_args = self._parse_cmd_args()
self._tk_dir = self._cmd_args.directory
def _parse_cmd_args(self):
cmd_parser = argparse.ArgumentParser(description='Extract SPL operators from decorated Python classes and functions.')
cmd_parser.add_argument('-i', '--directory', required=True,
help='Toolkit directory')
cmd_parser.add_argument('--make-toolkit', action='store_true',
help='Index toolkit using spl-make-toolkit')
cmd_parser.add_argument('-v', '--verbose', action='store_true',
help='Print more diagnostics')
return cmd_parser.parse_args()
def _make_namespace_dir(self, ns):
nsdir = os.path.join(self._tk_dir, ns)
if os.path.isdir(nsdir):
return nsdir
os.mkdir(nsdir)
return nsdir
def _make_operator_dir(self, nsdir, name):
oppath = os.path.join(nsdir, name)
if (os.path.isdir(oppath)):
shutil.rmtree(oppath)
os.mkdir(oppath)
return oppath
# Process python objects in a module looking for SPL operators
    # dynm - introspection for the module
# module - module name
# ops - list of potential operators (functions)
def _process_operators(self, dynm, module, streams_python_file, ops):
for opname, opobj in ops:
if inspect.isbuiltin(opobj):
continue
if opname.startswith('spl'):
continue
optype = _optype(opobj)
if optype is None:
continue
if optype == _OperatorType.Ignore:
continue
if streams_python_file != _opfile(opobj):
continue
self._common_tuple_operator(dynm, module, opname, opobj)
def _copy_globalization_resources(self):
'''Copy the language resource files for python api functions
This function copies the TopologySplpy Resource files from Topology toolkit directory
into the impl/nl folder of the project.
Returns: the list with the copied locale strings'''
rootDir = os.path.join(_topology_tk_dir(), "impl", "nl")
languageList = []
for dirName in os.listdir(rootDir):
srcDir = os.path.join(_topology_tk_dir(), "impl", "nl", dirName)
if (os.path.isdir(srcDir)) and (dirName != "include"):
dstDir = os.path.join(self._tk_dir, "impl", "nl", dirName)
try:
print("Copy globalization resources " + dirName)
os.makedirs(dstDir)
except OSError as e:
if (e.errno == 17) and (os.path.isdir(dstDir)):
if self._cmd_args.verbose:
print("Directory", dstDir, "exists")
else:
raise
srcFile = os.path.join(srcDir, "TopologySplpyResource.xlf")
if os.path.isfile(srcFile):
res = shutil.copy2(srcFile, dstDir)
languageList.append(dirName)
                    if self._cmd_args.verbose:
                        print("Written: " + res)
return languageList
#
# module - module for operator
# opname - name of the SPL operator
# opobj - decorated object defining operator
#
def _common_tuple_operator(self, dynm, module, opname, opobj) :
if (not hasattr(dynm, 'spl_namespace')) and hasattr(dynm, 'splNamespace'):
ns = getattr(dynm, 'splNamespace')()
else:
ns = getattr(dynm, 'spl_namespace')()
print(ns + "::" + opname)
# Print the summary of the class/function
_doc = inspect.getdoc(opobj)
if _doc is not None:
_doc = str.splitlines(_doc)[0]
print(" ", _doc)
nsdir = self._make_namespace_dir(ns)
opdir = self._make_operator_dir(nsdir, opname)
self._copy_template_dir("common")
self._copy_template_dir("icons")
self._copy_python_dir("packages")
self._copy_python_dir("include")
self._copy_CGT(opdir, ns, opname, opobj)
self._write_config(dynm, opdir, module, opname, opobj)
def _create_op_parameters(self, opmodel_xml, name, opObj):
opparam_xml = ''
if _opcallable(opObj) == 'class':
pmds = init_sig = _inspect.signature(opObj.__init__).parameters
itpmds = iter(pmds)
# first argument to __init__ is self (instance ref)
next(itpmds)
for pn in itpmds:
pmd = pmds[pn]
_valid_op_parameter(pn)
px = _OP_PARAM_TEMPLATE
px = px.replace('__SPLPY__PARAM_NAME__SPLPY__
|
catapult-project/catapult
|
third_party/gsutil/gslib/vendored/boto/boto/cloudformation/__init__.py
|
Python
|
bsd-3-clause
| 2,185
| 0
|
# Copyright (c) 2010-2011 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2010-2011, Eucalyptus Systems, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from boto.cloudformation.connection import CloudFormationConnection
from boto.regioninfo import RegionInfo, get_regions, load_regions
from boto.regioninfo import connect
RegionData = load_regions().get('cloudformation')
def regions():
"""
Get all available regions for the CloudFormation service.
:rtype: list
:return: A list of :class:`boto.RegionInfo` instances
"""
return get_regions(
'cloudformation',
connection_cls=CloudFormationConnection
)
def connect_to_region(region_name, **kw_params):
"""
Given a valid region name, return a
:class:`boto.cloudformation.CloudFormationConnection`.
:param str region_name: The name of the region to connect to.
:rtype: :class:`boto.cloudformation.CloudFormationConnection` or ``None``
:return: A connection to the given region, or None if an invalid region
name is given
"""
return connect('cloudformation', region_name,
connection_cls=CloudFormationConnection, **kw_params)
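# Hedged usage sketch (the region name is an assumption; real calls need
# AWS credentials):
#   conn = connect_to_region('us-east-1')
#   if conn is not None:
#       stacks = conn.describe_stacks()  # enumerate CloudFormation stacks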
|
jdereus/labman
|
labcontrol/gui/handlers/plate.py
|
Python
|
bsd-3-clause
| 9,784
| 0
|
# ----------------------------------------------------------------------------
# Copyright (c) 2017-, LabControl development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
from itertools import chain
from tornado.web import authenticated, HTTPError
from tornado.escape import json_encode, json_decode
from labcontrol.gui.handlers.base import BaseHandler
from labcontrol.db.exceptions import LabControlUnknownIdError
from labcontrol.db.plate import PlateConfiguration, Plate
from labcontrol.db.composition import SampleComposition
from labcontrol.db.container import Well
from labcontrol.db.process import (
SamplePlatingProcess, GDNAExtractionProcess, LibraryPrep16SProcess,
LibraryPrepShotgunProcess, NormalizationProcess,
GDNAPlateCompressionProcess)
def _get_plate(plate_id):
"""Returns the plate object if it exists
Parameters
----------
plate_id : str
The plate id
Raises
------
HTTPError
404, if the plate doesn't exist
"""
plate_id = int(plate_id)
try:
plate = Plate(plate_id)
except LabControlUnknownIdError:
raise HTTPError(404, 'Plate %s doesn\'t exist' % plate_id)
return plate
class PlateSearchHandler(BaseHandler):
@authenticated
def get(self):
control_names = SampleComposition.get_control_samples()
self.render('plate_search.html',
control_names=json_encode(control_names))
@authenticated
def post(self):
plate_comment_keywords = self.get_argument("plate_comment_keywords")
well_comment_keywords = self.get_argument("well_comment_keywords")
operation = self.get_argument("operation")
sample_names = json_decode(self.get_argument('sample_names'))
res = {"data": [[p.id, p.external_id]
for p in Plate.search(samples=sample_names,
plate_notes=plate_comment_keywords,
well_notes=well_comment_keywords,
query_type=operation)]}
self.write(res)
class PlateListingHandler(BaseHandler):
@authenticated
def get(self):
self.render('plate_list.html')
class PlateListHandler(BaseHandler):
@authenticated
def get(self):
plate_type = self.get_argument('plate_type', None)
only_quantified = self.get_argument('only_quantified', False)
plate_type = (json_decode(plate_type)
if plate_type is not None else None)
only_quantified = True if only_quantified == 'true' else False
rows_list = [[p['plate_id'],
p['external_id'],
p['creation_timestamp'],
p['studies'] if p['studies'] is not None else []]
for p in Plate.list_plates(
plate_type, only_quantified=only_quantified,
include_study_titles=True)]
res = {"data": rows_list}
self.write(res)
def plate_map_handler_get_request(process_id):
plate_id = None
if process_id is not None:
try:
process = SamplePlatingProcess(process_id)
        except LabControlUnknownIdError:
raise HTTPError(404, reason="Plating process %s doesn't exist"
% process_id)
plate_id = process.plate.id
plate_confs = [[pc.id, pc.description, pc.num_rows, pc.num_columns]
for pc in PlateConfiguration.iter()
                   if 'plate map' not in pc.description]
cdesc = SampleComposition.get_control_sample_types_description()
return {'plate_confs': plate_confs, 'plate_id': plate_id,
'process_id': process_id, 'controls_description': cdesc}
class PlateMapHandler(BaseHandler):
@authenticated
def get(self):
process_id = self.get_argument('process_id', None)
res = plate_map_handler_get_request(process_id)
self.render("plate.html", **res)
class PlateNameHandler(BaseHandler):
@authenticated
def get(self):
new_name = self.get_argument('new-name')
status = 200 if Plate.external_id_exists(new_name) else 404
self.set_status(status)
self.finish()
def plate_handler_patch_request(user, plate_id, req_op, req_path,
req_value, req_from):
"""Performs the patch operation on the plate
Parameters
----------
user: labcontrol.db.user.User
User performing the request
plate_id: int
The SamplePlatingProcess to apply the patch operation
req_op: string
JSON PATCH op parameter
req_path: string
JSON PATCH path parameter
req_value: string
JSON PATCH value parameter
req_from: string
JSON PATCH from parameter
Raises
------
HTTPError
400: If req_op is not a supported operation
400: If req_path is incorrect
"""
plate = _get_plate(plate_id)
if req_op == 'replace':
req_path = [v for v in req_path.split('/') if v]
if len(req_path) != 1:
raise HTTPError(400, 'Incorrect path parameter')
attribute = req_path[0]
if attribute == 'name':
plate.external_id = req_value
elif attribute == 'discarded':
plate.discarded = req_value
else:
raise HTTPError(404, 'Attribute %s not recognized' % attribute)
else:
raise HTTPError(400, 'Operation %s not supported. Current supported '
'operations: replace' % req_op)
class PlateHandler(BaseHandler):
@authenticated
def get(self, plate_id):
plate = _get_plate(plate_id)
# sorting is done in plate.duplicates
duplicates = [
[sample_info[0].row, sample_info[0].column, sample_info[1]]
for sample_info in chain.from_iterable(plate.duplicates.values())]
# sorting of wells has to be done here as they are in a dictionary
previous_plates = []
prev_plated = plate.get_previously_plated_wells()
well_ids = sorted([w.id for w in prev_plated.keys()])
for curr_well_id in well_ids:
curr_well = Well(curr_well_id)
curr_plates = prev_plated[curr_well]
# plates are sorted in plate id order in
# get_previously_plated_wells
previous_plates.append([
[curr_well.row, curr_well.column],
[{'plate_id': p.id, 'plate_name': p.external_id} for p in
curr_plates]])
# sorting is done in plate.unknown_samples
unknowns = [[well.row, well.column] for well in plate.unknown_samples]
# sorting is done in plate.quantification processes
quantitation_processes = [[q.id, q.personnel.name, q.date.strftime(
q.get_date_format()), q.notes] for q in
plate.quantification_processes]
plate_config = plate.plate_configuration
result = {'plate_id': plate.id,
'plate_name': plate.external_id,
'discarded': plate.discarded,
'plate_configuration': [
plate_config.id, plate_config.description,
plate_config.num_rows, plate_config.num_columns],
'notes': plate.notes,
'process_notes': plate.process.notes,
'studies': sorted(s.id for s in plate.studies),
'duplicates': duplicates,
'previous_plates': previous_plates,
'unknowns': unknowns,
'quantitation_processes': quantitation_processes}
self.write(result)
self.finish()
@authenticated
def patch(self, plate_id):
# Follows the JSON PATCH specification
# https://tools.ietf.org/html/rfc6902
req_op = self.get_argument('op')
req_path = self.get_argument('path')
req_value = self.ge
|
gersolar/stations
|
stations/api.py
|
Python
|
mit
| 2,047
| 0.020029
|
from stations.models import OpticFilter, Brand, Product, Device, SensorCalibration, Position, Station, Configuration, Measurement
from tastypie import fields
from tastypie.authentication import SessionAuthentication
from tastypie.resources import ModelResource
from tastypie_polymorphic import PolymorphicModelResource
class BrandResource(ModelResource):
class Meta(object):
queryset = Brand.objects.all()
resource_name = 'brand'
authentication = SessionAuthentication()
class ProductResource(ModelResource):
class Meta(object):
queryset = Product.objects.all()
resource_name = 'product'
authentication = SessionAuthentication()
class DeviceResource(PolymorphicModelResource):
class Meta(object):
queryset = Device.objects.all()
resource_name = 'device'
authentication = SessionAuthentication()
class OpticFilterResource(ModelResource):
class Meta(object):
queryset = OpticFilter.objects.all()
        resource_name = 'optic_filter'
authentication = SessionAuthentication()
class SensorCalibrationResource(ModelResource):
class Meta(object):
queryset = SensorCalibration.objects.all()
        resource_name = 'sensor_calibration'
authentication = SessionAuthentication()
class StationResource(ModelResource):
materials = fields.ToManyField('PositionResource', 'coordinates', full=True)
class Meta(object):
queryset = Station.objects.all()
resource_name = 'station'
authentication = SessionAuthentication()
class PositionResource(ModelResource):
station = fields.ForeignKey(StationResource, 'station', full=True)
class Meta(object):
queryset = Position.objects.all()
resource_name = 'position'
authentication = SessionAuthentication()
class ConfigurationResource(ModelResource):
class Meta(object):
queryset = Configuration.objects.all()
resource_name = 'configuration'
authentication = SessionAuthentication()
class MeasurementResource(ModelResource):
class Meta(object):
queryset = Measurement.objects.all()
resource_name = 'measurement'
authentication = SessionAuthentication()
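# Hypothetical wiring sketch (names assumed, not part of this file):
# tastypie resources are usually exposed by registering them on an Api
# instance and routing its urls:
#   from tastypie.api import Api
#   v1_api = Api(api_name='v1')
#   v1_api.register(StationResource())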
|
anhstudios/swganh
|
data/scripts/templates/object/tangible/component/droid/shared_binary_load_lifter_droid_chassis.py
|
Python
|
mit
| 510
| 0.043137
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/component/droid/shared_binary_load_lifter_droid_chassis.iff"
result.attribute_template_id = -1
result.stfName("craft_droid_ingredients_n","binary_load_lifter_droid_chassis")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
jbenden/ansible
|
lib/ansible/modules/monitoring/logstash_plugin.py
|
Python
|
gpl-3.0
| 4,754
| 0.001472
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Loic Blot <loic.blot@unix-experience.fr>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: logstash_plugin
short_description: Manage Logstash plugins
description:
- Manages Logstash plugins.
version_added: "2.3"
author: Loic Blot (@nerzhul)
options:
name:
description:
- Install plugin with that name.
required: True
state:
description:
- Apply plugin state.
required: False
choices: ["present", "absent"]
default: present
plugin_bin:
description:
- Specify logstash-plugin to use for plugin management.
required: False
default: /usr/share/logstash/bin/logstash-plugin
proxy_host:
description:
- Proxy host to use during plugin installation.
required: False
default: None
proxy_port:
description:
- Proxy port to use during plugin installation.
required: False
default: None
version:
description:
- Specify plugin Version of the plugin to install.
If plugin exists with previous version, it will NOT be updated.
required: False
default: None
'''
EXAMPLES = '''
- name: Install Logstash beats input plugin
logstash_plugin:
state: present
name: logstash-input-beats
- name: Install specific version of a plugin
logstash_plugin:
state: present
name: logstash-input-syslog
version: '3.2.0'
- name: Uninstall Logstash plugin
logstash_plugin:
state: absent
name: logstash-filter-multiline
'''
from ansible.module_utils.basic import AnsibleModule
PACKAGE_STATE_MAP = dict(
present="install",
absent="remove"
)
def is_plugin_present(module, plugin_bin, plugin_name):
cmd_args = [plugin_bin, "list", plugin_name]
rc, out, err = module.run_command(" ".join(cmd_args))
return rc == 0
def parse_error(string):
reason = "reason: "
try:
return string[string.index(reason) + len(reason):].strip()
except ValueError:
return string
def install_plugin(module, plugin_bin, plugin_name, version, proxy_host, proxy_port):
cmd_args = [plugin_bin, PACKAGE_STATE_MAP["present"], plugin_name]
if version:
cmd_args.append("--version %s" % version)
if proxy_host and proxy_port:
cmd_args.append("-DproxyHost=%s -DproxyPort=%s" % (proxy_host, proxy_port))
cmd = " ".join(cmd_args)
if module.check_mode:
rc, out, err = 0, "check mode", ""
else:
rc, out, err = module.run_command(cmd)
if rc != 0:
reason = parse_error(out)
module.fail_json(msg=reason)
return True, cmd, out, err
def remove_plugin(module, plugin_bin, plugin_name):
cmd_args = [plugin_bin, PACKAGE_STATE_MAP["absent"], plugin_name]
cmd = " ".join(cmd_args)
if module.check_mode:
        rc, out, err = 0, "check mode", ""
else:
rc, out, err = module.run_command(cmd)
if rc != 0:
reason = parse_error(out)
module.fail_json(msg=reason)
return True, cmd, out, err
def main():
    module = AnsibleModule(
argument_spec=dict(
name=dict(required=True),
state=dict(default="present", choices=PACKAGE_STATE_MAP.keys()),
plugin_bin=dict(default="/usr/share/logstash/bin/logstash-plugin", type="path"),
proxy_host=dict(default=None),
proxy_port=dict(default=None),
version=dict(default=None)
),
supports_check_mode=True
)
name = module.params["name"]
state = module.params["state"]
plugin_bin = module.params["plugin_bin"]
proxy_host = module.params["proxy_host"]
proxy_port = module.params["proxy_port"]
version = module.params["version"]
present = is_plugin_present(module, plugin_bin, name)
# skip if the state is correct
if (present and state == "present") or (state == "absent" and not present):
module.exit_json(changed=False, name=name, state=state)
if state == "present":
changed, cmd, out, err = install_plugin(module, plugin_bin, name, version, proxy_host, proxy_port)
elif state == "absent":
changed, cmd, out, err = remove_plugin(module, plugin_bin, name)
module.exit_json(changed=changed, cmd=cmd, name=name, state=state, stdout=out, stderr=err)
if __name__ == '__main__':
main()
|
luzheqi1987/nova-annotation
|
nova/tests/unit/api/openstack/compute/contrib/test_simple_tenant_usage.py
|
Python
|
apache-2.0
| 21,262
| 0.000282
|
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from lxml import etree
import mock
from oslo.serialization import jsonutils
from oslo.utils import timeutils
import webob
from nova.api.openstack.compute.contrib import simple_tenant_usage as \
simple_tenant_usage_v2
from nova.api.openstack.compute.plugins.v3 import simple_tenant_usage as \
simple_tenant_usage_v21
from nova.compute import flavors
from nova.compute import vm_states
from nova import context
from nova import db
from nova import exception
from nova import objects
from nova.openstack.common import policy as common_policy
from nova import policy
from nova import test
from nova.tests.unit.api.openstack import fakes
from nova import utils
SERVERS = 5
TENANTS = 2
HOURS = 24
ROOT_GB = 10
EPHEMERAL_GB = 20
MEMORY_MB = 1024
VCPUS = 2
NOW = timeutils.utcnow()
START = NOW - datetime.timedelta(hours=HOURS)
STOP = NOW
FAKE_INST_TYPE = {'id': 1,
'vcpus': VCPUS,
'root_gb': ROOT_GB,
'ephemeral_gb': EPHEMERAL_GB,
'memory_mb': MEMORY_MB,
'name': 'fakeflavor',
'flavorid': 'foo',
'rxtx_factor': 1.0,
'vcpu_weight': 1,
'swap': 0,
'created_at': None,
'updated_at': None,
'deleted_at': None,
'deleted': 0,
'disabled': False,
'is_public': True,
'extra_specs': {'foo': 'bar'}}
def get_fake_db_instance(start, end, instance_id, tenant_id,
vm_state=vm_states.ACTIVE):
sys_meta = utils.dict_to_metadata(
flavors.save_flavor_info({}, FAKE_INST_TYPE))
# NOTE(mriedem): We use fakes.stub_instance since it sets the fields
# needed on the db instance for converting it to an object, but we still
# need to override system_metadata to use our fake flavor.
inst = fakes.stub_instance(
id=instance_id,
uuid='00000000-0000-0000-0000-00000000000000%02d' % instance_id,
image_ref='1',
project_id=tenant_id,
user_id='fakeuser',
display_name='name',
flavor_id=FAKE_INST_TYPE['id'],
launched_at=start,
terminated_at=end,
vm_state=vm_state,
memory_mb=MEMORY_MB,
vcpus=VCPUS,
root_gb=ROOT_GB,
ephemeral_gb=EPHEMERAL_GB,)
inst['system_metadata'] = sys_meta
return inst
def fake_instance_get_active_by_window_joined(context, begin, end,
project_id, host):
return [get_fake_db_instance(START,
STOP,
x,
"faketenant_%s" % (x / SERVERS))
for x in xrange(TENANTS * SERVERS)]
@mock.patch.object(db, 'instance_get_active_by_window_joined',
fake_instance_get_active_by_window_joined)
class SimpleTenantUsageTestV21(test.TestCase):
url = '/v2/faketenant_0/os-simple-tenant-usage'
alt_url = '/v2/faketenant_1/os-simple-tenant-usage'
policy_rule_prefix = "compute_extension:v3:os-simple-tenant-usage"
def setUp(self):
super(SimpleTenantUsageTestV21, self).setUp()
self.admin_context = context.RequestContext('fakeadmin_0',
'faketenant_0',
is_admin=True)
self.user_context = context.RequestContext('fakeadmin_0',
'faketenant_0',
is_admin=False)
self.alt_user_context = context.RequestContext('fakeadmin_0',
'faketenant_1',
is_admin=False)
def _get_wsgi_app(self, context):
return fakes.wsgi_app_v21(fake_auth_context=context,
init_only=('servers',
'os-simple-tenant-usage'))
def _test_verify_index(self, start, stop):
req = webob.Request.blank(
self.url + '?start=%s&end=%s' %
(start.isoformat(), stop.isoformat()))
req.method = "GET"
req.headers["content-type"] = "application/json"
res = req.get_response(self._get_wsgi_app(self.admin_context))
self.assertEqual(res.status_int, 200)
res_dict = jsonutils.loads(res.body)
usages = res_dict['tenant_usages']
for i in xrange(TENANTS):
self.assertEqual(int(usages[i]['total_hours']),
SERVERS * HOURS)
self.assertEqual(int(usages[i]['total_local_gb_usage']),
SERVERS * (ROOT_GB + EPHEMERAL_GB) * HOURS)
self.assertEqual(int(usages[i]['total_memory_mb_usage']),
SERVERS * MEMORY_MB * HOURS)
self.assertEqual(int(usages[i]['total_vcpus_usage']),
SERVERS * VCPUS * HOURS)
self.assertFalse(usages[i].get('server_usages'))
def test_verify_index(self):
self._test_verify_index(START, STOP)
def test_verify_index_future_end_time(self):
future = NOW + datetime.timedelta(hours=HOURS)
self._test_verify_index(START, future)
def test_verify_show(self):
self._test_verify_show(START, STOP)
def test_verify_show_future_end_time(self):
future = NOW + datetime.timedelta(hours=HOURS)
self._test_verify_show(START, future)
def _get_tenant_usages(self, detailed=''):
req = webob.Request.blank(
self.url + '?detailed=%s&start=%s&end=%s' %
(detailed, START.isoformat(), STOP.isoformat()))
req.method = "GET"
req.headers["content-type"] = "application/json"
res = req.get_response(self._get_wsgi_app(self.admin_context))
self.assertEqual(res.status_int, 200)
res_dict = jsonutils.loads(res.body)
return res_dict['tenant_usages']
def test_verify_detailed_index(self):
usages = self._get_tenant_usages('1')
for i in xrange(TENANTS):
servers = usages[i]['server_usages']
for j in xrange(SERVERS):
self.assertEqual(int(servers[j]['hours']), HOURS)
def test_verify_simple_index(self):
usages = self._get_tenant_usages(detailed='0')
for i in xrange(TENANTS):
self.assertIsNone(usages[i].get('server_usages'))
def test_verify_simple_index_empty_param(self):
# NOTE(lzyeval): 'detailed=&start=..&end=..'
usages = self._get_tenant_usages()
for i in xrange(TENANTS):
self.assertIsNone(usages[i].get('server_usages'))
def _test_verify_show(self, start, stop):
tenant_id = 0
req = webob.Request.blank(
self.url + '/faketenant_%s?start=%s&end=%s' %
(tenant_id, start.isoformat(), stop.isoformat()))
req.method = "GET"
req.headers["content-type"] = "application/json"
res = req.get_response(self._get_wsgi_app(self.user_context))
self.assertEqual(res.status_int, 200)
res_dict = jsonutils.loads(res.body)
usage = res_dict['tenant_usage']
servers = usage['server_usages']
self.assertEqual(len(usage['server_usages']), SERVERS)
|
cloudbase/neutron-virtualbox
|
neutron/tests/sub_base.py
|
Python
|
apache-2.0
| 5,363
| 0
|
# Copyright 2014 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Base test case for all tests.
To change behavior only for tests that do not rely on Tempest, please
target the neutron.tests.base module instead.
There should be no non-test Neutron imports in this module to ensure
that the functional API tests can import Tempest without triggering
errors due to duplicate configuration definitions.
"""
import contextlib
import logging as std_logging
import os
import os.path
import random
import traceback
import eventlet.timeout
import fixtures
import mock
from oslo_utils import strutils
import testtools
from neutron.tests import post_mortem_debug
LOG_FORMAT = "%(asctime)s %(levelname)8s [%(name)s] %(message)s"
def get_rand_name(max_length=None, prefix='test'):
name = prefix + str(random.randint(1, 0x7fffffff))
return name[:max_length] if max_length is not None else name
def bool_from_env(key, strict=False, default=False):
value = os.environ.get(key)
return strutils.bool_from_string(value, strict=strict, default=default)
class SubBaseTestCase(testtools.TestCase):
def setUp(self):
super(SubBaseTestCase, self).setUp()
# Configure this first to ensure pm debugging support for setUp()
debugger = os.environ.get('OS_POST_MORTEM_DEBUGGER')
if debugger:
self.addOnException(post_mortem_debug.get_exception_handler(
debugger))
if bool_from_env('OS_DEBUG'):
_level = std_logging.DEBUG
else:
_level = std_logging.INFO
        capture_logs = bool_from_env('OS_LOG_CAPTURE')
if not capture_logs:
std_logging.basicConfig(format=LOG_FORMAT, level=_level)
self.log_fixture = self.useFixture(
fixtures.FakeLogger(
format=LOG_FORMAT,
level=_level,
nuke_handlers=capture_logs,
))
        test_timeout = int(os.environ.get('OS_TEST_TIMEOUT', 0))
if test_timeout == -1:
test_timeout = 0
if test_timeout > 0:
self.useFixture(fixtures.Timeout(test_timeout, gentle=True))
# If someone does use tempfile directly, ensure that it's cleaned up
self.useFixture(fixtures.NestedTempfile())
self.useFixture(fixtures.TempHomeDir())
self.addCleanup(mock.patch.stopall)
if bool_from_env('OS_STDOUT_CAPTURE'):
stdout = self.useFixture(fixtures.StringStream('stdout')).stream
self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout))
if bool_from_env('OS_STDERR_CAPTURE'):
stderr = self.useFixture(fixtures.StringStream('stderr')).stream
self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr))
self.addOnException(self.check_for_systemexit)
def check_for_systemexit(self, exc_info):
if isinstance(exc_info[1], SystemExit):
self.fail("A SystemExit was raised during the test. %s"
% traceback.format_exception(*exc_info))
@contextlib.contextmanager
def assert_max_execution_time(self, max_execution_time=5):
with eventlet.timeout.Timeout(max_execution_time, False):
yield
return
self.fail('Execution of this test timed out')
def assertOrderedEqual(self, expected, actual):
expect_val = self.sort_dict_lists(expected)
actual_val = self.sort_dict_lists(actual)
self.assertEqual(expect_val, actual_val)
def sort_dict_lists(self, dic):
for key, value in dic.iteritems():
if isinstance(value, list):
dic[key] = sorted(value)
elif isinstance(value, dict):
dic[key] = self.sort_dict_lists(value)
return dic
def assertDictSupersetOf(self, expected_subset, actual_superset):
"""Checks that actual dict contains the expected dict.
After checking that the arguments are of the right type, this checks
that each item in expected_subset is in, and matches, what is in
actual_superset. Separate tests are done, so that detailed info can
be reported upon failure.
"""
if not isinstance(expected_subset, dict):
self.fail("expected_subset (%s) is not an instance of dict" %
type(expected_subset))
if not isinstance(actual_superset, dict):
self.fail("actual_superset (%s) is not an instance of dict" %
type(actual_superset))
for k, v in expected_subset.items():
self.assertIn(k, actual_superset)
self.assertEqual(v, actual_superset[k],
"Key %(key)s expected: %(exp)r, actual %(act)r" %
{'key': k, 'exp': v, 'act': actual_superset[k]})
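    # Illustrative check (values assumed): a test doing
    #   self.assertDictSupersetOf({'a': 1}, {'a': 1, 'b': 2})
    # passes, while a missing key or a differing value fails with a
    # per-key message.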
|
FIWARE-TMForum/business-ecosystem-charging-backend
|
src/wstore/ordering/models.py
|
Python
|
agpl-3.0
| 4,227
| 0.000237
|
# -*- coding: utf-8 -*-
# Copyright (c) 2013 - 2016 CoNWeT Lab., Universidad Politécnica de Madrid
# This file belongs to the business-charging-backend
# of the Business API Ecosystem.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from django.db import models
from django.contrib.auth.models import User
from djangotoolbox.fields import DictField, EmbeddedModelField, ListField
from wstore.models import Organization, Resource
from wstore.ordering.errors import OrderingError
class Offering(models.Model):
off_id = models.CharField(max_length=50, blank=True, null=True)
href = models.URLField()
owner_organization = models.ForeignKey(Organization)
name = models.CharField(max_length=200)
version = models.CharField(max_length=100)
description = models.CharField(max_length=1500)
is_digital = models.BooleanField(default=True)
asset = models.ForeignKey(Resource, null=True, blank=True)
is_open = models.BooleanField(default=False)
bundled_offerings = ListField()
class Charge(models.Model):
date = models.DateTimeField()
cost = models.CharField(max_length=100)
duty_free = models.CharField(max_length=100)
currency = models.CharField(max_length=3)
concept = models.CharField(max_length=100)
invoice = models.CharField(max_length=200)
class Contract(models.Model):
item_id = models.CharField(max_length=50)
product_id = models.CharField(max_length=50, blank=True, null=True)
offering = models.ForeignKey(Offering)
# Parsed version of the pricing model used to calculate charges
pricing_model = DictField()
# Date of the last charge to the customer
last_charge = models.DateTimeField(blank=True, null=True)
# List with the made charges
charges = ListField(EmbeddedModelField(Charge))
# Usage fields
correlation_number = models.IntegerField(default=0)
last_usage = models.DateTimeField(blank=True, null=True)
# Revenue sharing product class
revenue_class = models.CharField(max_length=15, blank=True, null=True)
suspended = models.BooleanField(default=False)
terminated = models.BooleanField(default=False)
class Payment(models.Model):
transactions = ListField()
concept = models.CharField(max_length=20)
free_contracts = ListField(EmbeddedModelField(Contract))
class Order(models.Model):
description = models.CharField(max_length=1500)
order_id = models.CharField(max_length=50)
customer = models.ForeignKey(User)
    owner_organization = models.ForeignKey(Organization, null=True, blank=True)
date = models.DateTimeField()
sales_ids = ListField()
state = models.CharField(max_length=50)
tax_address = DictField()
# List of contracts attached to the current order
contracts = ListField(EmbeddedModelField(Contract))
# Pending payment info used in asynchronous charges
    pending_payment = EmbeddedModelField(Payment, null=True, blank=True)
def get_item_contract(self, item_id):
# Search related contract
for c in self.contracts:
if c.item_id == item_id:
contract = c
break
else:
raise OrderingError('Invalid item id')
return contract
def get_product_contract(self, product_id):
# Search related contract
for c in self.contracts:
if c.product_id == product_id:
contract = c
break
else:
raise OrderingError('Invalid product id')
return contract
class Meta:
app_label = 'wstore'
|
Jumpscale/jumpscale_core8
|
lib/JumpScale/tools/cache/Cache.py
|
Python
|
apache-2.0
| 1,412
| 0.000708
|
from JumpScale import j
try:
import ujson as json
except:
import json
class CacheFactory:
def __init__(self):
self.__jslocation__ = "j.tools.cache"
def get(self, db, expiration=300):
"""
db is keyvalue stor to use
e.g. j.tools.cache.get(j.servers.kvs.getRedisStore(namespace="cache"))
"""
return Cache(db, expiration)
class Cache:
    def __init__(self, db, expiration=300):
self.db = db
self.expiration = expiration
        self.redis = str(self.db).find("redis") != -1
def set(self, key, value):
tostore = {}
tostore["val"] = value
tostore["expire"] = j.data.time.getTimeEpoch() + self.expiration
data = json.dumps(tostore)
if self.redis:
self.db.set("cache", key, data)
else:
self.db.set("cache", key, data, expire=self.expiration)
def get(self, key):
"""
        Return (expired, value); expired is True if the entry had expired.
"""
data = self.db.get("cache", key)
if data is None:
return False, None
data = json.loads(data)
if data["expire"] < j.data.time.getTimeEpoch():
self.db.delete("cache", key)
return (True, data["val"])
else:
return (False, data["val"])
def delete(self, key):
data = self.db.delete("cache", key)
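# Minimal usage sketch following the factory docstring (the key-value
# store is an assumption):
#   cache = j.tools.cache.get(j.servers.kvs.getRedisStore(namespace="cache"), 60)
#   cache.set("answer", 42)
#   expired, value = cache.get("answer")  # -> (False, 42) until 60s pass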
|
sunqm/mpi4pyscf
|
mpi4pyscf/dft/uks.py
|
Python
|
gpl-3.0
| 4,017
| 0.000498
|
#!/usr/bin/env python
import platform
import time
import numpy
from pyscf import lib
from pyscf.dft import uks
from mpi4pyscf.lib import logger
from mpi4pyscf.scf import uhf as mpi_uhf
from mpi4pyscf.dft import rks as mpi_rks
from mpi4pyscf.tools import mpi
comm = mpi.comm
rank = mpi.rank
@lib.with_doc(uks.get_veff.__doc__)
@mpi.parallel_call(skip_args=[1, 2, 3, 4], skip_kwargs=['dm_last', 'vhf_last'])
def get_veff(mf, mol=None, dm=None, dm_last=0, vhf_last=0, hermi=1):
t0 = (time.clock(), time.time())
mf.unpack_(comm.bcast(mf.pack()))
mol = mf.mol
ni = mf._numint
if mf.nlc != '':
raise NotImplementedError
    omega, alpha, hyb = ni.rsh_and_hybrid_coeff(mf.xc, spin=mol.spin)
# Broadcast the large input arrays here.
if any(comm.allgather(dm is mpi.Message.SkippedArg)):
if rank == 0 and dm is None:
            dm = mf.make_rdm1()
dm = mpi.bcast_tagged_array(dm)
if any(comm.allgather(dm_last is mpi.Message.SkippedArg)):
dm_last = mpi.bcast_tagged_array(dm_last)
if any(comm.allgather(vhf_last is mpi.Message.SkippedArg)):
vhf_last = mpi.bcast_tagged_array(vhf_last)
ground_state = (dm.ndim == 3 and dm.shape[0] == 2)
if mf.grids.coords is None:
mpi_rks._setup_grids_(mf, dm[0]+dm[1])
t0 = logger.timer(mf, 'setting up grids', *t0)
if hermi == 2: # because rho = 0
n, exc, vxc = 0, 0, 0
else:
n, exc, vxc = ni.nr_uks(mol, mf.grids, mf.xc, dm)
n = comm.allreduce(n)
exc = comm.allreduce(exc)
vxc = mpi.reduce(vxc)
logger.debug(mf, 'nelec by numeric integration = %s', n)
t0 = logger.timer(mf, 'vxc', *t0)
if abs(hyb) < 1e-10 and abs(alpha) < 1e-10:
vk = None
if getattr(vhf_last, 'vj', None) is not None:
ddm = numpy.asarray(dm) - dm_last
ddm = ddm[0] + ddm[1]
vj = mf.get_j(mol, ddm, hermi)
vj += vhf_last.vj
else:
vj = mf.get_j(mol, dm[0]+dm[1], hermi)
vxc += vj
else:
if getattr(vhf_last, 'vk', None) is not None:
ddm = numpy.asarray(dm) - dm_last
vj, vk = mf.get_jk(mol, ddm, hermi)
vj = vj[0] + vj[1]
vk *= hyb
if abs(omega) > 1e-10:
vklr = mf.get_k(mol, ddm, hermi, omega=omega)
vk += vklr * (alpha - hyb)
ddm = None
vj += vhf_last.vj
vk += vhf_last.vk
else:
vj, vk = mf.get_jk(mol, dm, hermi)
vj = vj[0] + vj[1]
vk *= hyb
if abs(omega) > 1e-10:
vklr = mf.get_k(mol, dm, hermi, omega=omega)
vk += vklr * (alpha - hyb)
vxc += vj
vxc -= vk
if ground_state:
exc -=(numpy.einsum('ij,ji', dm[0], vk[0]) +
numpy.einsum('ij,ji', dm[1], vk[1])) * .5
if ground_state:
ecoul = numpy.einsum('ij,ji', dm[0]+dm[1], vj) * .5
else:
ecoul = None
vxc = lib.tag_array(vxc, ecoul=ecoul, exc=exc, vj=vj, vk=vk)
return vxc
@mpi.register_class
class UKS(uks.UKS, mpi_uhf.UHF):
get_jk = mpi_uhf.UHF.get_jk
get_j = mpi_uhf.UHF.get_j
get_k = mpi_uhf.UHF.get_k
@lib.with_doc(uks.UKS.get_veff.__doc__)
def get_veff(self, mol=None, dm=None, dm_last=0, vhf_last=0, hermi=1):
assert(mol is None or mol is self.mol)
return get_veff(self, None, dm, dm_last, vhf_last, hermi)
def pack(self):
return {'verbose': self.verbose,
'direct_scf_tol': self.direct_scf_tol,
'xc': self.xc,
'nlc': self.nlc,
'omega': self.omega,
'small_rho_cutoff': self.small_rho_cutoff, }
def dump_flags(self, verbose=None):
mpi_info = mpi.platform_info()
if rank == 0:
uks.UKS.dump_flags(self, verbose)
lib.logger.debug(self, 'MPI info (rank, host, pid) %s', mpi_info)
return self
|
Flamacue/pretix
|
src/tests/plugins/banktransfer/test_import.py
|
Python
|
apache-2.0
| 4,988
| 0.001405
|
from datetime import timedelta
from decimal import Decimal
import pytest
from bs4 import BeautifulSoup
from django.core.files.uploadedfile import SimpleUploadedFile
from django.utils.timezone import now
from pretix.base.models import (
Event, EventPermission, Item, Order, OrderPosition, Organizer, Quota, User,
)
from pretix.plugins.banktransfer.models import BankImportJob
from pretix.plugins.banktransfer.tasks import process_banktransfers
@pytest.fixture
def env():
o = Organizer.objects.create(name='Dummy', slug='dummy')
event = Event.objects.create(
organizer=o, name='Dummy', slug='dummy',
date_from=now(), plugins='pretix.plugins.banktransfer'
)
user = User.objects.create_user('dummy@dummy.dummy', 'dummy')
EventPermission.objects.create(user=user, event=event)
o1 = Order.objects.create(
code='1Z3AS', event=event,
status=Order.STATUS_PENDING,
datetime=now(), expires=now() + timedelta(days=10),
total=23, payment_provider='banktransfer'
)
o2 = Order.objects.create(
code='6789Z', event=event,
status=Order.STATUS_CANCELED,
datetime=now(), expires=now() + timedelta(days=10),
total=23, payment_provider='banktransfer'
)
Order.objects.create(
code='GS89Z', event=event,
status=Order.STATUS_CANCELED,
datetime=now(), expires=now() + timedelta(days=10),
total=23, payment_provider='banktransfer'
)
quota = Quota.objects.create(name="Test", size=2, event=event)
item1 = Item.objects.create(event=event, name="Ticket", default_price=23)
quota.items.add(item1)
OrderPosition.objects.create(order=o1, item=item1, variation=None, price=23)
return event, user, o1, o2
@pytest.mark.django_db
def test_import_csv_file(client, env):
client.login(email='dummy@dummy.dummy', password='dummy')
r = client.get('/control/event/dummy/dummy/banktransfer/import/')
assert r.status_code == 200
file = SimpleUploadedFile('file.csv', """
Buchungstag;Valuta;Buchungstext;Auftraggeber / Empfänger;Verwendungszweck;Betrag in EUR;
09.04.2015;09.04.2015;SEPA-Überweisung;Karl Kunde;Bestellung 2015ABCDE;23,00;
09.04.2015;09.04.2015;SEPA-Überweisung;Karla Kundin;Bestellung DUMMYFGHIJ;42,00;
09.04.2015;09.04.2015;SEPA-Überweisung;Karla Kundin;Bestellung DUMMY1234S;42,00;
09.04.2015;09.04.2015;SEPA-Überweisung;Karla Kundin;Bestellung DUMMY1234S;23,00;
09.04.2015;09.04.2015;SEPA-Überweisung;Karla Kundin;Bestellung DUMMY6789Z;23,00;
09.04.2015;09.04.2015;SEPA-Überweisung;Karla Kundin;Bestellung DUMMY65892;23,00;
""".encode("utf-8"), content_type="text/csv")
r = client.post('/control/event/dummy/dummy/banktransfer/import/', {
'file': file
})
doc = BeautifulSoup(r.content, "lxml")
assert r.status_code == 200
assert len(doc.select("input[name=date]")) > 0
data = {
'payer': [3],
'reference': [4],
'date': 1,
'amount': 5,
'cols': 7
}
for inp in doc.select("input[type=hidden]"):
data[inp.attrs['name']] = inp.attrs['value']
r = client.post('/control/event/dummy/dummy/banktransfer/import/', data)
assert '/job/' in r['Location']
@pytest.fixture
def job(env):
return BankImportJob.objects.create(event=env[0]).pk
@pytest.mark.django_db
def test_mark_paid(env, job):
process_banktransfers(env[0].pk, job, [{
'payer': 'Karla Kundin',
'reference': 'Bestellung DUMMY1234S',
'date': '2016-01-26',
'amount': '23.00'
}])
env[2].refresh_from_db()
assert env[2].status == Order.STATUS_PAID
@pytest.mark.django_db
def test_check_amount(env, job):
process_banktransfers(env[0].pk, job, [{
'payer': 'Karla Kundin',
'reference': 'Bestellung DUMMY1Z3AS',
'date': '2016-01-26',
'amount': '23.50'
}])
env[2].refresh_from_db()
assert env[2].status == Order.STATUS_PENDING
@pytest.mark.django_db
def test_ignore_canceled(env, job):
process_banktransfers(env[0].pk, job, [{
'payer': 'Karla Kundin',
'reference': 'Bestellung DUMMY6789Z',
'date': '2016-01-26',
'amount': '23.00'
}])
env[3].refresh_from_db()
assert env[3].status == Order.STATUS_CANCELED
@pytest.mark.django_db
def test_autocorrection(env, job):
process_banktransfers(env[0].pk, job, [{
'payer': 'Karla Kundin',
'reference': 'Bestellung DUMMY12345',
'amount': '23.00',
'date': '2016-01-26',
}])
env[2].refresh_from_db()
assert env[2].status == Order.STATUS_PAID
@pytest.mark.django_db
def test_huge_amount(env, job):
env[2].total = Decimal('23000.00')
env[2].save()
process_banktransfers(env[0].pk, job, [{
'payer': 'Karla Kundin',
'reference': 'Bestellung DUMMY12345',
'amount': '23.000,00',
'date': '2016-01-26',
}])
env[2].refresh_from_db()
assert env[2].status == Order.STATUS_PAID
|
darenr/MOMA-Art
|
extract_concepts/concepts.py
|
Python
|
mit
| 2,123
| 0.021667
|
import sys
import codecs
from textblob import Blobber
from textblob.wordnet import Synset
from textblob.en.np_extractors import ConllExtractor
from collections import Counter
import re
from nltk.corpus import wordnet as wn
from nltk.corpus.reader import NOUN
import os
import string
import itertools
from nltk.corpus import stopwords
stoplist = stopwords.words('english')
stoplist.extend(stopwords.words('french'))
stoplist.extend(["cette", "made", "works", "image", "images", "les", "comme"])
stoplist.extend(["photograph", "photographer", "film", "untitled", "series", "artist"])
stoplist.extend(["photographs", "other", "like", "also", "said", "work", "one", "two", "three"])
stoplist.extend(list(string.ascii_lowercase))
def wn_synonyms(ss):
return [l.name().decode('utf-8') for l in ss.lemmas()]
def wn_expand(ss):
x= [wn_getword(ss)]
b = tb(ss.definition())
x.extend([t[0] for t in b.tags if t[1] in ['JJ', 'NN', 'NNS']])
return x
def wn_getword(ss):
return ss if isinstance(ss, basestring) else ss.name().decode('utf-8').split('.')[0]
def wn_make_synset(word):
if '.' in word:
return wn.synset(word)
else:
ss = wn.synsets(word, NOUN)
if ss:
return ss[0]
else:
return None
def contains_number(word):
return re.search(r'[0-9]', word)
def bad(word):
return contains_number(word) or word.lower() in stoplist or len(word) < 3
def extract_capitalized(text):
return list(set(re.findall(r'([A-Z][a-z]+(?=\s[A-Z])(?:\s[A-Z][a-z]+)+)', re.sub(r'\n', ' _ ', text))))
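# Illustrative behaviour sketch (the input string is an assumption): for
#   extract_capitalized("Seen at The Museum of Modern Art in New York")
# the regex collects runs of adjacent capitalized words such as
# 'The Museum', 'Modern Art' and 'New York'; list(set(...)) leaves the
# order unspecified.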
tb = Blobber(np_extractor=ConllExtractor())
if __name__ == "__main__":
for arg in sys.argv[1:]:
with codecs.open(arg, 'r', encoding='utf-8') as f:
            text = f.read()
b = tb(text)
step1 = [t[0] for t in b.tags if t[1] in ['JJ', 'NN', 'NNS'] and not bad(t[0])]
        #step2 = [wn_make_synset(word) for word in step1 if wn_make_synset(word)]
        #step3 = list(itertools.chain.from_iterable([wn_expand(ss) for ss in step2]))
print "\n"
print '=' *60
print arg
print '=' *60
print ' *', Counter(step1)
print ' *', extract_capitalized(text)
|
enkidulan/slidelint
|
src/slidelint/tests/checkers/font_size/TestCheckerFontSize.py
|
Python
|
apache-2.0
| 13,339
| 0.000075
|
"""
The *_font_gradient.pdf files have 16 slides, each containing differently
sized text:
    * the first slide contains 1/1 sized text - the text is as high as
      the page
    * the second slide contains 1/2 sized text - the text is half the
      height of the page
    * ...
    * the 16th slide contains 1/16 sized text - sixteen lines of text can
      fit on the page
The tests check:
   1. whether help messages are provided
   2. whether with the default size value (1/6) the checks of slides 7..16 fail
   3. whether with a custom size value of 1/7 the checks of slides 8..16 fail
   4. whether with a custom size value of 1/10 the checks of slides 11..16 fail
   5. whether with a custom size value of 1/16 the checks of all slides pass
The results of this check also depend on the font type and its features.
"""
import os.path
import unittest
from testfixtures import compare
from slidelint.checkers import fontsize
here = os.path.dirname(os.path.abspath(__file__))
class TestFontSizeChecker(unittest.TestCase):
def test_checker_helpers(self):
compare(fontsize.main(msg_info='All'),
[dict(id='C1002',
msg_name='font-to-small',
msg='Font is to small: Text should take up a minimum '
'of 1/6th(by default) the page.',
help='Font is to small: Text should take up a minimum '
'of 1/6th(by default) the page.',
page='')])
compare(fontsize.main(msg_info=['C1002']),
[dict(id='C1002',
msg_name='font-to-small',
msg='Font is to small: Text should take up a minimum '
'of 1/6th(by default) the page.',
help='Font is to small: Text should take up a minimum '
'of 1/6th(by default) the page.',
page='')])
compare(fontsize.main(msg_info=['W8001']),
[])
def test_default(self):
for prefix in ('libreoffice', 'msoffice'):
target_file = os.path.join(
here, prefix+'_font_gradient.pdf')
rez = fontsize.main(target_file=target_file)
compare(rez,
[{'help': 'Font is to small: Text should take up a '
'minimum of 1/6th(by default) the page.',
'id': 'C1002',
'msg': 'Font is to small: Text should take up a minimum'
' of 1/6.0th the page.',
'msg_name': 'font-to-small',
'page': 'Slide 7'},
{'help': 'Font is to small: Text should take up a '
'minimum of 1/6th(by default) the page.',
'id': 'C1002',
'msg': 'Font is to small: Text should take up a minimum'
' of 1/6.0th the page.',
'msg_name': 'font-to-small',
'page': 'Slide 8'},
{'help': 'Font is to small: Text should take up a '
'minimum of 1/6th(by default) the page.',
'id': 'C1002',
'msg': 'Font is to small: Text should take up a minimum'
' of 1/6.0th the page.',
'msg_name': 'font-to-small',
'page': 'Slide 9'},
{'help': 'Font is to small: Text should take up a '
'minimum of 1/6th(by default) the page.',
'id': 'C1002',
'msg': 'Font is to small: Text should take up a minimum'
' of 1/6.0th the page.',
'msg_name': 'font-to-small',
'page': 'Slide 10'},
{'help': 'Font is to small: Text should take up a '
'minimum of 1/6th(by default) the page.',
'id': 'C1002',
'msg': 'Font is to small: Text should take up a minimum'
' of 1/6.0th the page.',
'msg_name': 'font-to-small',
'page': 'Slide 11'},
{'help': 'Font is to small: Text should take up a '
'minimum of 1/6th(by default) the page.',
'id': 'C1002',
                     'msg': 'Font is to small: Text should take up a minimum'
                            ' of 1/6.0th the page.',
'msg_name': 'font-to-small',
'page': 'Slide 12'},
{'help': 'Font is to small: Text should take up a '
                              'minimum of 1/6th(by default) the page.',
'id': 'C1002',
'msg': 'Font is to small: Text should take up a minimum'
' of 1/6.0th the page.',
'msg_name': 'font-to-small',
'page': 'Slide 13'},
{'help': 'Font is to small: Text should take up a '
'minimum of 1/6th(by default) the page.',
'id': 'C1002',
'msg': 'Font is to small: Text should take up a minimum'
' of 1/6.0th the page.',
'msg_name': 'font-to-small',
'page': 'Slide 14'},
{'help': 'Font is to small: Text should take up a '
'minimum of 1/6th(by default) the page.',
'id': 'C1002',
'msg': 'Font is to small: Text should take up a minimum'
' of 1/6.0th the page.',
'msg_name': 'font-to-small',
'page': 'Slide 15'},
{'help': 'Font is to small: Text should take up a '
'minimum of 1/6th(by default) the page.',
'id': 'C1002',
'msg': 'Font is to small: Text should take up a minimum'
' of 1/6.0th the page.',
'msg_name': 'font-to-small',
'page': 'Slide 16'}])
def test_1_of_7(self):
for prefix in ('libreoffice', 'msoffice'):
target_file = os.path.join(
here, prefix+'_font_gradient.pdf')
rez = fontsize.main(target_file=target_file, min_page_ratio=7)
compare(rez,
[{'help': 'Font is to small: Text should take up a '
'minimum of 1/6th(by default) the page.',
'id': 'C1002',
'msg': 'Font is to small: Text should take up a minimum'
' of 1/7.0th the page.',
'msg_name': 'font-to-small',
'page': 'Slide 8'},
{'help': 'Font is to small: Text should take up a '
'minimum of 1/6th(by default) the page.',
'id': 'C1002',
'msg': 'Font is to small: Text should take up a minimum'
' of 1/7.0th the page.',
'msg_name': 'font-to-small',
'page': 'Slide 9'},
{'help': 'Font is to small: Text should take up a '
'minimum of 1/6th(by default) the page.',
'id': 'C1002',
'msg': 'Font is to small: Text should take up a minimum'
' of 1/7.0th the page.',
'msg_name': 'font-to-small',
'page': 'Slide 10'},
{'help': 'Font is to small: Text should take up a '
'minimum of 1/6th(by default) the page.',
'id': 'C1002',
'msg': 'Font is to small: Text should take up a minimum'
' of 1/7.0th the page.',
'msg_name': 'font-to-small',
|
ampax/edx-platform
|
lms/djangoapps/oauth_dispatch/tests/test_views.py
|
Python
|
agpl-3.0
| 9,406
| 0.001701
|
"""
Tests for Blocks Views
"""
import json
import ddt
from django.test import RequestFactory, TestCase
from django.core.urlresolvers import reverse
import httpretty
from student.tests.factories import UserFactory
from third_party_auth.tests.utils import ThirdPartyOAuthTestMixin, ThirdPartyOAuthTestMixinGoogle
from .constants import DUMMY_REDIRECT_URL
from .. import adapters
from .. import views
from . import mixins
class _DispatchingViewTestCase(TestCase):
"""
Base class for tests that exercise DispatchingViews.
"""
dop_adapter = adapters.DOPAdapter()
dot_adapter = adapters.DOTAdapter()
view_class = None
url = None
def setUp(self):
super(_DispatchingViewTestCase, self).setUp()
self.user = UserFactory()
self.dot_app = self.dot_adapter.create_public_client(
name='test dot application',
user=self.user,
redirect_uri=DUMMY_REDIRECT_URL,
client_id='dot-app-client-id',
)
self.dop_client = self.dop_adapter.create_public_client(
name='test dop client',
user=self.user,
redirect_uri=DUMMY_REDIRECT_URL,
client_id='dop-app-client-id',
)
def _post_request(self, user, client, token_type=None):
"""
        Call the view with a POST request object with the appropriate format,
returning the response object.
"""
return self.client.post(self.url, self._post_body(user, client, token_type))
def _post_body(self, user, client, token_type=None):
"""
Return a dictionary to be used as the body of the POST request
"""
raise NotImplementedError()
@ddt.ddt
class TestAccessTokenView(mixins.AccessTokenMixin, _DispatchingViewTestCase):
"""
Test class for AccessTokenView
"""
view_class = views.AccessTokenView
url = reverse('access_token')
def _post_body(self, user, client, token_type=None):
"""
Return a dictionary to be used as the body of the POST request
"""
body = {
'client_id': client.client_id,
'grant_type': 'password',
'username': user.username,
'password': 'test',
}
if token_type:
body['token_type'] = token_type
return body
@ddt.data('dop_client', 'dot_app')
def test_access_token_fields(self, client_attr):
client = getattr(self, client_attr)
response = self._post_request(self.user, client)
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertIn('access_token', data)
self.assertIn('expires_in', data)
self.assertIn('scope', data)
self.assertIn('token_type', data)
@ddt.data('dop_client', 'dot_app')
def test_jwt_access_token(self, client_attr):
client = getattr(self, client_attr)
response = self._post_request(self.user, client, token_type='jwt')
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertIn('expires_in', data)
self.assertEqual(data['token_type'], 'JWT')
self.assert_valid_jwt_access_token(data['access_token'], self.user, data['scope'].split(' '))
def test_dot_access_token_provides_refresh_token(self):
response = self._post_request(self.user, self.dot_app)
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertIn('refresh_token', data)
def test_dop_public_client_access_token(self):
response = self._post_request(self.user, self.dop_client)
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertNotIn('refresh_token', data)
@ddt.ddt
@httpretty.activate
class TestAccessTokenExchangeView(ThirdPartyOAuthTestMixinGoogle, ThirdPartyOAuthTestMixin, _DispatchingViewTestCase):
"""
Test class for AccessTokenExchangeView
"""
view_class = views.AccessTokenExchangeView
url = reverse('exchange_access_token', kwargs={'backend': 'google-oauth2'})
def _post_body(self, user, client, token_type=None):
return {
'client_id': client.client_id,
            'access_token': self.access_token,
}
@ddt.data('dop_client', 'dot_app')
def test_access_token_exchange_calls_dispatched_view(self, client_attr):
client = getattr(self, client_attr)
self.oauth_client = client
self._setup_provider_response(success=True)
response = self._post_request(self.user, client)
self.assertEqual(response.status_code, 200)
@ddt.ddt
class TestAuthorizationView(TestCase):
"""
Test class for AuthorizationView
"""
dop_adapter = adapters.DOPAdapter()
def setUp(self):
super(TestAuthorizationView, self).setUp()
self.user = UserFactory()
self.dop_client = self._create_confidential_client(user=self.user, client_id='dop-app-client-id')
def _create_confidential_client(self, user, client_id):
"""
Create a confidential client suitable for testing purposes.
"""
return self.dop_adapter.create_confidential_client(
name='test_app',
user=user,
client_id=client_id,
redirect_uri=DUMMY_REDIRECT_URL
)
def test_authorization_view(self):
self.client.login(username=self.user.username, password='test')
response = self.client.post(
'/oauth2/authorize/',
{
'client_id': self.dop_client.client_id, # TODO: DOT is not yet supported (MA-2124)
'response_type': 'code',
'state': 'random_state_string',
'redirect_uri': DUMMY_REDIRECT_URL,
},
follow=True,
)
self.assertEqual(response.status_code, 200)
# check form is in context and form params are valid
context = response.context_data # pylint: disable=no-member
self.assertIn('form', context)
self.assertIsNone(context['form']['authorize'].value())
self.assertIn('oauth_data', context)
oauth_data = context['oauth_data']
self.assertEqual(oauth_data['redirect_uri'], DUMMY_REDIRECT_URL)
self.assertEqual(oauth_data['state'], 'random_state_string')
class TestViewDispatch(TestCase):
"""
Test that the DispatchingView dispatches the right way.
"""
dop_adapter = adapters.DOPAdapter()
dot_adapter = adapters.DOTAdapter()
def setUp(self):
super(TestViewDispatch, self).setUp()
self.user = UserFactory()
self.view = views._DispatchingView() # pylint: disable=protected-access
self.dop_adapter.create_public_client(
name='',
user=self.user,
client_id='dop-id',
redirect_uri=DUMMY_REDIRECT_URL
)
self.dot_adapter.create_public_client(
name='',
user=self.user,
client_id='dot-id',
redirect_uri=DUMMY_REDIRECT_URL
)
def assert_is_view(self, view_candidate):
"""
Assert that a given object is a view. That is, it is callable, and
takes a request argument. Note: while technically, the request argument
could take any name, this assertion requires the argument to be named
`request`. This is good practice. You should do it anyway.
"""
_msg_base = u'{view} is not a view: {reason}'
msg_not_callable = _msg_base.format(view=view_candidate, reason=u'it is not callable')
msg_no_request = _msg_base.format(view=view_candidate, reason=u'it has no request argument')
self.assertTrue(hasattr(view_candidate, '__call__'), msg_not_callable)
args = view_candidate.func_code.co_varnames
self.assertTrue(args, msg_no_request)
self.assertEqual(args[0], 'request')
def _get_request(self, client_id):
"""
Return a request with the specified client_id in the body
"""
return RequestFactory().post('/', {'client_id': client
|
strahlex/machinekit
|
lib/python/gladevcp/gladebuilder.py
|
Python
|
lgpl-2.1
| 852
| 0.003521
|
#!/usr/bin/python2
# vim: sts=4 sw=4 et
import gtk
class GladeBuilder:
""" This is wrapper around Glade object that behaves just like gtk.Builder """
def __init__(self, glade):
self.glade = glade
def get_object(self, name):
        return self.glade.get_widget(name)
def get_objects(self):
return self.glade.get_widget_prefix("")
def connect_signals(self, *a, **kw):
        self.glade.signal_autoconnect(*a, **kw)
def widget_name(widget):
""" Helper function to retrieve widget name """
idname = None
if isinstance(widget, gtk.Buildable):
idname = gtk.Buildable.get_name(widget)
if idname is None and hasattr(widget, 'get_name'):
# XXX: Sometimes in Glade mode on HAL_VBox previous if is triggered
# but name is None.
return widget.get_name()
return idname
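# Editor's note: a minimal usage sketch (assumes a hypothetical libglade file
# "app.glade" and a handler dict -- neither is part of this module). It shows
# the point of the wrapper: code written against gtk.Builder keeps working
# when the UI was loaded through gtk.glade.
#
#   import gtk.glade
#   glade = gtk.glade.XML("app.glade")
#   builder = GladeBuilder(glade)
#   window = builder.get_object("window1")   # same call as on gtk.Builder
#   builder.connect_signals({"on_quit": gtk.main_quit})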
|
kparal/anaconda
|
pyanaconda/ui/tui/hubs/summary.py
|
Python
|
gpl-2.0
| 5,735
| 0.001918
|
# Summary text hub
#
# Copyright (C) 2012 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
# Red Hat Author(s): Martin Sivak <msivak@redhat.com>
# Jesse Keating <jkeating@redhat.com>
#
from pyanaconda.ui.lib.space import FileSystemSpaceChecker, DirInstallSpaceChecker
from pyanaconda.ui.tui.hubs import TUIHub
from pyanaconda.flags import flags
from pyanaconda.errors import CmdlineError
from pyanaconda.i18n import N_, _, C_
import sys
import time
import logging
log = logging.getLogger("anaconda")
class SummaryHub(TUIHub):
"""
.. inheritance-diagram:: SummaryHub
:parts: 3
"""
title = N_("Installation")
def __init__(self, app, data, storage, payload, instclass):
super(SummaryHub, self).__init__(app, data, storage, payload, instclass)
if not flags.dirInstall:
self._checker = FileSystemSpaceChecker(storage, payload)
else:
self._checker = DirInstallSpaceChecker(storage, payload)
def setup(self, environment="anaconda"):
should_schedule = TUIHub.setup(self, environment=environment)
if not should_schedule:
return False
if flags.automatedInstall:
sys.stdout.write(_("Starting automated install"))
sys.stdout.flush()
spokes = self._keys.values()
while not all(spoke.ready for spoke in spokes):
sys.stdout.write('.')
sys.stdout.flush()
time.sleep(1)
print('')
for spoke in spokes:
if spoke.changed:
spoke.execute()
return True
# override the prompt so that we can skip user input on kickstarts
# where all the data is in hand. If not in hand, do the actual prompt.
def prompt(self, args=None):
incompleteSpokes = [spoke for spoke in self._keys.values()
if spoke.mandatory and not spoke.completed]
# do a bit of final sanity checking, make sure pkg selection
# size < available fs space
if flags.automatedInstall:
if self._checker and not self._checker.check():
                print(self._checker.error_message)
if not incompleteSpokes:
self.close()
return None
if flags.ksprompt:
for spoke in incompleteSpokes:
log.info("kickstart installation stopped for info: %s", spoke.title)
else:
errtxt = _("The following mandatory spokes are not comp
|
leted:") + \
"\n" + "\n".join(spoke.title for spoke in incompleteSpokes)
log.error("CmdlineError: %s", errtxt)
raise CmdlineError(errtxt)
# override the default prompt since we want to offer the 'b' to begin
# installation option here
return _(" Please make your choice from above ['%(quit)s' to quit | '%(begin)s' to begin installation |\n '%(refresh)s' to refresh]: ") % {
# TRANSLATORS: 'q' to quit
'quit': C_('TUI|Spoke Navigation', 'q'),
# TRANSLATORS: 'b' to begin installation
'begin': C_('TUI|Spoke Navigation', 'b'),
# TRANSLATORS: 'r' to refresh
'refresh': C_('TUI|Spoke Navigation', 'r')
}
def input(self, args, key):
"""Handle user input. Numbers are used to show a spoke, the rest is passed
to the higher level for processing."""
try:
number = int(key)
self.app.switch_screen_with_return(self._keys[number])
return None
except (ValueError, KeyError):
# If we get a continue, check for unfinished spokes. If unfinished
# don't continue
# TRANSLATORS: 'b' to begin installation
if key == C_('TUI|Spoke Navigation', 'b'):
for spoke in self._spokes.values():
if not spoke.completed and spoke.mandatory:
print(_("Please complete all spokes before continuing"))
return False
# do a bit of final sanity checking, making sure pkg selection
# size < available fs space
if self._checker and not self._checker.check():
print(self._checker.error_message)
return False
if self.app._screens:
self.app.close_screen()
return True
# TRANSLATORS: 'c' to continue
elif key == C_('TUI|Spoke Navigation', 'c'):
# Kind of a hack, but we want to ignore if anyone presses 'c'
# which is the global TUI key to close the current screen
return False
else:
return super(SummaryHub, self).input(args, key)
|
nicko96/Chrome-Infra
|
appengine/findit/waterfall/masters.py
|
Python
|
bsd-3-clause
| 919
| 0.002176
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
_SUPPORTED_MASTERS = [
# Tree-closer.
'chromium',
'chromium.win',
'chromium.mac',
'chromium.linux',
    'chromium.chromiumos',
'chromium.chrome',
'chromium.memory',
# Non-tree-closer.
]
# Explicitly list unsupported masters. Additional work might be needed in order
# to support them.
_UNSUPPORTED_MASTERS = [
'chromium.lkgr', # Disable as results are not showed on Sheriff-o-Matic.
'chromium.gpu', # Disable as too many false positives.
'chromium.memory.fyi',
'chromium.gpu.fyi',
'chromium.webkit',
'chromium.perf',
]
def MasterIsSupported(master_name): # pragma: no cover.
"""Return True if the given master is supported, otherwise False."""
return master_name in _SUPPORTED_MASTERS
|
mikelum/pyspeckit
|
pyspeckit/spectrum/writers/txt_writer.py
|
Python
|
mit
| 1,770
| 0.011864
|
from __future__ import print_function
import os  # used by write_data when clobber is False

try:
    import atpy
    atpyOK = True
except ImportError:
    atpyOK = False
# rewrite this garbage
class write_txt(object):
def __init__(self, Spectrum):
self.Spectrum = Spectrum
def write_data(self, clobber = True):
"""
Write all fit information to an ASCII file.
"""
fn = "{0}_fit.dat".format(self.Spectrum.fileprefix)
if not clobber:
i = 1
while os.path.exists(fn):
fn = "{0}_fit({1}).dat".format(self.Spectrum.fileprefix, i)
i += 1
with open(fn, 'w') as f:
# Print header
print("# Column 1: {0}".format("x-values"), file=f)
print("# Column 2: {0}".format("model spectrum"), file=f)
for i, element in enumerate(self.Spectrum.specfit.modelcomponents):
print("# Column {0}: model spectrum component {1}".format(i + 3, i + 1), file=f)
print("# Column {0}: residuals".format(i + 4), file=f)
            print("", file=f)
            components = list(zip(*self.Spectrum.specfit.modelcomponents))
for i, element in enumerate(self.Spectrum.specfit.model):
line = "{0:10}{1:10}".format(se
|
lf.Spectrum.xarr[self.Spectrum.specfit.gx1:self.Spectrum.specfit.gx2][i],
round(self.Spectrum.specfit.model[i], 5))
for j, component in enumerate(components[i]): line += "{0:10}".format(round(component, 5))
line += "{0:10}".format(round(self.Spectrum.specfit.residuals[i], 5))
print(line, file=f)
print("", file=f)
|
Intel-Corporation/tensorflow
|
tensorflow/python/ops/linalg/linear_operator_toeplitz.py
|
Python
|
apache-2.0
| 11,235
| 0.003115
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""`LinearOperator` acting like a Toeplitz matrix."""
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops.linalg import linalg_impl as linalg
from tensorflow.python.ops.linalg import linear_operator
from tensorflow.python.ops.linalg import linear_operator_circulant
from tensorflow.python.ops.linalg import linear_operator_util
from tensorflow.python.ops.signal import fft_ops
from tensorflow.python.util.tf_export import tf_export
__all__ = ["LinearOperatorToeplitz",]
@tf_export("linalg.LinearOperatorToeplitz")
@linear_operator.make_composite_tensor
class LinearOperatorToeplitz(linear_operator.LinearOperator):
"""`LinearOperator` acting like a [batch] of toeplitz matrices.
This operator acts like a [batch] Toeplitz matrix `A` with shape
`[B1,...,Bb, N, N]` for some `b >= 0`. The first `b` indices index a
batch member. For every batch index `(i1,...,ib)`, `A[i1,...,ib, : :]` is
an `N x N` matrix. This matrix `A` is not materialized, but for
purposes of broadcasting this shape will be relevant.
#### Description in terms of toeplitz matrices
Toeplitz means that `A` has constant diagonals. Hence, `A` can be generated
with two vectors. One represents the first column of the matrix, and the
other represents the first row.
Below is a 4 x 4 example:
```
A = |a b c d|
|e a b c|
|f e a b|
|g f e a|
```
#### Example of a Toeplitz operator.
```python
# Create a 3 x 3 Toeplitz operator.
col = [1., 2., 3.]
row = [1., 4., -9.]
operator = LinearOperatorToeplitz(col, row)
operator.to_dense()
==> [[1., 4., -9.],
[2., 1., 4.],
[3., 2., 1.]]
operator.shape
==> [3, 3]
operator.log_abs_determinant()
==> scalar Tensor
x = ... Shape [3, 4] Tensor
operator.matmul(x)
==> Shape [3, 4] Tensor
```
#### Shape compatibility
This operator acts on [batch] matrix with compatible shape.
`x` is a batch matrix with compatible shape for `matmul` and `solve` if
```
operator.shape = [B1,...,Bb] + [N, N], with b >= 0
x.shape = [C1,...,Cc] + [N, R],
and [C1,...,Cc] broadcasts with [B1,...,Bb] to [D1,...,Dd]
```
#### Matrix property hints
This `LinearOperator` is initialized with boolean flags of the form `is_X`,
for `X = non_singular, self_adjoint, positive_definite, square`.
These have the following meaning:
* If `is_X == True`, callers should expect the operator to have the
property `X`. This is a promise that should be fulfilled, but is *not* a
runtime assert. For example, finite floating point precision may result
in these promises being violated.
* If `is_X == False`, callers should expect the operator to not have `X`.
* If `is_X == None` (the default), callers should have no expectation either
way.
"""
def __init__(self,
col,
row,
is_non_singular=None,
is_self_adjoint=None,
is_positive_definite=None,
is_square=None,
name="LinearOperatorToeplitz"):
r"""Initialize a `LinearOperatorToeplitz`.
Args:
col: Shape `[B1,...,Bb, N]` `Tensor` with `b >= 0` `N >= 0`.
The first column of the operator. Allowed dtypes: `float16`, `float32`,
`float64`, `complex64`, `complex128`. Note that the first entry of
`col` is assumed to be the same as the first entry of `row`.
row: Shape `[B1,...,Bb, N]` `Tensor` with `b >= 0` `N >= 0`.
The first row of the operator. Allowed dtypes: `float16`, `float32`,
`float64`, `complex64`, `complex128`. Note that the first entry of
`row` is assumed to be the same as the first entry of `col`.
is_non_singular: Expect that this operator is non-singular.
is_self_adjoint: Expect that this operator is equal to its hermitian
transpose. If `diag.dtype` is real, this is auto-set to `True`.
is_positive_definite: Expect that this operator is positive definite,
meaning the quadratic form `x^H A x` has positive real part for all
nonzero `x`. Note that we do not require the operator to be
self-adjoint to be positive-definite. See:
https://en.wikipedia.org/wiki/Positive-definite_matrix#Extension_for_non-symmetric_matrices
is_square: Expect that this operator acts like square [batch] matrices.
name: A name for this `LinearOperator`.
"""
parameters = dict(
col=col,
row=row,
is_non_singular=is_non_singular,
is_self_adjoint=is_self_adjoint,
is_positive_definite=is_positive_definite,
is_square=is_square,
name=name
)
with ops.name_scope(name, values=[row, col]):
self._row = linear_operator_util.convert_nonref_to_tensor(row, name="row")
self._col = linear_operator_util.convert_nonref_to_tensor(col, name="col")
self._check_row_col(self._row, self._col)
if is_square is False: # pylint:disable=g-bool-id-comparison
raise ValueError("Only square Toeplitz operators currently supported.")
is_square = True
      super(LinearOperatorToeplitz, self).__init__(
dtype=self._row.dtype,
is_non_singular=is_non_singular,
is_self_adjoint=is_self_adjoint,
is_positive_definite=is_positive_definite,
is_square=is_square,
parameters=parameters,
name=name)
self._set_graph_parents([self._row, self._col])
def _check_row_col(self, row, col):
"""Static check of row and column."""
for name, tensor in [["row", row], ["col", col]]:
      if tensor.shape.ndims is not None and tensor.shape.ndims < 1:
raise ValueError("Argument {} must have at least 1 dimension. "
"Found: {}".format(name, tensor))
if row.shape[-1] is not None and col.shape[-1] is not None:
if row.shape[-1] != col.shape[-1]:
raise ValueError(
"Expected square matrix, got row and col with mismatched "
"dimensions.")
def _shape(self):
    # If the broadcast shape of row and col is [5, 3], we return [5, 3, 3].
v_shape = array_ops.broadcast_static_shape(
self.row.shape, self.col.shape)
return v_shape.concatenate(v_shape[-1:])
def _shape_tensor(self, row=None, col=None):
row = self.row if row is None else row
col = self.col if col is None else col
v_shape = array_ops.broadcast_dynamic_shape(
array_ops.shape(row),
array_ops.shape(col))
k = v_shape[-1]
return array_ops.concat((v_shape, [k]), 0)
def _assert_self_adjoint(self):
return check_ops.assert_equal(
self.row,
self.col,
message=("row and col are not the same, and "
"so this operator is not self-adjoint."))
# TODO(srvasude): Add efficient solver and determinant calculations to this
# class (based on Levinson recursion.)
def _matmul(self, x, adjoint=False, adjoint_arg=False):
# Given a Toeplitz matrix, we can embed it in a Circulant matrix to perform
# efficient matrix multiplications. Given a Toeplitz matrix with first row
# [t_0, t_1, ... t_{n-1}] and first column [t0, t_{-1}, ..., t_{-(n-1)},
# let C by the circulant matrix with first column [t0, t_{-1}, ...,
# t_{-(n-1)}, 0, t_{n-1}, ..., t_1]. Also adjoin to our input
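    # [Editor's note: the derivation above is cut off in this snippet. A
    # minimal, self-contained NumPy sketch of the circulant-embedding trick it
    # describes (illustrative only, not part of the TensorFlow source; assumes
    # a real-valued Toeplitz matrix given by its first column `col` and first
    # row `row`, with col[0] == row[0]):
    #
    #   import numpy as np
    #
    #   def toeplitz_matvec(col, row, x):
    #       n = len(col)
    #       # First column of the 2n x 2n circulant embedding:
    #       # [t_0, t_{-1}, ..., t_{-(n-1)}, 0, t_{n-1}, ..., t_1]
    #       c = np.concatenate([col, [0.0], row[:0:-1]])
    #       x_pad = np.concatenate([x, np.zeros(n)])
    #       # A circulant matvec is a pointwise product in Fourier space; the
    #       # first n entries recover the Toeplitz product T @ x.
    #       y = np.fft.ifft(np.fft.fft(c) * np.fft.fft(x_pad))
    #       return np.real(y[:n])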
|
wezs/uktils
|
uktils/third/aspn426123.py
|
Python
|
gpl-2.0
| 14,222
| 0.006961
|
#!/usr/bin/env python
# -*- coding: iso-8859-1 -*-
################################################################################
#
# Method call parameters/return value type checking decorators.
# (c) 2006-2007, Dmitry Dvoinikov <dmitry@targeted.org>
# Distributed under BSD license.
#
# Samples:
#
# from typecheck import *
#
# @takes(int, str) # takes int, str, upon a problem throws InputParameterError
# @returns(int) # returns int, upon a problem throws ReturnValueError
# def foo(i, s):
# return i + len(s)
#
# @takes((int, long), by_regex("^[0-9]+$")) # int or long, numerical string
# def foo(i, s, anything): # and the third parameter is not checked
# ...
#
# @takes(int, int, foo = int, bar = optional(int)) # keyword argument foo must be int
# def foo(a, b, **kwargs): # bar may be int or missing
# ...
#
# Note: @takes for positional arguments, @takes for keyword arguments and @returns
# all support the same checker syntax, for example for the following declaration
#
# @takes(C)
# def foo(x):
# ...
#
# then C may be one of the simple checkers:
#
# --------- C --------- ------------- semantics -------------
# typename ==> ok if x is is an instance of typename
# "typename" ==> ok if x is is an instance of typename
# with_attr("a", "b") ==> ok if x has specific attributes
# some_callable ==> ok if some_callable(x) is True
# one_of(1, "2") ==> ok if x is one of the literal values
# by_regex("^foo$") ==> ok if x is a matching basestring
# nothing ==> ok if x is None
# anything ==> always ok
#
# simple checkers can further be combined with OR semantics using tuples:
#
# --------- C --------- ------------- semantics -------------
# (checker1, checker2) ==> ok if x conforms with either checker
#
# be optional:
#
# --------- C --------- ------------- semantics -------------
# optional(checker) ==> ok if x is checker-conformant or None
#
# or nested recursively into one of the following checkers
#
# --------- C --------- ------------- semantics -------------
# list_of(checker) ==> ok if x is a list of checker-conformant values
# tuple_of(checker) ==> ok if x is a tuple of checker-conformant values
# dict_of(key_checker, value_checker) ==> ok if x is a dict mapping key_checker-
# conformant keys to value_checker-conformant values
#
# More samples:
#
# class foo(object):
# @takes("foo", optional(int)) # foo, maybe int, but foo is yet incomplete
# def __init__(self, i = None): # and is thus specified by name
# ...
# @takes("foo", int) # foo, and int if presents in args,
# def bar(self, *args): # if args is empty, the check passes ok
# ...
# @takes("foo")
# @returns(object) # returns foo which is fine, because
# def biz(self): # foo is an object
# return self
# @classmethod # classmethod's and staticmethod's
# @takes(type) # go same way
# def baz(cls):
# ...
#
# @takes(int)
# @returns(optional("int", foo)) # returns either int, foo or NoneType
# def bar(i): # "int" (rather than just int) is for fun
# if i > 0:
# return i
# elif i == 0:
# return foo() # otherwise returns NoneType
#
# @takes(callable) # built-in functions are treated as predicates
# @returns(lambda x: x == 123) # and so do user-defined functions or lambdas
# def execute(f, *args, **kwargs):
# return f(*args, **kwargs)
#
# assert execute(execute, execute, execute, lambda x: x, 123) == 123
#
# def readable(x): # user-defined type-checking predicate
# return hasattr(x, "read")
#
# anything is an alias for predicate lambda: True,
# nothing is an alias for NoneType, as in:
#
# @takes(callable, readable, optional(anything), optional(int))
# @returns(nothing)
# def foo(f, r, x = None, i = None):
# ...
#
# @takes(with_attr("read", "write")) # another way of protocol checking
# def foo(pipe):
# ...
#
# @takes(list_of(int)) # list of ints
# def foo(x):
# print x[0]
#
# @takes(tuple_of(callable)) # tuple of callables
# def foo(x):
# print x[0]()
#
# @takes(dict_of(str, list_of(int))) # dict mapping strs to lists of int
# def foo(x):
# print sum(x["foo"])
#
# @takes(by_regex("^[0-9]{1,8}$")) # integer-as-a-string regex
# def foo(x):
# i = int(x)
#
# @takes(one_of(1, 2)) # must be equal to either one
# def set_version(version):
# ...
#
# The (3 times longer) source code with self-tests is available from:
# http://www.targeted.org/python/recipes/typecheck.py
#
################################################################################
__all__ = [ "takes", "InputParameterError", "returns", "ReturnValueError",
"optional", "nothing", "anything", "list_of", "tuple_of", "dict_of",
"by_regex", "with_attr", "one_of" ]
no_check = False # set this to True to turn all checks off
################################################################################
from inspect import getargspec, isfunction, isbuiltin, isclass
from types import NoneType
from re import compile as regex
################################################################################
def base_names(C):
"Returns list of base class names for a given class"
return [ x.__name__ for x in C.__mro__ ]
################################################################################
def type_name(v):
"Returns the name of the passed value's type"
return type(v).__name__
################################################################################
class Checker(object):
def __init__(self, reference):
self.reference = reference
def check(self, value): # abstract
pass
_registered = [] # a list of registered descendant class factories
@staticmethod
def create(value): # static factory method
for f, t in Checker._registered:
if f(value):
return t(value)
else:
return None
################################################################################
class TypeChecker(Checker):
def check(self, value):
return isinstance(value, self.reference)
Checker._registered.append((isclass, TypeChecker))
nothing = NoneType
################################################################################
class StrChecker(Checker):
def check(self, value):
value_base_names = base_names(type(value))
return self.reference in value_base_names or "instance" in value_base_names
Checker._registered.append((lambda x: isinstance(x, str), StrChecker))
################################################################################
class TupleChecker(Checker):
def __init__(self, reference):
self.reference = map(Checker.create, reference)
def check(self, value):
return reduce(lambda r, c: r or c.check(value), self.reference, False)
Checker._registered.append((lambda x: isinstance(x, tuple) and not
filter(lambda y: Checker.create(y) is None,
x),
TupleChecker))
optional = lambda *args: args + (NoneType, )
################################################################################
class FunctionChecker(Checker):
def check(self, value):
return self.reference(value)
Checker._registered.append((lambda x: isfunction(x) or isbuiltin(x),
FunctionChecker))
anything = lambda *args: True
################################################################################
class ListOfChecker(Checker):
def __init__(self, reference):
self.reference = Checker.create(reference)
def check(self, value):
return isinstance(value, list) and \
not filter(lambda e: not self.reference.check(e), value)
list_of = lambda *args: lambda value: ListOfChecker(*args).c
|
tgrogers/gpgpu-sim_simulations
|
util/plotting/correl_mappings.py
|
Python
|
bsd-2-clause
| 21,360
| 0.039326
|
# This file is eval'd inside the plot-correlation.py file
# This maps the named GPGPU-Sim config to the card name reported in the nvprof file.
# Every time you want to correlate a new configuration, you need to map it here.
config_maps = \
{
"PUB_TITANX": "TITAN X (Pascal)",
"TITANX_P102": "TITAN X (Pascal)",
"TITANX": "TITAN X (Pascal)",
"TITANXX": "TITAN X (Pascal)",
"TITANX_OLD": "TITAN X (Pascal)",
"TITANV": "TITAN V",
"TITANV_OLD": "TITAN V",
"3.x_PASCALTITANX" : "TITAN X (Pascal)",
"3.x_P100" : "Tesla P100",
"P100_HBM" : "Tesla P100",
"GTX480" : "GeForce GTX 480",
"GTX1080Ti" : "GeForce GTX 1080 Ti",
"TITANK" : "GeForce GTX TITAN",
"QV100" : "Quadro GV100",
"QV100_old" : "Quadro GV100",
"QV100_SASS" : "Quadro GV100",
"RTX2060" : "GeForce RTX 2060",
# "QV100" : "Tesla V100-SXM2-32GB",
# "QV100_old" : "Tesla V100-SXM2-32GB",
# "QV100_SASS" : "Tesla V100-SXM2-32GB"
}
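# Illustrative (hypothetical) entry for a new card -- the key is the GPGPU-Sim
# config name and the value must match the device name nvprof reports, e.g.:
#    "MYCARD_CONFIG": "GeForce GTX 9999",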
# Every stat you want to correlate gets an entry here.
# For cycles, the math is different for every card, so we have different stats based on the hardware.
import collections
CorrelStat = collections.namedtuple('CorrelStat', 'chart_name hw_eval hw_error sim_eval hw_name plotfile drophwnumbelow plottype stattype')
correl_list = \
[
# 1200 MHz
CorrelStat(chart_name="Cycles",
plotfile="titanv-cycles",
hw_eval="np.average(hw[\"Duration\"])*1200",
hw_error="np.max(hw[\"Duration\"])*1200 - np.average(hw[\"Duration\"])*1200,"+\
"np.average(hw[\"Duration\"])*1200 - np.min(hw[\"Duration\"])*1200",
sim_eval="float(sim[\"gpu_tot_sim_cycle\s*=\s*(.*)\"])",
hw_name="TITAN V",
drophwnumbelow=0,
plottype="log",
stattype="counter"
),
# 1417 MHz
CorrelStat(chart_name="Cycles",
plotfile="titanx-p102-cycles",
hw_eval="np.average(hw[\"Duration\"])*1417",
hw_error="np.max(hw[\"Duration\"])*1417 - np.average(hw[\"Duration\"])*1417,"+\
"np.average(hw[\"Duration\"])*1417 - np.min(hw[\"Duration\"])*1417",
sim_eval="float(sim[\"gpu_tot_sim_cycle\s*=\s*(.*)\"])",
hw_name="TITAN X (Pascal)",
drophwnumbelow=0,
plottype="log",
stattype="counter"
),
# (1400 MHz - 16-wide SIMD)
CorrelStat(chart_name="Cycles",
plotfile="gtx480-cycles",
hw_eval="np.average(hw[\"Duration\"])*1400",
hw_error="np.max(hw[\"Duration\"])*1400 - np.average(hw[\"Duration\"])*1400,"+\
"np.average(hw[\"Duration\"])*1400 - np.min(hw[\"Duration\"])*1400",
sim_eval="float(sim[\"gpu_tot_sim_cycle\s*=\s*(.*)\"])*2",
hw_name="GeForce GTX 480",
drophwnumbelow=0,
plottype="log",
stattype="counter"
),
# 1480 MHz
CorrelStat(chart_name="Cycles",
plotfile="p100-cycles",
hw_eval="np.average(hw[\"Duration\"])*1480",
hw_error="np.max(hw[\"Duration\"])*1480 - np.average(hw[\"Duration\"])*1480,"+\
"np.average(hw[\"Duration\"])*1480 - np.min(hw[\"Duration\"])*1480",
sim_eval="float(sim[\"gpu_tot_sim_cycle\s*=\s*(.*)\"])",
hw_name="Tesla P100",
drophwnumbelow=0,
plottype="log",
stattype="counter"
),
# 1480 MHz
CorrelStat(chart_name="Cycles",
plotfile="1080ti-cycles",
hw_eval="np.average(hw[\"Duration\"])*1480",
hw_error="np.max(hw[\"Duration\"])*1480 - np.average(hw[\"Duration\"])*1480,"+\
"np.average(hw[\"Duration\"])*1480 - np.min(hw[\"Duration\"])*1480",
sim_eval="float(sim[\"gpu_tot_sim_cycle\s*=\s*(.*)\"])",
hw_name="GeForce GTX 1080 Ti",
drophwnumbelow=0,
plottype="log",
stattype="counter"
),
# 1132 MHz
CorrelStat(chart_name="Cycles",
plotfile="gv100-cycles",
hw_eval="np.average(hw[\"Duration\"])*1132",
hw_error="np.max(hw[\"Duration\"])*1132 - np.average(hw[\"Duration\"])*1132,"+\
"np.average(hw[\"Duration\"])*1132 - np.min(hw[\"Duration\"])*1132",
sim_eval="float(sim[\"gpu_tot_sim_cycle\s*=\s*(.*)\"])",
hw_name="Quadro GV100",
drophwnumbelow=0,
plottype="log",
stattype="counter"
),
CorrelStat(chart_name="Cycles",
plotfile="qv100_sm_cycles",
hw_eval="np.average(hw[\"elapsed_cycles_sm\"])/80",
hw_error="np.max(hw[\"elapsed_cycles_sm\"])/80 - np.average(hw[\"elapsed_cycles_sm\"])/80,"+\
"np.average(hw[\"elapsed_cycles_sm\"])/80 - np.min(hw[\"elapsed_cycles_sm\"])/80",
sim_eval="float(sim[\"gpu_tot_sim_cycle\s*=\s*(.*)\"])",
hw_name="Quadro GV100",
drophwnumbelow=0,
plottype="log",
stattype="counter"
),
CorrelStat(chart_name="RTX 2060 SM Cycles",
plotfile="rtx2060_sm_cycles",
hw_eval="np.average(hw[\"gpc__cycles_elapsed.avg\"])",
hw_error="np.max(hw[\"gpc__cycles_elapsed.avg\"]) - np.average(hw[\"gpc__cycles_elapsed.avg\"]),"+\
"np.average(hw[\"gpc__cycles_elapsed.avg\"]) - np.min(hw[\"gpc__cycles_elapsed.avg\"])",
sim_eval="float(sim[\"gpu_tot_sim_cycle\s*=\s*(.*)\"])",
hw_name="GeForce RTX 2060",
drophwnumbelow=0,
plottype="log",
stattype="counter"
),
# 1455 MHz
CorrelStat(chart_name="Cycles",
plotfile="tv100-cycles",
hw_eval="np.average(hw[\"Duration\"])*1455",
hw_error="np.max(hw[\"Duration\"])*1455 - np.average(hw[\"Duration\"])*1455,"+\
"np.average(hw[\"Duration\"])*1455 - np.min(hw[\"Duration\"])*1455",
sim_eval="float(sim[\"gpu_tot_sim_cycle\s*=\s*(.*)\"])",
hw_name="Tesla V100-SXM2-32GB",
drophwnumbelow=0,
plottype="log",
stattype="counter"
),
CorrelStat(chart_name="TESLA V100 SM Cycles",
plotfile="tv100_sm_cycles",
hw_eval="np.average(hw[\"elapsed_cycles_sm\"])/80",
hw_error="np.max(hw[\"elap
|
sed_cycles_sm\"])/80 - np.average(hw[\"elapsed_cycles_sm\"])/80,"+\
"np.average(hw[\"elapsed_cycles_sm\"])/80 - np.min(hw[\"elapsed_cycles_sm\"])/80",
sim_eval="float(sim[\"gpu_tot_sim_cycle\s*=\s*(.*)\"])",
hw_name="Tesla V100-SXM2-32GB",
drophwnumbelow=0,
plottype="log",
stattype="counter"
),
#837 MHZ
CorrelStat(chart_name="Cycles",
plotfile="kepler-cycles",
hw_eval="np.average(hw[\"D
|
uration\"])*837",
hw_error="np.max(hw[\"Duration\"])*837 - np.average(hw[\"Duration\"])*837,"+\
"np.average(hw[\"Duration\"])*837 - np.min(hw[\"Duration\"])*837",
sim_eval="float(sim[\"gpu_tot_sim_cycle\s*=\s*(.*)\"])",
hw_name="GeForce GTX TITAN",
drophwnumbelow=0,
plottype="log",
stattype="counter"
),
CorrelStat(chart_name="TITAN KEPLER Cycles",
plotfile="kepler_sm_cycles",
hw_eval="np.average(hw[\"elapsed_cycles_sm\"])/14",
hw_error="np.max(hw[\"elapsed_cycles_sm\"])/14 - np.average(hw[\"elapsed_cycles_sm\"])/14,"+\
"np.average(hw[\"elapsed_cycles_sm\"])/14 - np.min(hw[\"elapsed_cycles_sm\"])/14",
sim_eval="float(sim[\"gpu_tot_sim_cycle\s*=\s*(.*)\"])",
hw_name="GeForce GTX TITAN",
drophwnumbelow=0,
plottype="log",
stattype="counter"
),
#Turing
CorrelStat(chart_name="TITAN TURING Cycles",
plotfile="turing_sm_cycles",
hw_eval="np.average(hw[\"gpc__cycles_elapsed.avg\"])",
hw_error="np.max(hw[\"gpc__cycles_elapsed.avg\"]) - np.average(hw[\"gpc__cycles_elapsed.avg\"]),"+\
"np.average(hw[\"gpc__cycles_elapsed.avg\"]) - np.min(hw[\"gpc__cycles_elapsed.avg\"])",
sim_eval="float(sim[\"gpu_tot_sim_cycle\s*=\s*(.*)\"])",
hw_name="GeForce RTX 2060",
drophwnumbelow=0,
plottype="log",
stattype="counter"
),
# Common, non-cycle stats for nvprof
CorrelStat(chart_name="Warp Instructions",
plotfile="warp-inst",
hw_eval="np.average(hw[\"inst_issued\"])",
hw_error=None,
sim_eval="float(sim[\"gpgpu_n_tot_w_icount\s*
|
Beckhoff/ADS
|
doc/source/conf.py
|
Python
|
mit
| 2,430
| 0.000412
|
# -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/master/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------
project = 'adstool manual'
copyright = '2015 - 2021, Beckhoff Automation GmbH & Co. KG'
author = 'Patrick Brünn'
# The short X.Y version
version = ''
# The full version, including alpha/beta/rc tags
release = ''
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = None
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('adstool', 'adstool', 'adstool Documentation', [author], 1),
]
|
plotly/python-api
|
packages/python/plotly/plotly/validators/scattercarpet/marker/_opacitysrc.py
|
Python
|
mit
| 474
| 0.00211
|
import _plotly_utils.basevalidators
class OpacitysrcValidator(_plotly_utils.basevalidators.SrcValidator):
def __init__(
self, plotly_name="opacitysrc", parent_name="scattercarpet.marker", **kwargs
):
super(OpacitysrcValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "none"),
role=kwargs.pop("role", "info"),
            **kwargs
        )
|
CSCI1200Course/csci1200OnlineCourse
|
tests/functional/modules_data_source_providers.py
|
Python
|
apache-2.0
| 14,954
| 0.000134
|
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for modules/data_source_providers/."""
__author__ = 'Mike Gainer (mgainer@google.com)'
import datetime
import actions
from common import utils
from controllers import sites
from models import courses
from models import models
from models import transforms
from tests.functional import actions
class CourseElementsTest(actions.TestBase):
def setUp(self):
super(CourseElementsTest, self).setUp()
sites.setup_courses('course:/test::ns_test, course:/:/')
self._course = courses.Course(
None, app_context=sites.get_all_courses()[0])
actions.login('admin@google.com', is_admin=True)
def test_assessments_schema(self):
response = transforms.loads(self.get(
'/test/rest/data/assessments/items').body)
self.assertIn('unit_id', response['schema'])
self.assertIn('title', response['schema'])
self.assertIn('weight', response['schema'])
self.assertIn('html_check_answers', response['schema'])
self.assertIn('properties', response['schema'])
def test_units_schema(self):
response = transforms.loads(self.get(
'/test/rest/data/units/items').body)
self.assertIn('unit_id', response['schema'])
self.assertIn('title', response['schema'])
self.assertIn('properties', response['schema'])
def test_lessons_schema(self):
response = transforms.loads(self.get(
'/test/rest/data/lessons/items').body)
self.assertIn('lesson_id', response['schema'])
self.assertIn('unit_id', response['schema'])
self.assertIn('title', response['schema'])
self.assertIn('scored', response['schema'])
self.assertIn('has_activity', response['schema'])
self.assertIn('activity_title', response['schema'])
def test_no_assessments_in_course(self):
response = transforms.loads(self.get(
'/test/rest/data/assessments/items').body)
self.assertListEqual([], response['data'])
def test_no_units_in_course(self):
response = transforms.loads(self.get(
'/test/rest/data/units/items').body)
self.assertListEqual([], response['data'])
def test_no_lessons_in_course(self):
response = transforms.loads(self.get(
'/test/rest/data/lessons/items').body)
self.assertListEqual([], response['data'])
def test_one_assessment_in_course(self):
title = 'Plugh'
weight = 123
html_check_answers = True
properties = {'a': 456, 'b': 789}
assessment1 = self._course.add_assessment()
        assessment1.title = title
assessment1.weight = weight
        assessment1.html_check_answers = html_check_answers
assessment1.properties = properties
self._course.save()
response = transforms.loads(self.get(
'/test/rest/data/assessments/items').body)
self.assertEquals(1, len(response['data']))
self.assertEquals(title, response['data'][0]['title'])
self.assertEquals(weight, response['data'][0]['weight'])
self.assertEquals(html_check_answers,
response['data'][0]['html_check_answers'])
self.assertEquals(properties, response['data'][0]['properties'])
def test_one_unit_in_course(self):
title = 'Plugh'
properties = {'a': 456, 'b': 789}
unit1 = self._course.add_unit()
unit1.title = title
unit1.properties = properties
self._course.save()
response = transforms.loads(self.get(
'/test/rest/data/units/items').body)
self.assertEquals(1, len(response['data']))
self.assertEquals(title, response['data'][0]['title'])
self.assertEquals(properties, response['data'][0]['properties'])
def test_one_lesson_in_course(self):
title = 'Plover'
scored = True
has_activity = True
activity_title = 'Xyzzy'
unit1 = self._course.add_unit()
lesson1 = self._course.add_lesson(unit1)
lesson1.title = title
lesson1.scored = scored
lesson1.has_activity = has_activity
lesson1.activity_title = activity_title
self._course.save()
response = transforms.loads(self.get(
'/test/rest/data/lessons/items').body)
self.assertEquals(1, len(response['data']))
self.assertEquals(unit1.unit_id, response['data'][0]['unit_id'])
self.assertEquals(title, response['data'][0]['title'])
self.assertEquals(scored, response['data'][0]['scored'])
self.assertEquals(has_activity, response['data'][0]['has_activity'])
self.assertEquals(activity_title, response['data'][0]['activity_title'])
def test_unit_and_assessment(self):
self._course.add_assessment()
self._course.add_unit()
self._course.save()
response = transforms.loads(self.get(
'/test/rest/data/units/items').body)
self.assertEquals(1, len(response['data']))
self.assertEquals('New Unit', response['data'][0]['title'])
response = transforms.loads(self.get(
'/test/rest/data/assessments/items').body)
self.assertEquals(1, len(response['data']))
self.assertEquals('New Assessment', response['data'][0]['title'])
def test_stable_ids(self):
self._course.add_assessment()
unit2 = self._course.add_unit()
self._course.add_assessment()
self._course.add_unit()
self._course.add_assessment()
self._course.add_unit()
self._course.add_assessment()
self._course.add_unit()
self._course.add_assessment()
self._course.add_unit()
self._course.add_assessment()
self._course.add_assessment()
self._course.add_assessment()
self._course.add_unit()
self._course.save()
response = transforms.loads(self.get(
'/test/rest/data/units/items').body)
self.assertListEqual([2, 4, 6, 8, 10, 14],
[u['unit_id'] for u in response['data']])
self._course.delete_unit(unit2)
self._course.save()
response = transforms.loads(self.get(
'/test/rest/data/units/items').body)
self.assertListEqual([4, 6, 8, 10, 14],
[u['unit_id'] for u in response['data']])
class StudentsTest(actions.TestBase):
def setUp(self):
super(StudentsTest, self).setUp()
sites.setup_courses('course:/test::ns_test, course:/:/')
self._course = courses.Course(
None, app_context=sites.get_all_courses()[0])
actions.login('admin@google.com', is_admin=True)
def test_students_schema(self):
response = transforms.loads(self.get(
'/test/rest/data/students/items').body)
self.assertNotIn('name', response['schema'])
self.assertNotIn('additional_fields', response['schema'])
self.assertIn('enrolled_on', response['schema'])
self.assertIn('user_id', response['schema'])
self.assertIn('is_enrolled', response['schema'])
self.assertIn('scores', response['schema'])
def test_no_students(self):
response = transforms.loads(self.get(
'/test/rest/data/students/items').body)
self.assertListEqual([], response['data'])
def test_one_student(self):
expected_enrolled_on = datetime.datetime.utcnow()
user_id = '123456'
is_enrolled = True
with utils.Namespace('ns_test'):
models.Student(user_id=user_id
|
nzlosh/st2
|
st2api/st2api/controllers/v1/traces.py
|
Python
|
apache-2.0
| 2,445
| 0.000818
|
# Copyright 2020 The StackStorm Authors.
# Copyright 2019 Extreme Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2api.controllers.resource import ResourceController
from st2common.models.api.trace import TraceAPI
from st2common.persistence.trace import Trace
from st2common.rbac.types import PermissionType
__all__ = ["TracesController"]
class TracesController(ResourceController):
model = TraceAPI
access = Trace
supported_filters = {
"trace_tag": "trace_tag",
"execution": "action_executions.object_id",
"rule": "rules.object_id",
"trigger_instance": "trigger_instances.object_id",
}
query_options = {"sort": ["-start_timestamp", "trace_tag"]}
def get_all(
self,
exclude_attributes=None,
include_attributes=None,
sort=None,
offset=0,
limit=None,
requester_user=None,
**raw_filters,
):
# Use a custom sort order when filtering on a timestamp so we return a correct result as
# expected by the user
query_options = None
if "sort_desc" in raw_filters and raw_filters["sort_desc"] == "True":
query_options = {"sort": ["-start_timestamp", "trace_tag"]}
elif "sort_asc" in raw_filters and raw_filters["sort_asc"] == "True":
query_options = {"sort": ["+start_timestamp", "trace_tag"]}
return self._get_all(
exclude_fields=exclude_attributes,
include_fields=include_attributes,
sort=sort,
offset=offset,
limit=limit,
query_options=query_options,
raw_filters=raw_filters,
            requester_user=requester_user,
)
def get_one(self, id, requester_user):
return self._get_one_by_id(
id, requester_user=requester_user, permission_type=PermissionType.TRACE_VIEW
)
traces_controller = TracesController()
|
pschmitt/home-assistant
|
tests/components/directv/test_media_player.py
|
Python
|
apache-2.0
| 14,615
| 0.000479
|
"""The tests for the DirecTV Media player platform."""
from datetime import datetime, timedelta
from typing import Optional
from pytest import fixture
from homeassistant.components.directv.media_player import (
ATTR_MEDIA_CURRENTLY_RECORDING,
ATTR_MEDIA_RATING,
ATTR_MEDIA_RECORDED,
ATTR_MEDIA_START_TIME,
)
from homeassistant.components.media_player.const import (
ATTR_INPUT_SOURCE,
ATTR_MEDIA_ALBUM_NAME,
ATTR_MEDIA_ARTIST,
ATTR_MEDIA_CHANNEL,
ATTR_MEDIA_CONTENT_ID,
ATTR_MEDIA_CONTENT_TYPE,
ATTR_MEDIA_DURATION,
ATTR_MEDIA_ENQUEUE,
ATTR_MEDIA_POSITION,
ATTR_MEDIA_POSITION_UPDATED_AT,
ATTR_MEDIA_SERIES_TITLE,
ATTR_MEDIA_TITLE,
DOMAIN as MP_DOMAIN,
MEDIA_TYPE_MOVIE,
MEDIA_TYPE_MUSIC,
MEDIA_TYPE_TVSHOW,
SERVICE_PLAY_MEDIA,
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_PLAY_MEDIA,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_STOP,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
SERVICE_MEDIA_NEXT_TRACK,
SERVICE_MEDIA_PAUSE,
SERVICE_MEDIA_PLAY,
SERVICE_MEDIA_PREVIOUS_TRACK,
SERVICE_MEDIA_STOP,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_OFF,
STATE_PAUSED,
STATE_PLAYING,
STATE_UNAVAILABLE,
)
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.util import dt as dt_util
from tests.async_mock import patch
from tests.components.directv import setup_integration
from tests.test_util.aiohttp import AiohttpClientMocker
ATTR_UNIQUE_ID = "unique_id"
CLIENT_ENTITY_ID = f"{MP_DOMAIN}.client"
MAIN_ENTITY_ID = f"{MP_DOMAIN}.host"
MUSIC_ENTITY_ID = f"{MP_DOMAIN}.music_client"
RESTRICTED_ENTITY_ID = f"{MP_DOMAIN}.restricted_client"
STANDBY_ENTITY_ID = f"{MP_DOMAIN}.standby_client"
UNAVAILABLE_ENTITY_ID = f"{MP_DOMAIN}.unavailable_client"
# pylint: disable=redefined-outer-name
@fixture
def mock_now() -> datetime:
"""Fixture for dtutil.now."""
return dt_util.utcnow()
async def async_turn_on(
hass: HomeAssistantType, entity_id: Optional[str] = None
) -> None:
"""Turn on specified media player or all."""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
await hass.services.async_call(MP_DOMAIN, SERVICE_TURN_ON, data)
async def async_turn_off(
hass: HomeAssistantType, entity_id: Optional[str] = None
) -> None:
"""Turn off specified media player or all."""
    data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
await hass.services.async_call(MP_DOMAIN, SERVICE_TURN_OFF, data)
async def async_media_pause(
hass: HomeAssistantType, entity_id: Optional[str] = None
) -> None:
"""Send the media player the command for pause."""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
    await hass.services.async_call(MP_DOMAIN, SERVICE_MEDIA_PAUSE, data)
async def async_media_play(
hass: HomeAssistantType, entity_id: Optional[str] = None
) -> None:
"""Send the media player the command for play/pause."""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
await hass.services.async_call(MP_DOMAIN, SERVICE_MEDIA_PLAY, data)
async def async_media_stop(
hass: HomeAssistantType, entity_id: Optional[str] = None
) -> None:
"""Send the media player the command for stop."""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
await hass.services.async_call(MP_DOMAIN, SERVICE_MEDIA_STOP, data)
async def async_media_next_track(
hass: HomeAssistantType, entity_id: Optional[str] = None
) -> None:
"""Send the media player the command for next track."""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
await hass.services.async_call(MP_DOMAIN, SERVICE_MEDIA_NEXT_TRACK, data)
async def async_media_previous_track(
hass: HomeAssistantType, entity_id: Optional[str] = None
) -> None:
"""Send the media player the command for prev track."""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
await hass.services.async_call(MP_DOMAIN, SERVICE_MEDIA_PREVIOUS_TRACK, data)
async def async_play_media(
hass: HomeAssistantType,
media_type: str,
media_id: str,
entity_id: Optional[str] = None,
enqueue: Optional[str] = None,
) -> None:
"""Send the media player the command for playing media."""
data = {ATTR_MEDIA_CONTENT_TYPE: media_type, ATTR_MEDIA_CONTENT_ID: media_id}
if entity_id:
data[ATTR_ENTITY_ID] = entity_id
if enqueue:
data[ATTR_MEDIA_ENQUEUE] = enqueue
await hass.services.async_call(MP_DOMAIN, SERVICE_PLAY_MEDIA, data)
async def test_setup(
hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test setup with basic config."""
await setup_integration(hass, aioclient_mock)
assert hass.states.get(MAIN_ENTITY_ID)
assert hass.states.get(CLIENT_ENTITY_ID)
assert hass.states.get(UNAVAILABLE_ENTITY_ID)
async def test_unique_id(
hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test unique id."""
await setup_integration(hass, aioclient_mock)
entity_registry = await hass.helpers.entity_registry.async_get_registry()
main = entity_registry.async_get(MAIN_ENTITY_ID)
assert main.unique_id == "028877455858"
client = entity_registry.async_get(CLIENT_ENTITY_ID)
assert client.unique_id == "2CA17D1CD30X"
unavailable_client = entity_registry.async_get(UNAVAILABLE_ENTITY_ID)
assert unavailable_client.unique_id == "9XXXXXXXXXX9"
async def test_supported_features(
hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test supported features."""
await setup_integration(hass, aioclient_mock)
# Features supported for main DVR
state = hass.states.get(MAIN_ENTITY_ID)
assert (
SUPPORT_PAUSE
| SUPPORT_TURN_ON
| SUPPORT_TURN_OFF
| SUPPORT_PLAY_MEDIA
| SUPPORT_STOP
| SUPPORT_NEXT_TRACK
| SUPPORT_PREVIOUS_TRACK
| SUPPORT_PLAY
== state.attributes.get("supported_features")
)
# Feature supported for clients.
state = hass.states.get(CLIENT_ENTITY_ID)
assert (
SUPPORT_PAUSE
| SUPPORT_PLAY_MEDIA
| SUPPORT_STOP
| SUPPORT_NEXT_TRACK
| SUPPORT_PREVIOUS_TRACK
| SUPPORT_PLAY
== state.attributes.get("supported_features")
)
async def test_check_attributes(
hass: HomeAssistantType,
mock_now: dt_util.dt.datetime,
aioclient_mock: AiohttpClientMocker,
) -> None:
"""Test attributes."""
await setup_integration(hass, aioclient_mock)
state = hass.states.get(MAIN_ENTITY_ID)
assert state.state == STATE_PLAYING
assert state.attributes.get(ATTR_MEDIA_CONTENT_ID) == "17016356"
assert state.attributes.get(ATTR_MEDIA_CONTENT_TYPE) == MEDIA_TYPE_MOVIE
assert state.attributes.get(ATTR_MEDIA_DURATION) == 7200
assert state.attributes.get(ATTR_MEDIA_POSITION) == 4437
assert state.attributes.get(ATTR_MEDIA_POSITION_UPDATED_AT)
assert state.attributes.get(ATTR_MEDIA_TITLE) == "Snow Bride"
assert state.attributes.get(ATTR_MEDIA_SERIES_TITLE) is None
assert state.attributes.get(ATTR_MEDIA_CHANNEL) == "{} ({})".format("HALLHD", "312")
assert state.attributes.get(ATTR_INPUT_SOURCE) == "312"
assert not state.attributes.get(ATTR_MEDIA_CURRENTLY_RECORDING)
assert state.attributes.get(ATTR_MEDIA_RATING) == "TV-G"
assert not state.attributes.get(ATTR_MEDIA_RECORDED)
assert state.attributes.get(ATTR_MEDIA_START_TIME) == datetime(
2020, 3, 21, 13, 0, tzinfo=dt_util.UTC
)
state = hass.states.get(CLIENT_ENTITY_ID)
assert state.state == STATE_PLAYING
assert state.attributes.get(ATTR_MEDIA_CONTENT_ID) == "4405732"
assert state.attributes.get(ATTR_MEDIA_CONTENT_TYPE) == MEDIA_TYPE_TVSHOW
assert state.attributes.get(ATTR_MEDIA_DURATION) == 1791
assert state.attributes.get(ATTR_MEDIA_POSITION) == 263
assert state.attributes.get(ATTR_MEDIA_POSITION_UPDATED_AT)
assert state.attributes.get(ATTR_MEDIA_TITLE) == "Tyler's Ultimate"
assert state.attributes.get(ATTR_MEDIA_SERIES_TITLE) == "Spaghetti
|
Ensembles/ert
|
python/python/ert/enkf/config/field_type_enum.py
|
Python
|
gpl-3.0
| 1,042
| 0.004798
|
# Copyright (C) 2016 Statoil ASA, Norway.
#
# This file is part of ERT - Ensemble based Reservoir Tool.
#
# ERT is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ERT is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE.
#
# See the GNU General Public License at <http://www.gnu.org/licenses/gpl.html>
# for more details.
from cwrap import BaseCEnum
class FieldTypeEnum(BaseCEnum):
TYPE_NAME = "field_type_enum"
ECLIPSE_RESTART = None
ECLIPSE_PARAMETER = None
GENERAL = None
UNKNOWN_FIELD_TYPE = None
FieldTypeEnum.addEnum('ECLIPSE_RESTART', 1)
FieldTypeEnum.addEnum('ECLIPSE_PARAMETER', 2)
FieldTypeEnum.addEnum('GENERAL', 3)
FieldTypeEnum.addEnum('UNKNOWN_FIELD_TYPE', 4)
|
8l/beri
|
cheritest/trunk/tests/fpu/test_raw_fpu_div_d32.py
|
Python
|
apache-2.0
| 1,767
| 0.003962
|
#-
# Copyright (c) 2013 Michael Roe
# All rights reserved.
#
# This software was developed by SRI International and the University of
# Cambridge Computer Laboratory under DARPA/AFRL contract FA8750-10-C-0237
# ("CTSRD"), as part of the DARPA CRASH research programme.
#
# @BERI_LICENSE_HEADER_START@
#
# Licensed to BERI Open Systems C.I.C. (BERI) under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. BERI licenses this
# file to you under the BERI Hardware-Software License, Version 1.0 (the
# "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at:
#
#   http://www.beri-open-systems.org/legal/license-1-0.txt
#
# Unless required by applicable law or agreed to in writing, Work distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# @BERI_LICENSE_HEADER_END@
#
from beritest_tools import BaseBERITestCase
from nose.plugins.attrib import attr
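# Added arithmetic note (worked out here, not in the original): 3456.3 / 12.45
# is approximately 277.6144578..., whose IEEE-754 double encoding is
# 0x407159D4D1BC2504; the tests below check the upper 32 bits (a1) and the
# lower 32 bits (a0) of that encoding separately.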
class test_raw_fpu_div_d32(BaseBERITestCase):
@attr('float32')
def test_raw_fpu_div_d32_lower(self):
'''Test can divide in double precision when in 32-bit mode'''
self.assertRegisterMaskEqual(self.MIPS.a0, 0xffffffff, 0xd1bc2504, "Failed to divide 3456.3 by 12.45 in double precision")
@attr('float32')
def test_raw_fpu_div_d32_upper(self):
'''Test can divide in double precision when in 32-bit mode'''
self.assertRegisterEqual(self.MIPS.a1, 0x407159d4, "Failed to divide 3456.3 by 12.45 in double precision")
|
vectorgraphics/robox
|
bin/firstnozzle.py
|
Python
|
gpl-3.0
| 5,461
| 0.027467
|
#!/usr/bin/env python
import fileinput
import sys
import re
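# Added orientation note (inferred from the code below, not from the original
# author): reads sliced G-code on stdin; within each layer it buffers T0/T1
# moves and replays them grouped per tool (the first CLI argument flips which
# tool goes first), inserting "G1 B1 ..." replenish moves on tool changes.
# A second pass then estimates move times from feedrates and schedules heater
# commands around each tool's active spans using the pretime/posttime budgets.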
t0first=len(sys.argv) < 2 or sys.argv[1] != "1"
t0firstLayers=sys.argv[2:]
if len(t0firstLayers) == 0:
t0firstLayers=map(str,range(0,1000))
# t0firstLayers=["0"]
numeric="([-+]?(?:(?: \d* \. \d+ )|(?: \d+ \.? )))"
rX=re.compile("X"+numeric,re.VERBOSE)
rY=re.compile("Y"+numeric,re.VERBOSE)
rZ=re.compile("Z"+numeric,re.VERBOSE)
rF=re.compile("F"+numeric,re.VERBOSE)
rT=re.compile("T"+numeric,re.VERBOSE)
rD=re.compile("D"+numeric,re.VERBOSE)
rE=re.compile("E"+numeric,re.VERBOSE)
rL=re.compile(";LAYER:"+numeric,re.VERBOSE)
L=-1
T=0
X=0
Y=0
T0=[]
T1=[]
lX=[]
lY=[]
lZ=[]
lastT=-1
L=-1
B=0
Heater="M103"
finish=False
replenishD="G1 B1 F150 D0.30\n"
replenishE="G1 B1 F150 E0.30\n"
buffer=[]
prologue=[]
start=0
for line in fileinput.input("-"):
if finish:
prologue.append(line)
continue
lT=rT.findall(line)
if len(lT) > 0:
T=int(lT[0])
lZ=rZ.findall(line)
lX=rX.findall(line)
lY=rY.findall(line)
if len(lX) > 0:
X=float(lX[0])
if len(lY) > 0:
Y=float(lY[0])
reorder=L >= 0
if reorder and (re.match(Heater,line) or re.match("M140",line)):
line=""
if re.match("; Post print gcode",line):
finish=True
reorder=False
if reorder and not (re.match(";LAYER:",line) or len(lZ) > 0):
# if reorder and not (re.match(";LAYER:",line)): # For Automaker 4.0.0
if T == 0:
lD=rD.findall(line)
if T != lastT:
T0.append("G0 B0\n")
T0.append("T0 F12000 X"+str(X)+" Y"+str(Y)+"\n")
B=0
lastT=T
if B == 0:
if len(lD) > 0:
B=1
T0.append(replenishD)
T0.append(line)
elif T == 1:
if T != lastT:
T1.append("G0 B0\n")
T1.append("T1 F12000 X"+str(X)+" Y"+str(Y)+"\n")
B=0
lastT=T
if B == 0:
lE=rE.findall(line)
if len(lE) > 0:
B=1
T1.append(replenishE)
T1.append(line)
else:
buffer.append(line)
else:
if len(T0) > 0 and t0first:
for l in T0:
buffer.append(l)
T0=[]
if len(T1) > 0:
for l in T1:
buffer.append(l)
T1=[]
if len(T0) > 0 and not t0first:
for l in T0:
buffer.append(l)
T0=[]
lL=rL.findall(line)
if len(lL) > 0:
L=int(lL[0])
if L == 0 and start == 0:
start=len(buffer)
if L == 1:
Heater="M104"
if reorder:
buffer.append("G0 B0\n")
B=0
if L >= 0 and B == 0:
lD=rD.findall(line)
if len(lD) > 0:
T=0
B=1
buffer.append("T0\n"+replenishD)
lE=rE.findall(line)
if len(lE) > 0:
T=1
B=1
buffer.append("T1\n"+replenishE)
buffer.append(line)
lastT=-1
Heater="M103"
count=start
count0=0
count1=0
#pretime=100
pretime=60
posttime=100
primetime=pretime+posttime
lastT=-1
T=lastT
time=0
X0=0
Y0=0
F=0
index=[0]*start
from math import sqrt
from bisect import bisect_left
while count < len(buffer):
lF=rF.findall(line)
lX=rX.findall(line)
lY=rY.findall(line)
if len(lF) > 0:
F=float(lF[0])/60
if len(lX) > 0:
X=float(lX[0])
if len(lY) > 0:
Y=float(lY[0])
dist=sqrt((X-X0)**2+(Y-Y0)**2)
time += dist/F
index.append(time)
X0=X
Y0=Y
line=buffer[count]
lL=rL.findall(line)
if len(lL) > 0:
L=int(lL[0])
if L == 1:
Heater="M104"
buffer.insert(count,"M140\n")
index.insert(count,index[count])
count += 1
lT=rT.findall(line)
if len(lT) > 0:
T=int(lT[0])
if T == 0:
if T != lastT:
lastT=T
if time-index[count1] > posttime:
buffer.insert(count1,Heater+" S0\n")
index.insert(count1,index[count1])
count += 1
i=max(count1+1,bisect_left(index,time-pretime))
if i > start and i < len(index):
buffer.insert(i,Heater+" S\n")
index.insert(i,index[i])
count += 1
count0=count
elif T == 1:
if T != lastT:
lastT=T
if time-index[count0] > posttime:
buffer.insert(count0,Heater+" T0\n")
index.insert(count0,index[count0])
count += 1
            i=max(count0+1,bisect_left(index,time-pretime))
if i > start and i < len(index):
buffer.insert(i,Heater+" T\n")
index.insert(i,index[i])
count += 1
count1=count
count += 1
if T == 1 and time-index[count1] > pretime:
buffer.insert(count1,Heater+" S0\n")
index.insert(count1,index[count1])
if T == 0 and time-index[count0] > pretime:
buffer.insert(count0,Heater+" T0\n")
index.insert(count0,index[count0])
for line in buffer:
sys.stdout.write(line)
for line in prologue:
sys.stdout.write(line)
sys.stdout.flush()
|
hiepthai/django-activity-stream
|
actstream/runtests/settings.py
|
Python
|
bsd-3-clause
| 3,999
| 0.00075
|
# Django settings for example_project project.
import os
import sys
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
import django
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
('Justin Quick', 'justquick@gmail.com'),
)
ENGINE = os.environ.get('DATABASE_ENGINE', 'django.db.backends.sqlite3')
DATABASES = {
'default': {
'ENGINE': ENGINE,
'NAME': 'test',
'OPTIONS': {
}
}
}
if 'postgres' in ENGINE or 'mysql' in ENGINE:
USER, PASSWORD = 'test', 'test'
if os.environ.get('TRAVIS', False):
if 'mysql' in ENGINE:
USER, PASSWORD = 'travis', ''
else:
USER, PASSWORD = 'postgres', ''
DATABASES['default'].update(
USER=os.environ.get('DATABASE_USER', USER),
PASSWORD=os.environ.get('DATABASE_PASSWORD', PASSWORD),
HOST=os.environ.get('DATABASE_HOST', 'localhost')
)
print(ENGINE)
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/New_York'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = 'media'
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = '/media/'
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'wzf0h@r2u%m^_zgj^39-y(kd%+n+j0r7=du(q0^s@q1asdfasdfasdft%^2!p'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'actstream.runtests.urls'
TEMPLATE_DIRS = (
'templates',
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.admin',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admindocs',
'django.contrib.sites',
'django.contrib.comments',
'actstream.runtests.testapp',
'actstream.runtests.testapp_nested',
'actstream',
)
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.auth.context_processors.auth',
'django.core.context_processors.debug',
'django.core.context_processors.i18n',
'django.core.context_processors.media',
'django.core.context_processors.request',
)
ACTSTREAM_SETTINGS = {
'MANAGER': 'actstream.runtests.testapp.streams.MyActionManager',
'FETCH_RELATIONS': True,
'USE_PREFETCH': True,
'USE_JSONFIELD': True,
'GFK_FETCH_DEPTH': 0,
}
if django.VERSION[:2] >= (1, 5):
AUTH_USER_MODEL = 'testapp.MyUser'
TEST_RUNNER = 'django.test.simple.DjangoTestSuiteRunner'
if 'COVERAGE' in os.environ:
INSTALLED_APPS += ('django_coverage',)
TEST_RUNNER = 'django_coverage.coverage_runner.CoverageRunner'
COVERAGE_REPORT_HTML_OUTPUT_DIR = 'coverage'
COVERAGE_REPORT_DATA_FILE = '.coverage'
|
biberino/notes
|
dialogs/createNotebook.py
|
Python
|
mit
| 708
| 0.007062
|
# -*- coding: iso-8859-1 -*-
from gi.repository import Gtk, Gdk
class CreateNotebookDialog:
def __init__(self):
self.builder = Gtk.Builder()
self.builder.add_from_file("dialogs/createNotebook.glade")
self.window = self.builder.get_object("window1")
self.builder.connect_signals(self)
        self.txtTitel = self.builder.get_object("txtTitel")
self.valid = False
self.titel = ""
Gtk.main()
#------ callbacks
def on_accept(self, *args):
self.valid = True
self.titel = self.txtTitel.get_text()
        self.window.destroy()
def on_window1_destroy(self, *args):
self.window.destroy()
Gtk.main_quit()
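# Illustrative usage (added sketch; the caller below is hypothetical): the
# constructor blocks in Gtk.main() until the dialog closes, then exposes
# its result:
#   dlg = CreateNotebookDialog()
#   if dlg.valid:
#       create_notebook(dlg.titel)  # hypothetical caller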
|
CoScale/coscale-generic-scripts
|
docker/docker-images.py
|
Python
|
bsd-3-clause
| 1,250
| 0.0056
|
#!/usr/bin/python
#
# Generic script to check how many images exist on the host
#
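# Added usage note (matching the __main__ dispatch below): run with "-c" to
# emit the metric configuration JSON, or "-d" to emit the current value,
# e.g. ./docker-images.py -d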
import sys
import json
import subprocess
# Configuration mode: return the custom metrics data should be defined
def config():
settings = {
"maxruntime": 30000, # How long the script is allowed to run
"period": 60, # The period the script will run, in this case it will run every 60 seconds
"metrics": [
{
"id": 0,
"datatype": "DOUBLE",
"name": "Number of Docker images",
"description": "Number of Docker images available on host",
"groups": "Docker images",
"unit": "",
"tags": "",
"calctype": "Instant"
}
]
}
print json.dumps(settings)
# Data retrieval mode: return the data for the custom metrics
def data():
    # Count Docker images on the host (the "- 1" drops the header line)
running = int(subprocess.check_output('docker images | wc -l', shell=True, stderr=subprocess.STDOUT)) - 1
print "M0 {}".format(running)
# Switch to check in which mode the script is running
if __name__ == "__main__":
    if sys.argv[1] == '-c':
config()
if sys.argv[1] == '-d':
data()
|
USGSDenverPychron/pychron
|
docs/user_guide/operation/scripts/examples/argus/measurement/jan_unknown400_120_HT_off.py
|
Python
|
apache-2.0
| 2,553
| 0.017235
|
#!Measurement
'''
baseline:
after: true
before: false
counts: 120
detector: H1
mass: 34.2
settling_time: 15.0
default_fits: nominal
equilibration:
eqtime: 40
inlet: R
inlet_delay: 3
outlet: O
use_extraction_eqtime: false
multicollect:
counts: 400
detector: H1
isotope: Ar40
peakcenter:
after: true
before: false
detector: H1
detectors:
- H1
- AX
- CDD
integration_time: 0.262144
isotope: Ar40
peakhop:
generate_ic_table: false
hops_name: ''
ncycles: 0
use_peak_hop: false
'''
ACTIVE_DETECTORS=('H2','H1','AX','L1','L2','CDD')
def main():
info('unknown measurement script')
activate_detectors(*ACTIVE_DETECTORS)
if mx.peakcenter.before:
peak_center(detector=mx.peakcenter.detector,isotope=mx.peakcenter.isotope)
if mx.baseline.before:
        baselines(ncounts=mx.baseline.counts,mass=mx.baseline.mass, detector=mx.baseline.detector,
settling_time=mx.baseline.settling_time)
    position_magnet(mx.multicollect.isotope, detector=mx.multicollect.detector)
#sniff the gas during equilibration
if mx.equilibration.use_extraction_eqtime:
eqt = eqtime
else:
eqt = mx.equilibration.eqtime
'''
Equilibrate is non-blocking so use a sniff or sleep as a placeholder
e.g sniff(<equilibration_time>) or sleep(<equilibration_time>)
'''
# turn off HV
set_deflection("CDD",2000)
sleep(2)
set_accelerating_voltage(0)
equilibrate(eqtime=eqt, inlet=mx.equilibration.inlet, outlet=mx.equilibration.outlet,
delay=mx.equilibration.inlet_delay)
#open('L')
set_time_zero()
sniff(eqt)
set_fits()
set_baseline_fits()
# turn on HV
set_accelerating_voltage(4500)
set_time_zero()
sleep(8)
set_deflection("CDD",10)
sleep(2)
#multicollect on active detectors
multicollect(ncounts=mx.multicollect.counts, integration_time=1)
if mx.baseline.after:
baselines(ncounts=mx.baseline.counts,mass=mx.baseline.mass, detector=mx.baseline.detector,
settling_time=mx.baseline.settling_time)
if mx.peakcenter.after:
activate_detectors(*mx.peakcenter.detectors, **{'peak_center':True})
peak_center(detector=mx.peakcenter.detector,isotope=mx.peakcenter.isotope,
integration_time=mx.peakcenter.integration_time)
if True:
#gosub('warm_cdd', argv=(mx.equilibration.outlet,))
gosub('warm_cdd')
info('finished measure script')
|
transcode-de/hopper
|
tests/form_data_tests/conftest.py
|
Python
|
bsd-3-clause
| 396
| 0.002525
|
# encoding: utf-8
import json
import pytest
from pytest_factoryboy import LazyFixture, register
from . import factories
@pytest.fixture
def elements(fixture):
return json.loads(fixture('simple_form.json'))['form']['elements']
register(factories.FormDataFactory, 'form', elements=LazyFixture('elements'))
register(factories.FormDataFactory, 'form_with_user', author=LazyFixture('user'))
|
hekra01/mercurial
|
mercurial/hgweb/webcommands.py
|
Python
|
gpl-2.0
| 43,136
| 0.001437
|
#
# Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
import os, mimetypes, re, cgi, copy
import webutil
from mercurial import error, encoding, archival, templater, templatefilters
from mercurial.node import short, hex
from mercurial import util
from common import paritygen, staticfile, get_contact, ErrorResponse
from common import HTTP_OK, HTTP_FORBIDDEN, HTTP_NOT_FOUND
from mercurial import graphmod, patch
from mercurial import scmutil
from mercurial.i18n import _
from mercurial.error import ParseError, RepoLookupError, Abort
from mercurial import revset
__all__ = []
commands = {}
class webcommand(object):
"""Decorator used to register a web command handler.
The decorator takes as its positional arguments the name/path the
command should be accessible under.
Usage:
@webcommand('mycommand')
def mycommand(web, req, tmpl):
pass
"""
def __init__(self, name):
self.name = name
def __call__(self, func):
__all__.append(self.name)
commands[self.name] = func
return func
@webcommand('log')
def log(web, req, tmpl):
"""
/log[/{revision}[/{path}]]
--------------------------
Show repository or file history.
For URLs of the form ``/log/{revision}``, a list of changesets starting at
the specified changeset identifier is shown. If ``{revision}`` is not
defined, the default is ``tip``. This form is equivalent to the
``changelog`` handler.
For URLs of the form ``/log/{revision}/{file}``, the history for a specific
file will be shown. This form is equivalent to the ``filelog`` handler.
"""
if 'file' in req.form and req.form['file'][0]:
return filelog(web, req, tmpl)
else:
return changelog(web, req, tmpl)
@webcommand('rawfile')
def rawfile(web, req, tmpl):
guessmime = web.configbool('web', 'guessmime', False)
path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
if not path:
content = manifest(web, req, tmpl)
req.respond(HTTP_OK, web.ctype)
return content
try:
fctx = webutil.filectx(web.repo, req)
except error.LookupError, inst:
try:
content = manifest(web, req, tmpl)
req.respond(HTTP_OK, web.ctype)
return content
except ErrorResponse:
raise inst
path = fctx.path()
text = fctx.data()
mt = 'application/binary'
if guessmime:
mt = mimetypes.guess_type(path)[0]
if mt is None:
if util.binary(text):
mt = 'application/binary'
else:
mt = 'text/plain'
if mt.startswith('text/'):
mt += '; charset="%s"' % encoding.encoding
req.respond(HTTP_OK, mt, path, body=text)
return []
def _filerevision(web, tmpl, fctx):
f = fctx.path()
text = fctx.data()
parity = paritygen(web.stripecount)
if util.binary(text):
mt = mimetypes.guess_type(f)[0] or 'application/octet-stream'
text = '(binary:%s)' % mt
def lines():
for lineno, t in enumerate(text.splitlines(True)):
yield {"line": t,
"lineid": "l%d" % (lineno + 1),
"linenumber": "% 6d" % (lineno + 1),
"parity": parity.next()}
return tmpl("filerevision",
file=f,
path=webutil.up(f),
text=lines(),
rev=fctx.rev(),
node=fctx.hex(),
author=fctx.user(),
date=fctx.date(),
desc=fctx.description(),
extra=fctx.extra(),
branch=webutil.nodebranchnodefault(fctx),
parent=webutil.parents(fctx),
child=webutil.children(fctx),
rename=webutil.renamelink(fctx),
permissions=fctx.manifest().flags(f))
@webcommand('file')
def file(web, req, tmpl):
"""
/file/{revision}[/{path}]
-------------------------
Show information about a directory or file in the repository.
Info about the ``path`` given as a URL parameter will be rendered.
If ``path`` is a directory, information about the entries in that
directory will be rendered. This form is equivalent to the ``manifest``
handler.
If ``path`` is a file, information about that file will be shown via
the ``filerevision`` template.
If ``path`` is not defined, information about the root directory will
be rendered.
"""
path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
if not path:
return manifest(web, req, tmpl)
try:
return _filerevision(web, tmpl, webutil.filectx(web.repo, req))
except error.LookupError, inst:
try:
return manifest(web, req, tmpl)
except ErrorResponse:
raise inst
def _search(web, req, tmpl):
MODE_REVISION = 'rev'
MODE_KEYWORD = 'keyword'
MODE_REVSET = 'revset'
def revsearch(ctx):
yield ctx
def keywordsearch(query):
lower = encoding.lower
qw = lower(query).split()
def revgen():
cl = web.repo.changelog
for i in xrange(len(web.repo) - 1, 0, -100):
l = []
for j in cl.revs(max(0, i - 99), i):
ctx = web.repo[j]
l.append(ctx)
l.reverse()
for e in l:
yield e
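        # Added note: revgen() walks the changelog newest-first in batches of
        # up to 100 revisions, reversing each ascending batch so callers see
        # revisions in descending order without loading the whole history.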
for ctx in revgen():
miss = 0
for q in qw:
if not (q in lower(ctx.user()) or
q in lower(ctx.description()) or
q in lower(" ".join(ctx.files()))):
miss = 1
break
if miss:
continue
yield ctx
def revsetsearch(revs):
for r in revs:
yield web.repo[r]
searchfuncs = {
MODE_REVISION: (revsearch, 'exact revision search'),
MODE_KEYWORD: (keywordsearch, 'literal keyword search'),
MODE_REVSET: (revsetsearch, 'revset expression search'),
}
def getsearchmode(query):
try:
ctx = web.repo[query]
except (error.RepoError, error.LookupError):
# query is not an exact revision pointer, need to
# decide if it's a revset expression or keywords
pass
else:
return MODE_REVISION, ctx
revdef = 'reverse(%s)' % query
try:
tree, pos = revset.parse(revdef)
except ParseError:
# can't parse to a revset tree
return MODE_KEYWORD, query
if revset.depth(tree) <= 2:
# no revset syntax used
return MODE_KEYWORD, query
if util.any((token, (value or '')[:3]) == ('string', 're:')
for token, value, pos in revset.tokenize(revdef)):
return MODE_KEYWORD, query
funcsused = revset.funcsused(tree)
if not funcsused.issubset(revset.safesymbols):
return MODE_KEYWORD, query
mfunc = revset.match(web.repo.ui, revdef)
try:
revs = mfunc(web.repo)
return MODE_REVSET, revs
# ParseError: wrongly placed tokens, wrongs arguments, etc
# RepoLookupError: no such revision, e.g. in 'revision:'
# Abort: bookmark/tag not exists
# LookupError: ambiguous identifier, e.g. in '(bc)' on a large repo
except (ParseError, RepoLookupError, Abort, LookupError):
return MODE_KEYWORD, query
def changelist(**map):
count = 0
for ctx in searchfunc[0](funcarg):
count += 1
n = ctx.node()
showtags = webutil.showtag(web.repo, tmpl, 'changelogtag', n)
files = webutil.listfilediffs(tmpl, ctx.files(), n, web.maxfiles)
yield tmpl('searchentry',
parity=parity.next(),
|
schlizbaeda/yamuplay
|
pyudev/src/pyudev/_ctypeslib/__init__.py
|
Python
|
gpl-3.0
| 974
| 0
|
# -*- coding: utf-8 -*-
# Copyright (C) 2015 mulhern <amulhern@redhat.com>
# This library is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation; either version 2.1 of the License, or (at your
# option) any later version.
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
# You should have received a copy of the GNU Lesser General Public License
# along with this library; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""
pyudev._ctypeslib
=================
Wrappers for libraries.
.. moduleauthor:: mulhern <amulhern@redhat.com>
"""
from . import libc
from . import libudev
|
ericlink/adms-server
|
playframework-dist/play-1.1/python/Lib/Cookie.py
|
Python
|
mit
| 26,007
| 0.01019
|
#!/usr/bin/env python
#
####
# Copyright 2000 by Timothy O'Malley <timo@alum.mit.edu>
#
# All Rights Reserved
#
# Permission to use, copy, modify, and distribute this software
# and its documentation for any purpose and without fee is hereby
# granted, provided that the above copyright notice appear in all
# copies and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Timothy O'Malley not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# Timothy O'Malley DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS
# SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS, IN NO EVENT SHALL Timothy O'Malley BE LIABLE FOR
# ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
# PERFORMANCE OF THIS SOFTWARE.
#
####
#
# Id: Cookie.py,v 2.29 2000/08/23 05:28:49 timo Exp
# by Timothy O'Malley <timo@alum.mit.edu>
#
# Cookie.py is a Python module for the handling of HTTP
# cookies as a Python dictionary. See RFC 2109 for more
# information on cookies.
#
# The original idea to treat Cookies as a dictionary came from
# Dave Mitchell (davem@magnet.com) in 1995, when he released the
# first version of nscookie.py.
#
####
r"""
Here's a sample session to show how to use this module.
At the moment, this is the only documentation.
The Basics
----------
Importing is easy..
>>> import Cookie
Most of the time you start by creating a cookie. Cookies come in
three flavors, each with slightly different encoding semantics, but
more on that later.
>>> C = Cookie.SimpleCookie()
>>> C = Cookie.SerialCookie()
>>> C = Cookie.SmartCookie()
[Note: Long-time users of Cookie.py will remember using
Cookie.Cookie() to create a Cookie object. Although deprecated, it
is still supported by the code. See the Backward Compatibility notes
for more information.]
Once you've created your Cookie, you can add values just as if it were
a dictionary.
>>> C = Cookie.SmartCookie()
>>> C["fig"] = "newton"
>>> C["sugar"] = "wafer"
>>> C.output()
'Set-Cookie: fig=newton\r\nSet-Cookie: sugar=wafer'
Notice that the printable representation of a Cookie is the
appropriate format for a Set-Cookie: header. This is the
default behavior. You can change the header and printed
attributes by using the .output() function
>>> C = Cookie.SmartCookie()
>>> C["rocky"] = "road"
>>> C["rocky"]["path"] = "/cookie"
>>> print C.output(header="Cookie:")
Cookie: rocky=road; Path=/cookie
>>> print C.output(attrs=[], header="Cookie:")
Cookie: rocky=road
The load() method of a Cookie extracts cookies from a string. In a
CGI script, you would use this method to extract the cookies from the
HTTP_COOKIE environment variable.
>>> C = Cookie.SmartCookie()
>>> C.load("chips=ahoy; vienna=finger")
>>> C.output()
'Set-Cookie: chips=ahoy\r\nSet-Cookie: vienna=finger'
The load() method is darn-tootin smart about identifying cookies
within a string. Escaped quotation marks, nested semicolons, and other
such trickeries do not confuse it.
>>> C = Cookie.SmartCookie()
>>> C.load('keebler="E=everybody; L=\\"Loves\\"; fudge=\\012;";')
>>> print C
Set-Cookie: keebler="E=everybody; L=\"Loves\"; fudge=\012;"
Each element of the Cookie also supports all of the RFC 2109
Cookie attributes. Here's an example which sets the Path
attribute.
>>> C = Cookie.SmartCookie()
>>> C["oreo"] = "doublestuff"
>>> C["oreo"]["path"] = "/"
>>> print C
Set-Cookie: oreo=doublestuff; Path=/
Each dictionary element has a 'value' attribute, which gives you
back the value associated with the key.
>>> C = Cookie.SmartCookie()
>>> C["twix"] = "none for you"
>>> C["twix"].value
'none for you'
A Bit More Advanced
-------------------
As mentioned before, there are three different flavors of Cookie
objects, each with different encoding/decoding semantics. This
section briefly discusses the differences.
SimpleCookie
The SimpleCookie expects that all values should be standard strings.
Just to be sure, SimpleCookie invokes the str() builtin to convert
the value to a string, when the values are set dictionary-style.
>>> C = Cookie.SimpleCookie()
>>> C["number"] = 7
>>> C["string"] = "seven"
>>> C["number"].value
'7'
>>> C["string"].value
'seven'
>>> C.output()
'Set-Cookie: number=7\r\nSet-Cookie: string=seven'
SerialCookie
The SerialCookie expects that all values should be serialized using
cPickle (or pickle, if cPickle isn't available). As a result of
serializing, SerialCookie can save almost any Python object to a
value, and recover the exact same object when the cookie has been
returned. (SerialCookie can yield some strange-looking cookie
values, however.)
>>> C = Cookie.SerialCookie()
>>> C["number"] = 7
>>> C["string"] = "seven"
>>> C["number"].value
7
>>> C["string"].value
'seven'
>>> C.output()
'Set-Cookie: number="I7\\012."\r\nSet-Cookie: string="S\'seven\'\\012p1\\012."'
Be warned, however, if SerialCookie cannot de-serialize a value (because
it isn't a valid pickle'd object), IT WILL RAISE AN EXCEPTION.
SmartCookie
The SmartCookie combines aspects of each of the other two flavors.
When setting a value in a dictionary-fashion, the SmartCookie will
serialize (ala cPickle) the value *if and only if* it isn't a
Python string. String objects are *not* serialized. Similarly,
when the load() method parses out values, it attempts to de-serialize
the value. If it fails, then it fallsback to treating the value
as a string.
>>> C = Cookie.SmartCookie()
>>> C["number"] = 7
>>> C["string"] = "seven"
>>> C["number"].value
7
>>> C["string"].value
'seven'
>>> C.output()
'Set-Cookie: number="I7\\012."\r\nSet-Cookie: string=seven'
Backwards Compatibility
-----------------------
In order to keep compatibilty with earlier versions of Cookie.py,
it is still possible to use Cookie.Cookie() to create a Cookie. In
fact, this simply returns a SmartCookie.
>>> C = Cookie.Cookie()
>>> print C.__class__.__name__
SmartCookie
Finis.
""" #"
# ^
# |----helps out font-lock
#
# Import our required modules
#
import string
try:
from cPickle import dumps, loads
except ImportError:
from pickle import dumps, loads
import re, warnings
__all__ = ["CookieError","BaseCookie","SimpleCookie","SerialCookie",
"SmartCookie","Cookie"]
_nulljoin = ''.join
_semispacejoin = '; '.join
_spacejoin = ' '.join
#
# Define an exception visible to External modules
#
class CookieError(Exception):
pass
# These quoting routines conform to the RFC2109 specification, which in
# turn references the character definitions from RFC2068. They provide
# a two-way quoting algorithm. Any non-text character is translated
# into a 4 character sequence: a forward-slash followed by the
# three-digit octal equivalent of the character. Any '\' or '"' is
# quoted with a preceeding '\' slash.
#
# These are taken from RFC2068 and RFC2109.
# _LegalChars is the list of chars which don't require "'s
# _Translator hash-table for fast quoting
#
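# Added example (consistent with the table below): newline '\012' maps to the
# four-character escape '\\012', so a raw value like 'a\nb' is quoted as
# "a\012b".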
_LegalChars = string.ascii_letters + string.digits + "!#$%&'*+-.^_`|~"
_Translator = {
'\000' : '\\000', '\001' : '\\001', '\002' : '\\002',
'\003' : '\\003', '\004' : '\\004', '\005' : '\\005',
'\006' : '\\006', '\007' : '\\007', '\010' : '\\010',
'\011' : '\\011', '\012' : '\\012', '\013' : '\\013',
'\014' : '\\014', '\015' : '\\015', '\016' : '\\016',
'\017' : '\\017', '\020' : '\\020', '\021' : '\\021',
'\022' : '\\0
|
liveblog/liveblog
|
server/liveblog/tests/test_settings.py
|
Python
|
agpl-3.0
| 40
| 0
|
DATE_FORMAT = '%Y-%m-%dT%H:%M:%S+00:00'
|
kumarisneha/practice_repo
|
techgig/techgig_isnumeric.py
|
Python
|
mit
| 117
| 0.025641
|
def main():
s=raw_input()
if s.isdigit():
print "Tru
|
e"
else:
print "False"
main()
|
calpaterson/recall
|
src/recall/search.py
|
Python
|
agpl-3.0
| 7,881
| 0.003936
|
# -*- coding: utf-8 -*-
# Recall is a program for storing bookmarks of different things
# Copyright (C) 2012 Cal Paterson
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from pprint import pformat
from urllib.parse import urlparse
import json
import robotexclusionrulesparser as rerp
import time
import requests
from bs4 import BeautifulSoup
from redis import StrictRedis
from recall import messages, jobs
from recall import convenience as conv
def get_es_base_url():
return "http://" + conv.settings["RECALL_ELASTICSEARCH_HOST"] + ":" +\
conv.settings["RECALL_ELASTICSEARCH_PORT"]
def get_es_mark_url():
return "{es_base_url}/{index}/mark".format(
es_base_url=get_es_base_url(),
index=conv.settings["RECALL_ELASTICSEARCH_INDEX"])
def set_mapping():
response = requests.put(get_es_mark_url() + "/_mapping",
data=json.dumps(mapping))
mapping = {
"mark": {
"properties": {
"~": {
"type": "long",
"store": "yes",
"index": "yes"},
"@": {
"index": "not_analyzed"}}}}
def clear():
url = "{search_url}/{index}".format(
search_url = get_es_base_url(),
index = conv.settings["RECALL_ELASTICSEARCH_INDEX"])
requests.delete(url)
class IncoherentSearchQueryException(Exception):
pass
class SearchQueryBuilder(object):
def __init__(self):
self.of_size(10)
self.as_user_set = False
self.filters = []
self.queries = []
self.sort = None
def with_keywords(self, string):
self.queries.append({"match": {"_all": string}})
return self
def of_size(self, size):
self.size = size
return self
def about(self, tag):
self.filters.append({"term": {"about": tag}})
return self
def not_about(self, tag):
self.filters.append({"not": {"term": {"about": tag}}})
return self
def as_user(self, user):
if self.as_user_set:
raise IncoherentSearchQueryException(
"Tried to search as user but already anonymous")
self.as_user_set = True
# Have not worked out how to correctly escape @ for elasticsearch
at_sign_workaround = user["email"].split("@")[0]
self.filters.append(
{"or": [
{"term": {"@": at_sign_workaround}},
{"not": {"term": {"%private": True}}}]})
return self
def only_user(self, user):
at_sign_workaround = user["email"].split("@")[0]
self.filters.append(
{"term": {"@": at_sign_workaround}})
return self
def the_url(self, url):
self.queries.append({"match": { "hyperlink": url}})
return self
def anonymously(self):
if self.as_user_set:
raise IncoherentSearchQueryException(
"Tried to search anonymously but user has already been set")
self.as_user_set = True
self.filters.append({"not": {"term": {"%private": True}}})
return self
def sort_by_when(self):
self.sort = [{"~": {"order": "desc"}}]
return self
def build(self):
query_and_filters = {
"filter": {"and": self.filters,}
}
if self.queries == []:
query_and_filters.update({"query": {"match_all": {}}})
else:
query_and_filters.update(
{"query": {"bool": {
"must": self.queries
}}})
query = {
"size": self.size,
"query":{
"filtered": query_and_filters
}
}
if self.sort is not None:
query["sort"] = self.sort
return query
def __str__(self):
return pformat(self.build())
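# Illustrative builder usage (added sketch; names as defined in this module):
#   qb = (SearchQueryBuilder()
#         .with_keywords("python")
#         .about("programming")
#         .anonymously()
#         .sort_by_when()
#         .of_size(25))
#   total, marks = search(qb)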
def status():
try:
if requests.get(get_es_base_url()).json["ok"]:
return "ok"
else:
return "ERROR"
except Exception as e:
return "ERROR"
def search(queryBuilder):
response = requests.get(get_es_mark_url() + "/_search?",
data=json.dumps(queryBuilder.build()))
marks = []
try:
for mark in response.json["hits"]["hits"]:
marks.append(mark["_source"])
except KeyError:
conv.logger("search").exception("Elasticsearch error: " + str(response.json))
return response.json["hits"]["total"], marks
class IndexRecord(jobs.Job):
"""Index a record (part of a mark) in elasticsearch"""
user_agent = "Recall - email cal@calpaterson.com for support"
def __init__(self, record):
self.record = record
def may_fetch(self, hyperlink):
url_obj = urlparse(hyperlink)
robots_url = url_obj.scheme + "://" + url_obj.netloc + "/robots.txt"
robots_parser = rerp.RobotExclusionRulesParser()
robots_parser.user_agent = self.user_agent
robots_parser.fetch(robots_url)
allowed = robots_parser.is_allowed(self.user_agent, hyperlink)
if not allowed:
self.logger.warn("Not allowed to fetch " + hyperlink)
return allowed
def get_fulltext(self, mark):
try:
headers = {"User-Agent": self.user_agent}
if "hyperlink" in mark and self.may_fetch(mark["hyperlink"]):
response = requests.get(mark["hyperlink"], headers=headers)
if response.status_code in range(200, 300):
mark["£fulltext"] = BeautifulSoup(response.content).get_text()
else:
self.logger.warn("Requested {hyperlink}, but got {status_code}".format(
hyperlink=mark["hyperlink"],
status_code=response.status_code))
except Exception as e:
try:
status_code = response.status_code
except NameError:
status_code = None
self.logger.exception("Error while getting fulltext" + repr({
"hyperlink": mark["hyperlink"],
"r
|
esponse_status": status_code}))
def update_last_indexed_time(self, mark):
mark["£last_indexed"] = int(time.time())
db = conv.db()
db.marks.update(
{"@": mark["@"], "~": mark["~"]},
{"$set": {"£last_indexed": mark["£last_indexed"]},
"$unset": "£q"})
def mark_for_record(self, record):
if ":" not in
|
record:
mark = record
else:
db = conv.db()
mark = db.marks.find_one(
{"@": record[":"]["@"], "~": record[":"]["~"]})
del mark["_id"]
return mark
def do(self):
mark = self.mark_for_record(self.record)
self.update_last_indexed_time(mark)
self.get_fulltext(mark)
url = "http://{hostname}:{port}/{index}/{type}/{id}".format(
hostname = conv.settings["RECALL_ELASTICSEARCH_HOST"],
port = int(conv.settings["RECALL_ELASTICSEARCH_PORT"]),
index = conv.settings["RECALL_ELASTICSEARCH_INDEX"],
type = "mark",
id = mark["@"] + str(mark["~"]))
requests.post(url, data=json.dumps(mark))
|
VitalPet/c2c-rd-addons
|
c2c_budget_chricar/wizard/chart.py
|
Python
|
agpl-3.0
| 12,006
| 0.010082
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
import time
from openerp.report import report_sxw
import logging
class report_webkit_html(report_sxw.rml_parse):
def __init__(self, cr, uid, name, context):
super(report_webkit_html, self).__init__(cr, uid, name, context=context)
self.localcontext.update({
'time': time,
'cr':cr,
'uid': uid,
})
class budget_item_chart(osv.osv_memory):
"""
    For Chart of Budget Items
"""
_name = "account.analytic.chart.sum"
_description = "Account Analytic chart"
_logger = logging.getLogger(_name)
_columns = {
'chart_account_id': fields.many2one('c2c_budget.item', \
'Budget Top Item', \
domain = [('parent_id','=',False)] ,\
required=True),
'fiscalyear': fields.many2one('account.fiscalyear', \
'Fiscal year', \
required=True),
'period_from': fields.many2one('account.period', 'Start period'),
'period_to': fields.many2one('account.period', 'End period'),
'period_prev_from': fields.many2one('account.period', 'Start period prev FY'),
'period_prev_to': fields.many2one('account.period', 'End period prev FY'),
'print_all_zero': fields.boolean('Print lines with all zero'),
'print_chapter' : fields.boolean('Print chapter column'),
'print_opening_dc' : fields.boolean('Print opening balance, debit and credit columns'),
'print_views_only' : fields.boolean('Print only accounts of type view'),
'print_previous_1000' : fields.boolean('Print previous balance in 1000'),
}
_defaults = {
'print_chapter': lambda *a: True,
'print_opening_dc': lambda *a: True,
}
def onchange_fiscalyear(self, cr, uid, ids, fiscalyear_id=False, context=None):
res = {}
res['value'] = {}
if fiscalyear_id:
start_period = end_period = False
#FIXME - check if closing periods are handled correctly
            # FIXME 2 statements because UNION does not guarantee a correct sort of results.
cr.execute('''
SELECT * FROM (SELECT p.id
FROM account_period p
LEFT JOIN account_fiscalyear f ON (p.fiscalyear_id = f.id)
WHERE f.id = %s
ORDER BY p.date_start ASC
LIMIT 1) AS period_start
UNION ALL
SELECT * FROM (SELECT p.id
FROM account_period p
LEFT JOIN account_fiscalyear f ON (p.fiscalyear_id = f.id)
WHERE f.id = %s
AND p.date_start < NOW()
ORDER BY p.date_stop DESC
LIMIT 1) AS period_stop
''', (fiscalyear_id, fiscalyear_id))
periods = [i[0] for i in cr.fetchall()]
if periods and len(periods) > 1:
start_period = periods[0]
                end_period = periods[1]
res['value'] = {'period_from': start_period, 'period_to': end_period}
cr.execute('''
SELECT * FROM (SELECT p.id
FROM account_period p,
account_fiscalyear f,
account_fiscalyear pf
WHERE f.id = %s
AND pf.date_stop = f.date_start -1
                          AND p.fiscalyear_id = pf.id
ORDER BY p.date_start ASC
LIMIT 1) AS period_prev_start
UNION ALL
SELECT * FROM (SELECT p.id
FROM account_period p,
account_fiscalyear f,
account_fiscalyear pf
WHERE f.id = %s
AND pf.date_stop = f.date_start -1
AND p.fiscalyear_id = pf.id
ORDER BY p.date_stop desc
LIMIT 1) AS period_prev_start
''', (fiscalyear_id, fiscalyear_id))
periods_prev = [i[0] for i in cr.fetchall()]
if periods_prev and len(periods_prev) > 1:
start_prev_period = periods_prev[0]
end_prev_period = periods_prev[1]
res['value'] = {'period_from': start_period,
'period_to' : end_period,
'period_prev_from': start_prev_period,
'period_prev_to' : end_prev_period,
}
return res
def budget_item_chart_open(self, cr, uid, ids, context=None):
"""
Opens chart of Accounts
@param cr: the current row, from the database cursor,
@param uid: the current user’s ID for security checks,
@param ids: List of account chart’s IDs
@return: dictionary of Open account chart window on given fiscalyear and all Entries or posted entries
"""
mod_obj = self.pool.get('ir.model.data')
act_obj = self.pool.get('ir.actions.act_window')
rep_obj = self.pool.get('ir.actions.report.xml')
period_obj = self.pool.get('account.period')
fy_obj = self.pool.get('account.fiscalyear')
if context is None:
context = {}
data = self.read(cr, uid, ids, [], context=context)[0]
self._logger.debug('open `%s` `%s` `%s`', context.get('open'), data['period_from'][0], data['period_to'][0])
if context.get('open') == 'view':
result = mod_obj.get_object_reference(cr, uid, 'c2c_budget', 'open_budget_items_tree')
id = result and result[1] or False
result = act_obj.read(cr, uid, [id], context=context)[0]
elif context.get('open') == 'report':
result = mod_obj.get_object_reference(cr, uid, 'c2c_budget', 'report_c2c_budget_item_chart')
id = result and result[1] or False
result = rep_obj.read(cr, uid, [id], context=context)[0]
#FIXME
# does not open report
result['periods'] = []
if data['period_from'] and data['period_to']:
result['periods'] = period_obj.build_ctx_periods(cr, uid, data['period_from'][0], data['period_to'][0])
result['context'] = str({'fiscalyear': data['fiscalyear'][0], 'periods': result['periods'] })
if data['period_prev_from'] and data['period_prev_to']:
result['periods_prev'] = period_obj.build_ctx_periods(cr, uid, data['period_prev_from'][0], data['period_prev_to'][0])
if result['periods_prev']:
result['context'] = str({'fiscalyear': data['fiscalyear'][0],
|
IlfirinIlfirin/shavercoin
|
contrib/wallettools/walletchangepass.py
|
Python
|
mit
| 220
| 0
|
from jsonrpc import ServiceProxy
access = ServiceProxy("http://127.0.0.1:9447")
pwd = raw_input("Enter old wallet passphrase: ")
pwd2 = raw_input("Enter new wallet passphrase: ")
access.walletpassphrasechange(pwd, pwd2)
|
amurzeau/streamlink-debian
|
tests/plugins/test_senategov.py
|
Python
|
bsd-2-clause
| 827
| 0.002418
|
import unittest
from streamlink.plugins.senategov import SenateGov
from tests.plugins import PluginCanHandleUrl
class TestPluginCanHandleUrlSenateGov(PluginCanHandleUrl):
__plugin__ = SenateGov
should_match = [
"https://www.foreign.senate.gov/hearings/business-meeting-082218"
"https://www.senate.gov/isvp/?comm=foreign&type=arch&stt=21:50&filename=foreign082218&auto_play=false"
+ "&wmode=transparent&poster=https%3A%2F%2Fwww%2Eforeign%2Esenate%2Egov%2Fthemes%2Fforeign%2Fimages"
+ "%2Fvideo-poster-flash-fit%2Epng"
]
class TestPluginSenateGov(unittest.TestCase):
def test_stt_parse(self):
        self.assertEqual(600, SenateGov.parse_stt("10:00"))
        self.assertEqual(3600, SenateGov.parse_stt("01:00:00"))
self.assertEqual(70, SenateGov.parse_stt("1:10"))
|
colloquium/cobbler
|
cobbler/action_reposync.py
|
Python
|
gpl-2.0
| 22,241
| 0.006519
|
"""
Builds out and synchronizes yum repo mirrors.
Initial support for rsync, perhaps reposync coming later.
Copyright 2006-2007, Red Hat, Inc
Michael DeHaan <mdehaan@redhat.com>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301 USA
"""
import os
import os.path
import time
import yaml # Howell-Clark version
import sys
HAS_YUM = True
try:
import yum
except:
HAS_YUM = False
import utils
from cexceptions import *
import traceback
import errno
from utils import _
import clogger
class RepoSync:
"""
Handles conversion of internal state to the tftpboot tree layout
"""
# ==================================================================================
def __init__(self,config,tries=1,nofail=False,logger=None):
"""
Constructor
"""
self.verbose = True
self.api = config.api
self.config = config
self.distros = config.distros()
self.profiles = config.profiles()
self.systems = config.systems()
self.settings = config.settings()
self.repos = config.repos()
self.rflags = self.settings.reposync_flags
self.tries = tries
self.nofail = nofail
self.logger = logger
if logger is None:
self.logger = clogger.Logger()
self.logger.info("hello, reposync")
# ===================================================================
def run(self, name=None, verbose=True):
"""
Syncs the current repo configuration file with the filesystem.
"""
self.logger.info("run, reposync, run!")
try:
self.tries = int(self.tries)
except:
utils.die(self.logger,"retry value must be an integer")
self.verbose = verbose
report_failure = False
for repo in self.repos:
env = repo.environment
for k in env.keys():
self.logger.info("environment: %s=%s" % (k,env[k]))
if env[k] is not None:
os.putenv(k,env[k])
if name is not None and repo.name != name:
# invoked to sync only a specific repo, this is not the one
continue
elif name is None and not repo.keep_updated:
# invoked to run against all repos, but this one is off
self.logger.info("%s is set to not be updated" % repo.name)
continue
repo_mirror = os.path.join(self.settings.webdir, "repo_mirror")
repo_path = os.path.join(repo_mirror, repo.name)
mirror = repo.mirror
if not os.path.isdir(repo_path) and not repo.mirror.lower().startswith("rhn://"):
os.makedirs(repo_path)
# which may actually NOT reposync if the repo is set to not mirror locally
# but that's a technicality
for x in range(self.tries+1,1,-1):
success = False
try:
self.sync(repo)
success = True
except:
utils.log_exc(self.logger)
self.logger.warning("reposync failed, tries left: %s" % (x-2))
if not success:
report_failure = True
if not self.nofail:
utils.die(self.logger,"reposync failed, retry limit reached, aborting")
else:
self.logger.error("reposync failed, retry limit reached, skipping")
self.update_permissions(repo_path)
if report_failure:
utils.die(self.logger,"overall reposync failed, at least one repo failed to synchronize")
return True
# ==================================================================================
def sync(self, repo):
"""
Conditionally sync a repo, based on type.
"""
if repo.breed == "rhn":
            return self.rhn_sync(repo)
elif repo.breed == "yum":
return self.yum_sync(repo)
#elif repo.breed == "apt":
# return self.apt_sync(repo)
elif repo.breed == "rsync":
return self.rsync_sync(repo)
else:
utils.die(self.logger,"unable to sync repo (%s), unknown or unsupporte
|
d repo type (%s)" % (repo.name, repo.breed))
# ====================================================================================
def createrepo_walker(self, repo, dirname, fnames):
"""
Used to run createrepo on a copied Yum mirror.
"""
if os.path.exists(dirname) or repo['breed'] == 'rsync':
utils.remove_yum_olddata(dirname)
# add any repo metadata we can use
mdoptions = []
if os.path.isfile("%s/.origin/repomd.xml" % (dirname)):
if not HAS_YUM:
utils.die(self.logger,"yum is required to use this feature")
rmd = yum.repoMDObject.RepoMD('', "%s/.origin/repomd.xml" % (dirname))
if rmd.repoData.has_key("group"):
groupmdfile = rmd.getData("group").location[1]
mdoptions.append("-g %s" % groupmdfile)
if rmd.repoData.has_key("prestodelta"):
# need createrepo >= 0.9.7 to add deltas
if utils.check_dist() == "redhat" or utils.check_dist() == "suse":
cmd = "/usr/bin/rpmquery --queryformat=%{VERSION} createrepo"
createrepo_ver = utils.subprocess_get(self.logger, cmd)
if createrepo_ver >= "0.9.7":
mdoptions.append("--deltas")
else:
utils.die(self.logger,"this repo has presto metadata; you must upgrade createrepo to >= 0.9.7 first and then need to resync the repo through cobbler.")
blended = utils.blender(self.api, False, repo)
flags = blended.get("createrepo_flags","(ERROR: FLAGS)")
try:
# BOOKMARK
cmd = "createrepo %s %s %s" % (" ".join(mdoptions), flags, dirname)
utils.subprocess_call(self.logger, cmd)
except:
utils.log_exc(self.logger)
self.logger.error("createrepo failed.")
del fnames[:] # we're in the right place
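        # Added note: a resulting command might look like (illustrative paths)
        #   createrepo -g repodata/comps.xml --deltas <createrepo_flags> <dirname>
        # depending on which metadata options were detected above.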
# ====================================================================================
def rsync_sync(self, repo):
"""
Handle copying of rsync:// and rsync-over-ssh repos.
"""
repo_mirror = repo.mirror
if not repo.mirror_locally:
utils.die(self.logger,"rsync:// urls must be mirrored locally, yum cannot access them directly")
if repo.rpm_list != "" and repo.rpm_list != []:
self.logger.warning("--rpm-list is not supported for rsync'd repositories")
# FIXME: don't hardcode
dest_path = os.path.join("/var/www/cobbler/repo_mirror", repo.name)
spacer = ""
if not repo.mirror.startswith("rsync://") and not repo.mirror.startswith("/"):
spacer = "-e ssh"
if not repo.mirror.endswith("/"):
repo.mirror = "%s/" % repo.mirror
# FIXME: wrapper for subprocess that logs to logger
cmd = "rsync -rltDv %s --delete --exclude-from=/etc/cobbler/rsync.exclude %s %s" % (spacer, repo.mirror, dest_path)
rc = utils.subprocess_call(self.logger, cmd)
if
|
yntantan/beetles
|
dbdb/tool.py
|
Python
|
apache-2.0
| 1,012
| 0
|
from __future__ import print_function
import sys
import dbdb
OK = 0
BAD_ARGS = 1
BAD_VERB = 2
BAD_KEY = 3
def usage():
print("Usage:", file=sys.stderr)
print("\tpython -m dbdb.tool DBNAME get KEY", file=sys.stderr)
print("\tpython -m dbdb.tool DBNAME set KEY VALUE", file=sys.stderr)
print("\tpython -m dbdb.tool DBNAME d
|
elete KEY", file=sys.stderr)
def main(argv):
if not (4 <= len(argv) <= 5):
usage()
return BAD_ARGS
dbname, verb, key, value = (argv[1:] + [None])[:4]
if verb not in {'get', 'set', 'delete'}:
usage()
return BAD_VERB
db = dbdb.connect(dbname)
try:
if verb == 'get':
sys.stdout.write(db[key])
elif verb == 'set':
db[key] = value
db.commit()
else:
del db[key]
db.commit()
except KeyError:
print("Key not found", file=sys.stderr)
return BAD_KEY
return OK
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
driftx/Telephus
|
telephus/cassandra/constants.py
|
Python
|
mit
| 187
| 0
|
#
# Autogenerated by Thrift Compiler (0.7.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
from thrift.Thrift import *
from ttypes import *
VERSION = "19.35
|
.0"
|
uday12kumar/myreview
|
reviews/reviews/wsgi.py
|
Python
|
mit
| 1,422
| 0.000703
|
"""
WSGI config for reviews project.
|
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "reviews.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "reviews.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
deKupini/erp
|
addons/project_issue/project_issue.py
|
Python
|
agpl-3.0
| 30,662
| 0.004925
|
#-*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import calendar
from datetime import datetime,date
from dateutil import relativedelta
import json
import time
from openerp import api
from openerp import SUPERUSER_ID
from openerp import tools
from openerp.osv import fields, osv, orm
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT
from openerp.tools import html2plaintext
from openerp.tools.translate import _
from openerp.exceptions import UserError, AccessError
class project_issue_version(osv.Model):
_name = "project.issue.version"
_order = "name desc"
_columns = {
'name': fields.char('Version Number', required=True),
'active': fields.boolean('Active', required=False),
}
_defaults = {
'active': 1,
}
class project_issue(osv.Model):
_name = "project.issue"
_description = "Project Issue"
_order = "priority desc, create_date desc"
_inherit = ['mail.thread', 'ir.needaction_mixin']
_mail_post_access = 'read'
_track = {
'stage_id': {
            # this is only a heuristic; depending on your particular stage configuration it may not match all 'new' stages
'project_issue.mt_issue_new': lambda self, cr, uid, obj, ctx=None: obj.stage_id and obj.stage_id.sequence <= 1,
'project_issue.mt_issue_stage': lambda self, cr, uid, obj, ctx=None: obj.stage_id and obj.stage_id.sequence > 1,
},
'user_id': {
'project_issue.mt_issue_assigned': lambda self, cr, uid, obj, ctx=None: obj.user_id and obj.user_id.id,
},
'kanban_state': {
'project_issue.mt_issue_blocked': lambda self, cr, uid, obj, ctx=None: obj.kanban_state == 'blocked',
'project_issue.mt_issue_ready': lambda self, cr, uid, obj, ctx=None: obj.kanban_state == 'done',
},
}
def _get_default_partner(self, cr, uid, context=None):
project_id = self._get_default_project_id(cr, uid, context)
if project_id:
project = self.pool.get('project.project').browse(cr, uid, project_id, context=context)
if project and project.partner_id:
return project.partner_id.id
return False
def _get_default_project_id(self, cr, uid, context=None):
""" Gives default project by checking if present in the context """
return self._resolve_project_id_from_context(cr, uid, context=context)
def _get_default_stage_id(self, cr, uid, context=None):
""" Gives default stage_id """
project_id = self._get_default_project_id(cr, uid, context=context)
return self.stage_find(cr, uid, [], project_id, [('fold', '=', False)], context=context)
def _resolve_project_id_from_context(self, cr, uid, context=None):
""" Returns ID of project based on the value of 'default_project_id'
context key, or None if it cannot be resolved to a single
project.
"""
if context is None:
context = {}
if type(context.get('default_project_id')) in (int, long):
return context.get('default_project_id')
if isinstance(context.get('default_project_id'), basestring):
project_name = context['default_project_id']
project_ids = self.pool.get('project.project').name_search(cr, uid, name=project_name, context=context)
if len(project_ids) == 1:
return int(project_ids[0][0])
return None
def _read_group_stage_ids(self, cr, uid, ids, domain, read_group_order=None, access_rights_uid=None, context=None):
access_rights_uid = access_rights_uid or uid
stage_obj = self.pool.get('project.task.type')
order = stage_obj._order
# lame hack to allow reverting search, should just work in the trivial case
if read_group_order == 'stage_id desc':
order = "%s desc" % order
# retrieve team_id from the context and write the domain
# - ('id', 'in', 'ids'): add columns that should be present
# - OR ('case_default', '=', True), ('fold', '=', False): add default columns that are not folded
# - OR ('project_ids', 'in', project_id), ('fold', '=', False) if project_id: add project columns that are not folded
search_domain = []
project_id = self._resolve_project_id_from_context(cr, uid, context=context)
if project_id:
search_domain += ['|', ('project_ids', '=', project_id), ('id', 'in', ids)]
else:
search_domain += ['|', ('id', 'in', ids), ('case_default', '=', True)]
# perform search
stage_ids = stage_obj._search(cr, uid, search_domain, order=order, access_rights_uid=access_rights_uid, context=context)
result = stage_obj.name_get(cr, access_rights_uid, stage_ids, context=context)
# restore order of the search
result.sort(lambda x, y: cmp(stage_ids.index(x[0]), stage_ids.index(y[0])))
fold = {}
for stage in stage_obj.browse(cr, access_rights_uid, stage_ids, context=context):
fold[stage.id] = stage.fold or False
return result, fold
def _compute_day(self, cr, uid, ids, fields, args, context=None):
"""
@param cr: the current row, from the database cursor,
@param uid: the current user’s ID for security checks,
@param ids: List of Openday’s IDs
        @return: difference between current date and log date
@param context: A standard dictionary for contextual values
"""
Calendar = self.pool['resource.calendar']
res = dict((res_id, {}) for res_id in ids)
for issue in self.browse(cr, uid, ids, context=context):
values = {
'day_open': 0.0, 'day_close': 0.0,
                'working_hours_open': 0.0, 'working_hours_close': 0.0,
'days_since_creation': 0.0, 'inactivity_days': 0.0,
}
# if the working hours on the project are not defined, use default ones (8 -> 12 and 13 -> 17 * 5), represented by None
calendar_id = None
if issue.project_id and issue.project_id.resource_calendar_id:
calendar_id = issue.project_id.resource_calendar_id.id
dt_create_date = datetime.strptime(issue.create_date, DEFAULT_SERVER_DATETIME_FORMAT)
if issue.date_open:
dt_date_open = datetime.strptime(issue.date_open, DEFAULT_SERVER_DATETIME_FORMAT)
values['day_open'] = (dt_date_open - dt_create_date).total_seconds() / (24.0 * 3600)
values['working_hours_open'] = Calendar._interval_hours_get(
cr, uid, calendar_id, dt_create_date, dt_date_open,
timezone_from_uid=issue.user_id.id or uid,
exclude_leaves=False, context=context)
if issue.date_closed:
dt_date_closed = datetime.strptime(issue.date_closed, DEFAULT_SERVER_DATETIME_FORMAT)
values['day_close'] = (dt_date_closed - dt_create_date).total_seconds() / (24.0 * 3600)
values['working_hours_close'] = Calendar._interval_hours_get(
cr, uid, calendar_id, dt_create_date, dt_date_closed,
timezone_from_ui
|
andpp/cherrymusic
|
cherrymusicserver/service.py
|
Python
|
gpl-3.0
| 7,773
| 0.000129
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# CherryMusic - a standalone music server
# Copyright (c) 2012 - 2014 Tom Wallroth & Tilman Boerner
#
# Project page:
# http://fomori.org/cherrymusic/
# Sources on github:
# http://github.com/devsnd/cherrymusic/
#
# CherryMusic is based on
# jPlayer (GPL/MIT license) http://www.jplayer.org/
# CherryPy (BSD license) http://www.cherrypy.org/
#
# licensed under GNU GPL version 3 (or later)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
""" Dependency injection and other facilities to match providers of services
with their users.
Nature and interface of a service are left for the concerned parties to
agree on; all this module knows about the service is its name, or "handle".
Basic usage::
>>> pizza = object()
>>> service.provide('pizzaservice', pizza)
>>> pizza is service.get('pizzaservice')
True
Types as providers and users::
>>> class PizzaService(object):
... pass
...
>>> @service.user(mypizza='pizzaservice') # become a user
... class PizzaUser(object):
... pass
...
>>> user = PizzaUser()
>>> service.provide('pizzaservice', PizzaService)
>>> isinstance(user.mypizza, PizzaService) # provider as attribute
True
"""
import threading
from cherrymusicserver import log
class MutualDependencyBreak(Exception):
"""Raised when mutually dependent providers are trying to instantiate
each other in their constructors.
This happens while creating a provider that is part of a dependency
cycle; when it is allowed, in its constructor, to access a dependency
that's also part of the cycle, a singularity is spawned which implodes the
universe. This exception is raised to prevent that.
In general, don't create cyclic dependencies. It's bad for your brain and
also a sure sign of a problematic program architecture. When confronted
with a mutual dependency, extract a third class from one of the offenders
for both to depend on.
"""
pass
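# Illustrative sketch (added; not part of the original module): instead of two
# services requiring each other, extract a third service for both to depend on:
#
#     service.provide('shared', object())
#
#     @service.user(shared='shared')
#     class A(object):
#         pass
#
#     @service.user(shared='shared')
#     class B(object):
#         pass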
__provider_factories = {}
__providercache = {}
def provide(handle, provider, args=(), kwargs={}):
""" Activate a provider for the service identified by ``handle``,
replacing a previous provider for the same service.
If the provider is a ``type``, an instance will be created as the
actual provider. Instantiation is lazy, meaning it will be deferred
until the provider is requested (:func:`get`) by some user.
To use a type as a provider, you need to wrap it into something that is
not a type.
handle : str
        The name of the service.
provider :
An object that provides the service, or a type that instantiates
such objects. Instantiation will happen on the first get call.
args, kwargs :
Pass on arguments to a type.
"""
assert isinstance(provider, type) or not (args or kwargs)
__provider_factories[handle] = _ProviderFactory.get(provider, args, kwargs)
__providercache.pop(handle, None)
log.d('service %r: now provided by %r', handle, provider)
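# Illustrative sketch (added; an assumption, since provide() instantiates bare
# types lazily): to offer a class *itself* as the service, wrap it in a
# non-type holder and unwrap it at the call site:
#
#     class _Holder(object):
#         def __init__(self, cls):
#             self.cls = cls
#
#     service.provide('pizzaclass', _Holder(PizzaService))
#     service.get('pizzaclass').cls is PizzaService  # True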
def get(handle):
"""Request the provider for the service identified by ``handle``.
If a type was registered for the handle, the actual provider will be the
result of instantiating the type when it is first requested.
Although the goal is to create only one instance, it is possible that
different threads see different instances.
"""
try:
return __providercache[handle]
except KeyError:
return _createprovider(handle)
class require(object):
"""Descriptor to make a service provider available as a class attribute.
>>> import cherrymusicserver.service as service
>>> class ServiceUser(object):
... mypizzas = service.require('pizzaservice')
"""
def __init__(self, handle):
self.handle = handle
def __repr__(self):
return '{0}({1!r})'.format(self.__class__.__name__, self.handle)
def __get__(self, instance, owner):
return get(self.handle)
def user(**requirements):
""" Class deocrator to inject service providers as attributes into the
decorated class.
requirements : name=handle
Create :class:`require` descriptor attributes in the class:
``name = require(handle)``.
Returns: Class Decorator
A function that takes the user class as its sole argument.
"""
def clsdecorator(cls):
for attribute, handle in requirements.items():
setattr(cls, attribute, require(handle))
return cls
return clsdecorator
def _createprovider(handle):
try:
factory = __provider_factories[handle]
except KeyError:
raise LookupError('Service not available: {0!r}'.format(handle))
return __providercache.setdefault(handle, factory.make())
class _ProviderFactory(object):
""" High security facility to contain cyclic dependency and multithreading
issues.
Factory instances guard against dependency cycles by raising a
    :class:`MutualDependencyBreak` when mutually dependent providers
try to instantiate each other.
"""
_master_lock = threading.Lock()
__factories = {}
@classmethod
def get(cls, provider, args=(), kwargs=None):
if kwargs is None:
kwargs = {}
        with cls._master_lock:
try:
factory = cls.__factories[id(provider)]
factory.args = args
factory.kwargs = kwargs
except KeyError:
factory = cls(provider, args, kwargs)
cls.__factories[id(provider)] = factory
return factory
def __init__(self, provider, args=(), kwargs={}):
assert self._master_lock.locked(), 'use .get(...) to obtain instances'
self.provider = provider
self.args = args
self.kwargs = kwargs
self.__threadlocal = threading.local()
@property
def lock(self):
"""Thread-local: dependendy issues will happen inside the same thread,
so don't compete with other threads."""
local = self.__threadlocal
try:
lock = local.lock
except AttributeError:
with self._master_lock:
lock = local.__dict__.setdefault('lock', threading.Lock())
return lock
def make(self):
""" Return a provider instance.
Raises : :cls:`MutualDependencyBreak`
If called recursively within the same thread, which happens
when mutually dependent providers try to instantiate each other.
"""
if self.lock.locked():
raise MutualDependencyBreak(self.provider)
with self.lock:
if isinstance(self.provider, (type, type(Python2OldStyleClass))):
return self.provider(*self.args, **self.kwargs)
return self.provider
class Python2OldStyleClass:
"""In Python2, I am a ``classobj`` which is not the same as a ``type``."""
pass
|
daedric/buck
|
scripts/diff_rulekeys_test.py
|
Python
|
apache-2.0
| 11,725
| 0.000597
|
import os
import unittest
import tempfile
from diff_rulekeys import *
class MockFile(object):
def __init__(self, lines):
self._lines = lines
def readlines(self):
return self._lines
class TestRuleKeyDiff(unittest.TestCase):
def test_key_value_diff(self):
list_diff = KeyValueDiff()
list_diff.append('l', 'r')
self.assertEqual(list_diff.diff(), ['-[l]', '+[r]'])
def test_key_value_diff_with_common_elements(self):
list_diff = KeyValueDiff()
l = ['a', 'b', 'c']
r = ['b', 'd', 'c']
for l, r in map(None, l, r):
list_diff.append(l, r)
self.assertEqual(list_diff.diff(), ['-[a]', '+[d]'])
def test_key_value_diff_with_common_elements_and_sort_issue(self):
list_diff = KeyValueDiff()
l = ['a', 'b', 'c']
r = ['c', 'd', 'b']
for l, r in map(None, l, r):
list_diff.append(l, r)
self.assertEqual(
list_diff.diff(),
['-[a]',
'+[d]',
'Only order of remaining entries differs: [b, c] vs [c, b].'])
def test_key_value_diff_with_common_elements_repetitions(self):
list_diff = KeyValueDiff()
l = ['a', 'b', 'b', 'c']
r = ['c', 'b', 'b', 'b']
for l, r in map(None, l, r):
list_diff.append(l, r)
self.assertEqual(
list_diff.diff(),
['-[a]',
'Order and repetition count of remaining entries differs: ' +
'[b, c] vs [c, b, b].'])
def test_key_value_diff_sort(self):
list_diff = KeyValueDiff()
list_diff.append('1', '2')
list_diff.append('2', '1')
self.assertEqual(
list_diff.diff(),
["Only order of entries differs: [1, 2] vs [2, 1]."])
def test_key_value_diff_case(self):
list_diff = KeyValueDiff()
list_diff.append('B', 'a')
list_diff.append('a', 'b')
self.assertEqual(
list_diff.diff(),
["Only order and letter casing (Upper Case vs lower case) of " +
"entries differs:", '-[B]', '+[b]'])
def test_key_value_diff_paths(self):
list_diff = KeyValueDiff()
list_diff.append('path(a.java:123)', 'path(a.java:322)')
list_diff.append('path(C.java:123)', 'path(c.java:123)')
list_diff.diff()
self.assertEqual(
set(list_diff.getInterestingPaths()),
set(['a.java', 'c.java', 'C.java']))
def test_structure_info(self):
line = ("[v] RuleKey 00aa=string(\"//:rule\"):key(name):" +
"number(1):key(version):string(\"Rule\"):key(buck.type):")
info = RuleKeyStructureInfo(MockFile([line]))
self.assertEqual(info.getNameForKey("00aa"), "//:rule")
def test_structure_info_list(self):
line = ("[v] RuleKey 00aa=string(\"//:rule\"):key(name):" +
"number(1):key(version):string(\"Rule\"):key(buck.type):" +
"number(1):key(num):number(2):key(num):")
info = RuleKeyStructureInfo(MockFile([line]))
self.assertEqual(
info.getByKey("00aa")['num'],
["number(2)", "number(1)"])
def test_simple_diff(self):
name = "//:lib"
result = diff(name,
RuleKeyStructureInfo(MockFile([
makeRuleKeyLine(
name=name,
key="aabb",
srcs={'JavaLib1.java': 'aabb'}
),
])),
RuleKeyStructureInfo(MockFile([
makeRuleKeyLine(
name=name,
key="cabb",
srcs={'JavaLib1.java': 'cabb'}
),
])),
verbose=False)
expected = [
'Change details for [//:lib]',
' (srcs):',
' -[path(JavaLib1.java:aabb)]',
' +[path(JavaLib1.java:cabb)]']
self.assertEqual(result, expected)
def test_diff_deps_order(self):
result = diff("//:top",
RuleKeyStructureInfo(MockFile([
makeRuleKeyLine(
name="//:top",
key="aa",
deps=["00", "10"],
),
makeRuleKeyLine(
name="//:Zero",
key="00",
srcs={"Zero": "0"}
),
makeRuleKeyLine(
name="//:One",
key="10",
srcs={"One": "0"}
),
])),
RuleKeyStructureInfo(MockFile([
makeRuleKeyLine(
name="//:top",
key="bb",
deps=["11", "0
|
1"],
),
makeRuleKeyLine(
name="//:Zero",
key="01",
srcs={"Zero": "0"}
),
makeRuleKeyLine(
name="//:One",
key="11",
srcs={"One": "1"}
),
])),
verbose=False)
expected = [
'Change details for [//:top]',
' (deps): order of deps was name-aligned.',
'Change details for [//:One]',
' (srcs):',
' -[path(One:0)]',
' +[path(One:1)]',
]
self.assertEqual(result, expected)
def test_diff_deps_count(self):
result = diff("//:top",
RuleKeyStructureInfo(MockFile([
makeRuleKeyLine(
name="//:top",
key="aa",
deps=["00"],
),
makeRuleKeyLine(
name="//:Zero",
key="00",
srcs={"Zero": "0"}
),
])),
RuleKeyStructureInfo(MockFile([
makeRuleKeyLine(
name="//:top",
key="bb",
deps=["11", "01"],
),
makeRuleKeyLine(
name="//:Zero",
key="01",
srcs={"Zero": "0"}
),
makeRuleKeyLine(
name="//:One",
key="11",
srcs={"One": "1"}
),
])),
verbose=False)
expected = [
'Change details for [//:top]',
' (deps):',
' -[<missing>]',
' +["//:One"@ruleKey(sha1=11)]',
]
self.assertEqual(result, expected)
def test_diff_doesnt_know(self):
result = diff("//:top",
RuleKeyStructureInfo(MockFile([
makeRuleKeyLine(
name="//:top",
key="aa",
),
])),
RuleKeyStructureInfo(MockFile([
makeRuleKeyLine(
name="//:top",
key="bb",
),
])),
verbose=False)
expected = ["I d
|
MartinHjelmare/home-assistant
|
tests/helpers/test_template.py
|
Python
|
apache-2.0
| 39,409
| 0
|
"""Test Home Assistant template helper methods."""
import asyncio
from datetime import datetime
import unittest
import random
import math
import pytz
from unittest.mock import patch
from homeassistant.components import group
from homeassistant.exceptions import TemplateError
from homeassistant.helpers import template
from homeassistant.util.unit_system import UnitSystem
from homeassistant.const import (
LENGTH_METERS,
TEMP_CELSIUS,
MASS_GRAMS,
PRESSURE_PA,
VOLUME_LITERS,
MATCH_ALL,
)
import homeassistant.util.dt as dt_util
from tests.common import get_test_home_assistant
import pytest
class TestHelpersTemplate(unittest.TestCase):
"""Test the Template."""
# pylint: disable=invalid-name
def setUp(self):
"""Set up the tests."""
self.hass = get_test_home_assistant()
self.hass.config.units = UnitSystem('custom', TEMP_CELSIUS,
LENGTH_METERS, VOLUME_LITERS,
MASS_GRAMS, PRESSURE_PA)
# pylint: disable=invalid-name
def tearDown(self):
"""Stop down stuff we started."""
self.hass.stop()
def test_referring_states_by_entity_id(self):
"""Test referring states by entity id."""
self.hass.states.set('test.object', 'happy')
assert 'happy' == \
template.Template(
'{{ states.test.object.state }}', self.hass).render()
def test_iterating_all_states(self):
"""Test iterating all states."""
self.hass.states.set('test.object', 'happy')
self.hass.states.set('sensor.temperature', 10)
assert '10happy' == \
template.Template(
'{% for state in states %}{{ state.state }}{% endfor %}',
self.hass).render()
def test_iterating_domain_states(self):
"""Test iterating domain states."""
self.hass.states.set('test.object', 'happy')
self.hass.states.set('sensor.back_door', 'open')
self.hass.states.set('sensor.temperature', 10)
assert 'open10' == \
template.Template("""
{% for state in states.sensor %}{{ state.state }}{% endfor %}
""", self.hass).render()
def test_float(self):
"""Test float."""
self.hass.states.set('sensor.temperature', '12')
assert '12.0' == \
template.Template(
'{{ float(states.sensor.temperature.state) }}',
self.hass).render()
assert 'True' == \
template.Template(
'{{ float(states.sensor.temperature.state) > 11 }}',
self.hass).render()
def test_rounding_value(self):
"""Test rounding value."""
self.hass.states.set('sensor.temperature', 12.78)
assert '12.8' == \
template.Template(
'{{ states.sensor.temperature.state | round(1) }}',
self.hass).render()
assert '128' == \
template.Template(
'{{ states.sensor.temperature.state | multiply(10) | round }}',
self.hass).render()
assert '12.7' == \
template.Template(
'{{ states.sensor.temperature.state | round(1, "floor") }}',
self.hass).render()
assert '12.8' == \
template.Template(
'{{ states.sensor.temperature.state | round(1, "ceil") }}',
self.hass).render()
def test_rounding_value_get_original_value_on_error(self):
"""Test rounding value get original value on error."""
assert 'None' == \
template.Template('{{ None | round }}', self.hass).render()
assert 'no_number' == \
template.Template(
'{{ "no_number" | round }}', self.hass).render()
def test_multiply(self):
"""Test multiply."""
tests = {
None: 'None',
10: '100',
'"abcd"': 'abcd'
}
for inp, out in tests.items():
assert out == \
template.Template('{{ %s | multiply(10) | round }}' % inp,
self.hass).render()
def test_logarithm(self):
"""Test logarithm."""
tests = [
(4, 2, '2.0'),
(1000, 10, '3.0'),
(math.e, '', '1.0'),
('"invalid"', '_', 'invalid'),
(10, '"invalid"', '10.0'),
]
for value, base, expected in tests:
assert expected == \
template.Template(
'{{ %s | log(%s) | round(1) }}' % (value, base),
self.hass).render()
assert expected == \
template.Template(
'{{ log(%s, %s) | round(1) }}' % (value, base),
self.hass).render()
def test_sine(self):
"""Test sine."""
tests = [
(0, '0.0'),
(math.pi / 2, '1.0'),
(math.pi, '0.0'),
(math.pi * 1.5, '-1.0'),
(math.pi / 10, '0.309')
]
for value, expected in tests:
assert expected == \
template.Template(
'{{ %s | sin | round(3) }}' % value,
self.hass).render()
def test_cos(self):
"""Test cosine."""
tests = [
(0, '1.0'),
(math.pi / 2, '0.0'),
(math.pi, '-1.0'),
(math.pi * 1.5, '-0.0'),
(math.pi / 10, '0.951')
]
for value, expected in tests:
assert expected == \
template.Template(
'{{ %s | cos | round(3) }}' % value,
self.hass).render()
def test_tan(self):
"""Test tangent."""
tests = [
(0, '0.0'),
(math.pi, '-0.0'),
(math.pi / 180 * 45, '1.0'),
(math.pi / 180 * 90, '1.633123935319537e+16'),
(math.pi / 180 * 135, '-1.0')
]
for value, expected in tests:
assert expected == \
template.Template(
'{{ %s | tan | round(3) }}' % value,
self.hass).render()
def test_sqrt(self):
"""Test square root."""
tests = [
(0, '0.0'),
(1, '1.0'),
(2, '1.414'),
(10, '3.162'),
(100, '10.0'),
]
for value, expected in tests:
assert expected == \
template.Template(
'{{ %s | sqrt | round(3) }}' % value,
self.hass).render()
def test_strptime(self):
"""Test the parse timestamp method."""
tests = [
('2016-10-19 15:22:05.588122 UTC',
'%Y-%m-%d %H:%M:%S.%f %Z', None),
('2016-10-19 15:22:05.588122+0100',
'%Y-%m-%d %H:%M:%S.%f%z', None),
('2016-10-19 15:22:05.588122',
'%Y-%m-%d %H:%M:%S.%f', None),
('2016-10-19', '%Y-%m-%d', None),
('2016', '%Y', None),
('15:22:05', '%H:%M:%S', None),
('1469119144', '%Y', '1469119144'),
            ('invalid', '%Y', 'invalid')
]
for inp, fmt, expected in tests:
if expected is None:
expected = datetime.strptime(inp, fmt)
temp = '{{ strptime(\'%s\', \'%s\') }}' % (inp, fmt)
assert str(expected) == \
template.Template(temp, self.hass).render()
def test_timestamp_custom(self):
"""Test the t
|
imestamps to custom filter."""
now = dt_util.utcnow()
tests = [
(None, None, None, 'None'),
(1469119144, None, True, '2016-07-21 16:39:04'),
(1469119144, '%Y', True, '2016'),
(1469119144, 'invalid', True, 'invalid'),
(dt_util.as_timestamp(now), None, False,
now.strftime('%Y-%m-%d %H:%M:%S'))
]
for inp, fmt, local, out in tests:
if fmt:
fil = 'timestamp_custom(\'{}\')'.format(fmt)
elif fmt and local:
fil = 'timestamp_cu
|
jeremiah-c-leary/vhdl-style-guide
|
vsg/tests/package/test_rule_001.py
|
Python
|
gpl-3.0
| 1,229
| 0.004068
|
import os
import unittest
from vsg.rules import package
from vsg import vhdlFile
from vsg.tests import utils
sTestDir = os.path.dirname(__file__)
lFile, eError =vhdlFile.utils.read_vhdlfile(os.path.join(sTestDir,'rule_001_test_input.vhd'))
dIndentMap = utils.read_indent_file()
lExpected = []
lExpected.append('')
utils.read_file(os.path.join(sTestDir, 'rule_001_test_input.fixed.vhd'), lExpected)
class test_package_rule(unittest.TestCase):
def setUp(self):
self.oFile = vhdlFile.vhdlFile(lFile)
self.assertIsNone(eError)
self.oFile.set_indent_map(dIndentMap)
def test_rule_001(self):
oRule = package.rule_001()
self.assertTrue(oRule)
self.assertEqual(oRule.name, 'package')
self.assertEqual(oRule.identifier, '001')
lExpected = [6]
oRule.analyze(self.oFile)
        self.assertEqual(lExpected, utils.extract_violation_lines_from_violation_object(oRule.violations))
def test_fix_rule_001(self):
oRule = package.rule_001()
oRule.fix(self.oFile)
lActual = self.oFile.get_lines()
self.assertEqual(lExpected, lActual)
oRule.analyze(self.oFile)
        self.assertEqual(oRule.violations, [])
|
zenoss/ZenPacks.community.OpenSolaris
|
ZenPacks/community/OpenSolaris/tests/plugindata/Solaris/server3/ifconfig.py
|
Python
|
gpl-2.0
| 393
| 0.007634
|
{"ifconfig":
{
"lo0": dict(
interfaceName='lo0',
mtu=8232,
operStatus=1,
adminStatus=1,
compname="os", ),
"xnf0": dict(
interfaceName='xnf0',
mtu=1500,
operStatus=1,
adminStatus=1,
speed=1000000000,
setIpAddresses=['10.209.191.4/23'],
compname="os", ),
}
}
|
Vijaysai005/KProject
|
vijay/POI_RLE/timedate.py
|
Python
|
gpl-3.0
| 709
| 0.03385
|
#!/usr/bin/env python
"""
Created on Fri Jun 23
@author : Vijayasai S
"""
def DateMonthYear(data):
year = [] ; month = [] ; date = []
for index in range(len(data["packettimestamp"])):
year.append(int(data["packettimestamp"][index][0:4]))
		month.append(int(data["packettimestamp"][index][5:7]))
date.append(int(data["packettimestamp"][index][8:10]))
return date, month, year
def HoursMinutesSeconds(data):
hours = [] ; minutes = [] ; seconds = []
for index in range(len(data["packettimestamp"])):
hours.append(data["packettimestamp"][index][11:13])
minutes.append(data["packettimestamp"][index][14:16])
seconds.append(data["pac
|
kettimestamp"][index][17:-1])
return hours, minutes, seconds
|
flamingspaz/remo
|
remo/profiles/tasks.py
|
Python
|
bsd-3-clause
| 5,501
| 0.000364
|
from calendar import monthrange
from datetime import date
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.core.mail import send_mail
from django.template.loader import render_to_string
from django.utils.timezone import now
import waffle
from remo.base.mozillians import BadStatusCode, MozilliansClient, ResourceDoesNotExist
from remo.base.tasks import send_remo_mail
from remo.base.utils import get_date, number2month
from remo.celery import app
from remo.dashboard.models import ActionItem
from remo.profiles.models import (UserProfile, UserStatus,
NOMINATION_ACTION_ITEM)
ROTM_REMINDER_DAY = 1
NOMINATION_END_DAY = 10
@app.task
def send_generic_mail(recipient_list, subject, email_template, data={}):
"""Send email to recipient_list rendered using email_template and populated
with data.
"""
data.update({'SITE_URL': settings.SITE_URL,
'FROM_EMAIL': settings.FROM_EMAIL})
message = render_to_string(email_template, data)
send_mail(subject, message, settings.FROM_EMAIL, recipient_list)
@app.task(task_soft_time_limit=600)
def check_mozillian_username():
mozillians = User.objects.filter(groups__name='Mozillians')
client = MozilliansClient(settings.MOZILLIANS_API_URL,
settings.MOZILLIANS_API_KEY)
for user in mozillians:
try:
data = client.lookup_user({'email': user.email})
except (BadStatusCode, ResourceDoesNotExist):
data = None
        if data and data['is_vouched'] and data['full_name']['privacy'] == 'Public':
full_name = data['full_name']['value']
first_name, last_name = (full_name.split(' ', 1)
if ' ' in full_name else ('', full_name))
user.first_name = first_name
            user.last_name = last_name
user.userprofile.mozillian_username = data['username']
else:
user.first_name = 'Anonymous'
user.last_name = 'Mozillian'
user.userprofile.mozillian_username = ''
if len(user.last_name) > 30:
user.last_name = user.last_name[:30]
if len(user.first_name) > 30:
user.first_name = user.first_name[:30]
user.save()
user.userprofile.save()
@app.task(task_ignore_result=False)
def check_celery():
"""Dummy celery task to check that everything runs smoothly."""
pass
@app.task
def reset_rotm_nominees():
"""Reset the Rep of the month nomination in user profiles.
This task will reset the nomination bit for the Rep of the month in the
user profiles, for all the users nominated in each month. This will take
place at the last day of each month.
"""
now_date = now().date()
days_of_month = monthrange(now_date.year, now_date.month)[1]
if (now_date == date(now_date.year, now_date.month, days_of_month) or
waffle.switch_is_active('enable_rotm_tasks')):
nominees = UserProfile.objects.filter(is_rotm_nominee=True)
for nominee in nominees:
nominee.is_rotm_nominee = False
nominee.rotm_nominated_by = None
nominee.save()
@app.task
def send_rotm_nomination_reminder():
""" Send an email reminder to all mentors.
The first day of each month, the mentor group receives an email reminder
in order to nominate Reps for the Rep of the month voting.
"""
now_date = now().date()
if (now_date.day == ROTM_REMINDER_DAY or
waffle.switch_is_active('enable_rotm_tasks')):
data = {'month': number2month(now_date.month)}
subject = 'Nominate Rep of the month'
template = 'emails/mentors_rotm_reminder.jinja'
send_remo_mail(subject=subject,
email_template=template,
recipients_list=[settings.REPS_MENTORS_LIST],
data=data)
mentors = User.objects.filter(groups__name='Mentor')
for mentor in mentors:
ActionItem.create(mentor.userprofile)
@app.task
def set_unavailability_flag():
"""Set the unavailable flag in UserStatus.
This task runs every 12 hours and sets the unavailable flag to True
in the case that a user has submitted a 'break notification' with a start
date in the future."""
(UserStatus.objects.filter(start_date__range=[get_date(-1), get_date()],
is_unavailable=False)
.update(is_unavailable=True))
@app.task
def resolve_nomination_action_items():
"""Resolve action items.
Resolve all the action items relevant to nomination reminders after the
10th day of each month.
"""
today = now().date()
if (today.day == NOMINATION_END_DAY or
waffle.switch_is_active('enable_rotm_tasks')):
mentors = UserProfile.objects.filter(user__groups__name='Mentor')
action_model = ContentType.objects.get_for_model(UserProfile)
# All the completed action items are always resolved
name = u'{0} {1}'.format(NOMINATION_ACTION_ITEM, today.strftime('%B'))
items = (ActionItem.objects.filter(content_type=action_model,
object_id__in=mentors,
name=name)
.exclude(completed=True))
items.update(resolved=True)
|
louispotok/pandas
|
pandas/core/indexes/category.py
|
Python
|
bsd-3-clause
| 31,573
| 0
|
import operator
import numpy as np
from pandas._libs import index as libindex
from pandas import compat
from pandas.compat.numpy import function as nv
from pandas.core.dtypes.generic import ABCCategorical, ABCSeries
from pandas.core.dtypes.dtypes import CategoricalDtype
from pandas.core.dtypes.common import (
is_categorical_dtype,
_ensure_platform_int,
is_list_like,
is_interval_dtype,
is_scalar)
from pandas.core.dtypes.missing import array_equivalent, isna
from pandas.core.algorithms import take_1d
from pandas.util._decorators import Appender, cache_readonly
from pandas.core.config import get_option
from pandas.core.indexes.base import Index, _index_shared_docs
from pandas.core import accessor
import pandas.core.common as com
import pandas.core.missing as missing
import pandas.core.indexes.base as ibase
_index_doc_kwargs = dict(ibase._index_doc_kwargs)
_index_doc_kwargs.update(dict(target_klass='CategoricalIndex'))
class CategoricalIndex(Index, accessor.PandasDelegate):
"""
Immutable Index implementing an ordered, sliceable set. CategoricalIndex
represents a sparsely populated Index with an underlying Categorical.
Parameters
----------
data : array-like or Categorical, (1-dimensional)
categories : optional, array-like
categories for the CategoricalIndex
ordered : boolean,
designating if the categories are ordered
copy : bool
        Make a copy of input ndarray
name : object
Name to be stored in the index
Attributes
----------
codes
categories
ordered
Methods
-------
rename_categories
reorder_categories
add_categories
remove_categories
remove_unused_categories
set_categories
as_ordered
as_unordered
map
See Also
--------
Categorical, Index
"""
    _typ = 'categoricalindex'
_engine_type = libindex.Int64Engine
_attributes = ['name']
def __new__(cls, data=None, categories=None, ordered=None, dtype=None,
copy=False, name=None, fastpath=False):
if fastpath:
return cls._simple_new(data, name=name, dtype=dtype)
if name is None and hasattr(data, 'name'):
name = data.name
if isinstance(data, ABCCategorical):
data = cls._create_categorical(cls, data, categories, ordered,
dtype)
elif isinstance(data, CategoricalIndex):
data = data._data
data = cls._create_categorical(cls, data, categories, ordered,
dtype)
else:
# don't allow scalars
# if data is None, then categories must be provided
if is_scalar(data):
if data is not None or categories is None:
cls._scalar_data_error(data)
data = []
data = cls._create_categorical(cls, data, categories, ordered,
dtype)
if copy:
data = data.copy()
return cls._simple_new(data, name=name)
def _create_from_codes(self, codes, categories=None, ordered=None,
name=None):
"""
*this is an internal non-public method*
create the correct categorical from codes
Parameters
----------
codes : new codes
categories : optional categories, defaults to existing
ordered : optional ordered attribute, defaults to existing
name : optional name attribute, defaults to existing
Returns
-------
CategoricalIndex
"""
from pandas.core.arrays import Categorical
if categories is None:
categories = self.categories
if ordered is None:
ordered = self.ordered
if name is None:
name = self.name
cat = Categorical.from_codes(codes, categories=categories,
ordered=self.ordered)
return CategoricalIndex(cat, name=name)
@staticmethod
def _create_categorical(self, data, categories=None, ordered=None,
dtype=None):
"""
*this is an internal non-public method*
create the correct categorical from data and the properties
Parameters
----------
data : data for new Categorical
categories : optional categories, defaults to existing
ordered : optional ordered attribute, defaults to existing
dtype : CategoricalDtype, defaults to existing
Returns
-------
Categorical
"""
if (isinstance(data, (ABCSeries, type(self))) and
is_categorical_dtype(data)):
data = data.values
if not isinstance(data, ABCCategorical):
if ordered is None and dtype is None:
ordered = False
from pandas.core.arrays import Categorical
data = Categorical(data, categories=categories, ordered=ordered,
dtype=dtype)
else:
if categories is not None:
data = data.set_categories(categories, ordered=ordered)
elif ordered is not None and ordered != data.ordered:
data = data.set_ordered(ordered)
if isinstance(dtype, CategoricalDtype):
# we want to silently ignore dtype='category'
data = data._set_dtype(dtype)
return data
@classmethod
def _simple_new(cls, values, name=None, categories=None, ordered=None,
dtype=None, **kwargs):
result = object.__new__(cls)
values = cls._create_categorical(cls, values, categories, ordered,
dtype=dtype)
result._data = values
result.name = name
for k, v in compat.iteritems(kwargs):
setattr(result, k, v)
result._reset_identity()
return result
@Appender(_index_shared_docs['_shallow_copy'])
def _shallow_copy(self, values=None, categories=None, ordered=None,
dtype=None, **kwargs):
# categories and ordered can't be part of attributes,
# as these are properties
# we want to reuse self.dtype if possible, i.e. neither are
# overridden.
if dtype is not None and (categories is not None or
ordered is not None):
raise TypeError("Cannot specify both `dtype` and `categories` "
"or `ordered`")
if categories is None and ordered is None:
dtype = self.dtype if dtype is None else dtype
return super(CategoricalIndex, self)._shallow_copy(
values=values, dtype=dtype, **kwargs)
if categories is None:
categories = self.categories
if ordered is None:
ordered = self.ordered
return super(CategoricalIndex, self)._shallow_copy(
values=values, categories=categories,
ordered=ordered, **kwargs)
def _is_dtype_compat(self, other):
"""
*this is an internal non-public method*
provide a comparison between the dtype of self and other (coercing if
needed)
Raises
------
TypeError if the dtypes are not compatible
"""
if is_categorical_dtype(other):
if isinstance(other, CategoricalIndex):
other = other._values
if not other.is_dtype_equal(self):
raise TypeError("categories must match existing categories "
"when appending")
else:
values = other
if not is_list_like(values):
values = [values]
other = CategoricalIndex(self._create_categorical(
self, other, categories=self.categories, ordered=self.ordered))
if not other.isin(values).all():
raise TypeError("cannot append a non-category item to a "
"CategoricalIndex")
return othe
|
cropleyb/pentai
|
pentai/ai/t_rot_standardise.py
|
Python
|
mit
| 11,292
| 0.005756
|
#!/usr/bin/env python
import unittest
from pentai.base.game_state import *
from pentai.base.game import *
import pentai.base.player as p_m
from pentai.base.rules import *
from pentai.ai.rot_standardise import *
class RotStandardiseTest(unittest.TestCase):
def setUp(self):
self.rules = Rules(9, "standard")
self.game = Game(self.rules, p_m.Player("BC"), p_m.Player("Whoever"))
###################################################
# flip tests
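    # (Added note) inferred from the assertions below: page_flip mirrors x
    # across the board (x -> size-1-x), calendar_flip mirrors y, and
    # diagonal_flip swaps x and y.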
def test_page_flip(self): # TODO: rename to flip
self.game.load_moves("1. (0,0)\n2. (3,3)\n3. (3,4)\n4. (5,4)")
gpf = page_flip(self.game.current_state)
brd = gpf.get_board()
self.assertEqual(brd.get_occ((8,0)), P1)
self.assertEqual(brd.get_occ((5,3)), P2)
self.assertEqual(brd.get_occ((5,4)), P1)
self.assertEqual(brd.get_occ((3,4)), P2)
def test_calendar_flip(self):
self.game.load_moves("1. (0,0)\n2. (3,3)\n3. (3,4)\n4. (5,4)")
gcf = calendar_flip(self.game.current_state)
brd = gcf.get_board()
self.assertEqual(brd.get_occ((0,8)), P1)
self.assertEqual(brd.get_occ((3,5)), P2)
self.assertEqual(brd.get_occ((3,4)), P1)
self.assertEqual(brd.get_occ((5,4)), P2)
def test_diagonal_flip(self):
""" i.e. swap x and y """
self.game.load_moves("1. (0,0)\n2. (3,3)\n3. (3,4)\n4. (5,4)")
gdf = diagonal_flip(self.game.current_state)
brd = gdf.get_board()
self.assertEqual(brd.get_occ((0,0)), P1)
self.assertEqual(brd.get_occ((3,3)), P2)
self.assertEqual(brd.get_occ((4,3)), P1)
self.assertEqual(brd.get_occ((4,5)), P2)
def test_diagonal_then_page(self):
self.game.load_moves("1. (0,0)\n2. (3,3)\n3. (3,4)\n4. (5,4)")
gdf = diagonal_flip(self.game.current_state)
gpf = page_flip(self.game.current_state)
brd = gpf.get_board()
self.assertEqual(brd.get_occ((8,0)), P1)
self.assertEqual(brd.get_occ((5,3)), P2)
self.assertEqual(brd.get_occ((4,3)), P1)
self.assertEqual(brd.get_occ((4,5)), P2)
def test_diagonal_then_calendar(self):
self.game.load_moves("1. (0,0)\n2. (3,3)\n3. (3,4)\n4. (5,4)")
gdf = diagonal_flip(self.game.current_state)
gcf = calendar_flip(self.game.current_state)
brd = gcf.get_board()
self.assertEqual(brd.get_occ((0,8)), P1)
self.assertEqual(brd.get_occ((3,5)), P2)
self.assertEqual(brd.get_occ((4,5)), P1)
self.assertEqual(brd.get_occ((4,3)), P2)
###################################################
# standardise position tests for 9x9
def test_standardise_SW_corner_pos(self):
self.game.load_moves("1. (0,0)")
std, fwd, rev = standardise(self.game.current_state)
brd = std.get_board()
self.assertEqual(std.get_all_captured(), [0, 0, 0])
self.assertEqual(brd.get_occ((0,8)), P1)
def test_standardise_NW_corner_pos(self):
self.game.load_moves("1. (0,8)")
std, fwd, rev = standardise(self.game.current_state)
brd = std.get_board()
self.assertEqual(std.get_all_captured(), [0, 0, 0])
self.assertEqual(brd.get_occ((0,8)), P1)
def test_standardise_NE_corner_pos(self):
self.game.load_moves("1. (8,8)")
std, fwd, rev = standardise(self.game.current_state)
brd = std.get_board()
self.assertEqual(std.get_all_captured(), [0, 0, 0])
self.assertEqual(brd.get_occ((0,8)), P1)
def test_standardise_SE_corner_pos(self):
self.game.load_moves("1. (8,0)")
std, fwd, rev = standardise(self.game.current_state)
brd = std.get_board()
self.assertEqual(std.get_all_captured(), [0, 0, 0])
self.assertEqual(brd.get_occ((0,8)), P1)
###################################################
# standardise position tests with two pieces
def test_standardise_SW_W(self):
self.game.load_moves("1. (0,0)\n2. (0, 4)")
std, fwd, rev = standardise(self.game.current_state)
brd = std.get_board()
self.assertEqual(std.get_all_captured(), [0, 0, 0])
self.assertEqual(brd.get_occ((0,8)), P1)
self.assertEqual(brd.get_occ((4,8)), P2)
def test_standardise_SW_S(self):
self.game.load_moves("1. (0,0)\n2. (4, 0)")
std, fwd, rev = standardise(self.game.current_state)
brd = std.get_board()
self.assertEqual(std.get_all_captured(), [0, 0, 0])
self.assertEqual(brd.get_occ((0,8)), P1)
self.assertEqual(brd.get_occ((4,8)), P2)
# !./t_standardise.py RotStandardiseTest.test_standardise_NW_W
def test_standardise_NW_W(self):
self.game.load_moves("1. (0,8)\n2. (0, 4)")
std, fwd, rev = standardise(self.game.current_state)
brd = std.get_board()
self.assertEqual(std.get_all_captured(), [0, 0, 0])
self.assertEqual(brd.get_occ((0,8)), P1)
self.assertEqual(brd.get_occ((4,8)), P2)
# !./t_standardise.py RotStandardiseTest.test_standardise_NW_N
def test_standardise_NW_N(self):
self.game.load_moves("1. (0,8)\n2. (4, 8)")
std, fwd, rev = standardise(self.game.current_state)
brd = std.get_board()
self.assertEqual(std.get_all_captured(), [0, 0, 0])
self.assertEqual(brd.get_occ((0,8)), P1)
self.assertEqual(brd.get_occ((4,8)), P2)
def test_standardise_NE_E(self):
self.game.load_moves("1. (8,8)\n2. (8, 4)")
std, fwd, rev = standardise(self.game.current_state)
brd = std.get_board()
self.assertEqual(std.get_all_captured(), [0, 0, 0])
self.assertEqual(brd.get_occ((0,8)), P1)
self.assertEqual(brd.get_occ((4,8)), P2)
def test_standardise_NE_N(self):
        self.game.load_moves("1. (8, 8)\n2. (4, 8)")
std, fwd, rev = standardise(self.game.current_state)
brd = std.get_board()
self.assertEqual(std.get_all_captured(), [0, 0, 0])
self.assertEqual(brd.get_occ((0,8)), P1)
self.assertEqual(brd.get_occ((4,8)), P2)
def test_standardise_SE_E(self):
self.game.load_moves("1. (8, 0)\n2. (8, 4)")
        std, fwd, rev = standardise(self.game.current_state)
brd = std.get_board()
self.assertEqual(std.get_all_captured(), [0, 0, 0])
self.assertEqual(brd.get_occ((0,8)), P1)
self.assertEqual(brd.get_occ((4,8)), P2)
def test_standardise_SE_S(self):
self.game.load_moves("1. (8, 0)\n2. (4, 0)")
std, fwd, rev = standardise(self.game.current_state)
brd = std.get_board()
self.assertEqual(std.get_all_captured(), [0, 0, 0])
self.assertEqual(brd.get_occ((0,8)), P1)
self.assertEqual(brd.get_occ((4,8)), P2)
class RotStandardisePositionTest(unittest.TestCase):
###################################################
# standardise position tests
def setUp(self):
self.rules = Rules(19, "standard")
self.game = Game(self.rules, p_m.Player("BC"), p_m.Player("Whoever"))
def test_standardise_SW_corner_pos(self):
self.game.load_moves("1. (0,0)")
std, fwd, rev = standardise(self.game.current_state)
brd = std.get_board()
self.assertEqual(std.get_all_captured(), [0, 0, 0])
self.assertEqual(brd.get_occ((0,18)), P1)
def test_standardise_NW_corner_pos(self):
self.game.load_moves("1. (0,18)")
std, fwd, rev = standardise(self.game.current_state)
brd = std.get_board()
self.assertEqual(std.get_all_captured(), [0, 0, 0])
self.assertEqual(brd.get_occ((0,18)), P1)
def test_standardise_NE_corner_pos(self):
self.game.load_moves("1. (18,18)")
std, fwd, rev = standardise(self.game.current_state)
brd = std.get_board()
self.assertEqual(std.get_all_captured(), [0, 0, 0])
self.assertEqual(brd.get_occ((0,18)), P1)
def test_standardise_SE_corner_pos(self):
self.game.load_moves("1. (18,0)")
std, fwd, rev = standardise(self.game.current_state)
brd = std.get_board()
self.assertEqual(std.get_all_captured(), [0, 0,
|
brentp/methylcode
|
methylcoder/tests/test_index.py
|
Python
|
bsd-3-clause
| 2,099
| 0.003335
|
import unittest
import os.path as op
import sys
import os
from methylcoder.fastqindex import FastQIndex, FastaIndex, FastQEntry, FastaEntry, guess_index_class
import bsddb
PATH = op.dirname(__file__)
DATA = op.join(PATH, "data")
class GuesserTest(unittest.TestCase):
def setUp(self):
self.fastq = op.join(DATA, "sample.fastq.test")
self.fasta = op.join(DATA, "sample.fasta.test")
def test_fastq(self):
self.assertEquals(FastQIndex, guess_index_class(self.fastq))
def test_fasta(self):
self.assertEquals(FastaIndex, guess_index_class(self.fasta))
def test_bad(self):
self.assertRaises(AssertionError, guess_index_class, self.fasta + "ASDF")
class FastQIndexTest(unittest.TestCase):
base_file = "sample.fastq.test"
klass = FastQIndex
header_start = "@"
def setUp(self):
self.path = op.join(DATA, self.base_file)
self.idx_path = self.path + self.klass.ext
def test_create(self):
self.assert_(op.exists(self.path))
fi = self.klass(self.path)
self.assert_(op.exists(self.idx_path))
def test_len(self):
fi = self.klass(self.path)
nlines = sum(1 for i in open(self.path))
self.assertEqual(nlines / self.klass.entry_class.lines, len(fi))
def test_contains(self):
fi = self.klass(self.path)
for header in (line.strip() for line in open(self.path) \
if line[0] == self.header_start):
            self.assert_(header[1:] in fi, (header, fi.iterkeys().next()))
def test_sequence(self):
fi = self.klass(self.path)
key, pos = iter(fi).next()
obj = fi[key]
pos = int(pos)
fh = open(self.path, "r")
fh.seek(pos)
entry = self.klass.entry_class(fh)
self.assertEquals(obj.seq, entry.seq, (obj, entry, pos))
def tearDown(self):
os.unlink(self.idx_path)
class FastaIndexTest(FastQIndexTest):
base_file = "sample.fasta.test"
klass = FastaIndex
header_start = ">"
if __name__ == "__main__":
unittest.main()
|
aldryn/aldryn-blog
|
aldryn_blog/__init__.py
|
Python
|
bsd-3-clause
| 100
| 0
|
# -*- coding: utf-8 -*-
__version__ = '0.5.0'
request_post_identifier = 'current_aldryn_blog_entry'
|
michaelBenin/sqlalchemy
|
lib/sqlalchemy/testing/requirements.py
|
Python
|
mit
| 17,967
| 0.001169
|
# testing/requirements.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Global database feature support policy.
Provides decorators to mark tests requiring specific feature support from the
target database.
External dialect test suites should subclass SuiteRequirements
to provide specific inclusion/exclusions.
"""
from . import exclusions
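# Illustrative sketch (added; not part of this module): an external dialect's
# test suite would subclass SuiteRequirements and close off the features its
# backend lacks, e.g.:
#
#     from sqlalchemy.testing.requirements import SuiteRequirements
#     from sqlalchemy.testing import exclusions
#
#     class Requirements(SuiteRequirements):
#         @property
#         def window_functions(self):
#             # this backend has no window functions
#             return exclusions.closed()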
class Requirements(object):
pass
class SuiteRequirements(Requirements):
@property
def create_table(self):
"""target platform can emit basic CreateTable DDL."""
return exclusions.open()
@property
def drop_table(self):
"""target platform can emit basic DropTable DDL."""
return exclusions.open()
@property
def foreign_keys(self):
"""Target database must support foreign keys."""
return exclusions.open()
@property
def on_update_cascade(self):
""""target database must support ON UPDATE..CASCADE behavior in
foreign keys."""
return exclusions.open()
@property
def non_updating_cascade(self):
"""target database must *not* support ON UPDATE..CASCADE behavior in
foreign keys."""
return exclusions.closed()
@property
def deferrable_fks(self):
return exclusions.closed()
@property
def on_update_or_deferrable_fks(self):
# TODO: exclusions should be composable,
# somehow only_if([x, y]) isn't working here, negation/conjunctions
# getting confused.
return exclusions.only_if(
lambda: self.on_update_cascade.enabled or self.deferrable_fks.enabled
)
@property
def self_referential_foreign_keys(self):
"""Target database must support self-referential foreign keys."""
return exclusions.open()
@property
def foreign_key_ddl(self):
"""Target database must support the DDL phrases for FOREIGN KEY."""
return exclusions.open()
@property
def named_constraints(self):
"""target database must support names for constraints."""
return exclusions.open()
@property
def subqueries(self):
"""Target database must support subqueries."""
return exclusions.open()
@property
def offset(self):
"""target database can render OFFSET, or an equivalent, in a SELECT."""
return exclusions.open()
@property
def bound_limit_offset(self):
"""target database can render LIMIT and/or OFFSET using a bound parameter"""
return exclusions.open()
@property
def boolean_col_expressions(self):
"""Target database must support boolean expressions as columns"""
return exclusions.closed()
@property
def nullsordering(self):
"""Target backends that support nulls ordering."""
return exclusions.closed()
@property
def standalone_binds(self):
"""target database/driver supports bound parameters as column expressions
without being in the context of a typed column.
"""
return exclusions.closed()
@property
def intersect(self):
"""Target database must support INTERSECT or equivalent."""
return exclusions.closed()
@property
def except_(self):
"""Target database must support EXCEPT or equivalent (i.e. MINUS)."""
return exclusions.closed()
@property
def window_functions(self):
"""Target database must support window functions."""
return exclusions.closed()
@property
def autoincrement_insert(self):
"""target platform generates new surrogate integer primary key values
when insert() is executed, excluding the pk column."""
return exclusions.open()
@property
def fetch_rows_post_commit(self):
"""target platform will allow cursor.fetchone() to proceed after a
COMMIT.
Typically this refers to an INSERT statement with RETURNING which
is invoked within "autocommit". If the row can be returned
after the autocommit, then this rule can be open.
"""
return exclusions.open()
@property
def empty_inserts(self):
"""target platform supports INSERT with no values, i.e.
INSERT DEFAULT VALUES or equivalent."""
return exclusions.only_if(
lambda config: config.db.dialect.supports_empty_insert or \
config.db.dialect.supports_default_values,
"empty inserts not supported"
)
@property
def insert_from_select(self):
"""target platform supports INSERT from a SELECT."""
return exclusions.open()
@property
def returning(self):
"""target platform supports RETURN
|
ING."""
return exclusions.only_if(
lambda config: config.db.dialect.implicit_returning,
"'returning' not supported by database"
)
@property
def duplicate_names_in_cursor_description(self):
"""target platform supports a SELECT statement that has
the same name repeated more than once in the columns list."""
return exclusions.open()
@property
def denormalized_names(self):
"""Target database must have 'denormalized', i.e.
UPPERCASE as case insensitive names."""
return exclusions.skip_if(
lambda config: not config.db.dialect.requires_name_normalize,
"Backend does not require denormalized names."
)
@property
def multivalues_inserts(self):
"""target database must support multiple VALUES clauses in an
INSERT statement."""
return exclusions.skip_if(
lambda config: not config.db.dialect.supports_multivalues_insert,
"Backend does not support multirow inserts."
)
@property
def implements_get_lastrowid(self):
""""target dialect implements the executioncontext.get_lastrowid()
method without reliance on RETURNING.
"""
return exclusions.open()
@property
def emulated_lastrowid(self):
""""target dialect retrieves cursor.lastrowid, or fetches
from a database-side function after an insert() construct executes,
within the get_lastrowid() method.
Only dialects that "pre-execute", or need RETURNING to get last
inserted id, would return closed/fail/skip for this.
"""
return exclusions.closed()
@property
def dbapi_lastrowid(self):
""""target platform includes a 'lastrowid' accessor on the DBAPI
cursor object.
"""
return exclusions.closed()
@property
def views(self):
"""Target database must support VIEWs."""
return exclusions.closed()
@property
def schemas(self):
"""Target database must support external schemas, and have one
named 'test_schema'."""
return exclusions.closed()
@property
def sequences(self):
"""Target database must support SEQUENCEs."""
return exclusions.only_if([
lambda config: config.db.dialect.supports_sequences
], "no sequence support")
@property
def sequences_optional(self):
"""Target database supports sequences, but also optionally
as a means of generating new PK values."""
return exclusions.only_if([
lambda config: config.db.dialect.supports_sequences and \
config.db.dialect.sequences_optional
], "no sequence support, or sequences not optional")
@property
def reflects_pk_names(self):
return exclusions.closed()
@property
def table_reflection(self):
return exclusions.open()
@property
def view_column_reflection(self):
"""target database must support retrieval of the columns in a view,
similarly to how a table i
|
rhlobo/github_jackdaw
|
server/blueprints/api.py
|
Python
|
mit
| 2,205
| 0.006349
|
import re
import json
import flask
_URIPATH_REGEX = re.compile(r'http[s]?://[^/]+/(.*)')
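# (Added note) e.g. _URIPATH_REGEX reduces 'https://api.github.com/user/orgs'
# to 'user/orgs'; used by _remove_host() below to strip scheme and host.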
def new_blueprint(github, basic_auth):
blueprint = flask.Blueprint('api', __name__, url_prefix='/api')
@blueprint.route('/status')
@basic_auth.required
def status():
data = {}
user = github.get('user')
data['user'] = user['login']
data['email'] = user['
|
email']
data['orgs'] = []
for organization in github.get(_remove_host(user['organizations_url'])):
orgdata = {
'name': organization['login'],
'avatar': organization['avatar_url']
}
orgdata['hooks'] = github.get('orgs/%s/hooks' % organization['login'], headers={'Accept': 'application/vnd.github.sersi-preview+json'})
data['orgs'].append(orgdata)
return flask.Response(json.dumps(data), content_type='application/json')
@blueprint.route('/hook/<org>', methods=['POST'])
@basic_auth.required
def createhook(org):
hook_registration = {
'name': 'web',
'active': True,
'events': ['push'],
'config': {
'url': 'https://webhooks.chaordicsystems.com/hooks/push_and_pr',
'content_type': 'json'
}
}
github.request('POST', 'orgs/%s/hooks' % org,
data=json.dumps(hook_registration),
headers={'Accept': 'application/vnd.github.sersi-preview+json',
'Content-Type': 'application/json'})
return status()
@blueprint.route('/hook/<org>', methods=['DELETE'])
@basic_auth.required
def deletehook(org):
hooks = github.get('orgs/%s/hooks' % org, headers={'Accept': 'application/vnd.github.sersi-preview+json'})
for hook in hooks:
try:
github.delete('orgs/%s/hooks/%s' % (org, hook['id']),
headers={'Accept': 'application/vnd.github.sersi-preview+json'})
except:
pass
return status()
def _remove_host(url):
return _URIPATH_REGEX.search(url).group(1)
return blueprint
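# Editor's note: _remove_host strips the scheme and host from a GitHub API URL,
# keeping only the path so it can be re-requested through the `github` client.
# For example (URL is illustrative):
#     _remove_host('https://api.github.com/users/octocat/orgs')  # -> 'users/octocat/orgs'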
|
munizao/mate-workspace-name-applet
|
wsname_applet.py
|
Python
|
gpl-2.0
| 7,780
| 0.0063
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#Uncomment to debug. If you aren't me, I bet you want to change the paths, too.
import sys
from wsnamelet import wsnamelet_globals
if wsnamelet_globals.debug:
sys.stdout = open ("/home/munizao/hacks/wsnamelet/debug.stdout", "w", buffering=1)
sys.stderr = open ("/home/munizao/hacks/wsnamelet/debug.stderr", "w", buffering=1)
import gi
gi.require_version("Gtk", "3.0")
gi.require_version("MatePanelApplet", "4.0")
from gi.repository import Gtk
from gi.repository import Gdk
from gi.repository import GObject
from gi.repository import Pango
from gi.repository import MatePanelApplet
gi.require_version ("Wnck", "3.0")
from gi.repository import Wnck
from gi.repository import Gio
#Internationalize
import locale
import gettext
gettext.bindtextdomain('wsnamelet', wsnamelet_globals.localedir)
gettext.textdomain('wsnamelet')
locale.bindtextdomain('wsnamelet', wsnamelet_globals.localedir)
locale.textdomain('wsnamelet')
gettext.install('wsnamelet', wsnamelet_globals.localedir)
#screen = None
class WSNamePrefs(object):
def __init__(self, applet):
self.applet = applet
self.dialog = Gtk.Dialog("Workspace Name Applet Preferences",
None,
Gtk.DialogFlags.DESTROY_WITH_PARENT,
(Gtk.STOCK_CLOSE, Gtk.ResponseType.CLOSE))
self.dialog.set_border_width(10)
width_spin_label = Gtk.Label(label=_("Applet width in pixels:"))
width_adj = Gtk.Adjustment(lower=30, upper=500, step_incr=1)
self.width_spin_button = Gtk.SpinButton.new(width_adj, 0.0, 0)
self.applet.settings.bind("width", self.width_spin_button, "value", Gio.SettingsBindFlags.DEFAULT)
width_spin_hbox = Gtk.HBox()
width_spin_hbox.pack_start(width_spin_label, True, True, 0)
width_spin_hbox.pack_start(self.width_spin_button, True, True, 0)
self.dialog.vbox.add(width_spin_hbox)
class WSNameEntry(Gtk.Entry):
def __init__(self, applet):
Gtk.Widget.__init__(self)
self.connect("activate", self._on_activate)
self.connect("key-release-event", self._on_key_release)
self.applet = applet
def _on_activate(self, event):
text = self.get_text()
self.applet.workspace.change_name(text)
self.applet.label.set_text(text)
self.applet.exit_editing()
def _on_key_release(self, widget, event):
if event.keyval == Gdk.KEY_Escape:
self.applet.exit_editing()
class WSNameApplet(MatePanelApplet.Applet):
_name_change_handler_id = None
workspace = None
settings = None
prefs = None
width = 100
editing = False
def __init__(self, applet):
        self.applet = applet
menuxml = """
<menuitem name="Prefs" action="Prefs" />
<menuitem name="About" action="About" />
"""
actions = [("Prefs", Gtk.STOCK_PREFERENCES, "Preferences", None, None, self._display_prefs),
("About", Gtk.STOCK_ABOUT, "About", None, None, self._display_about)]
actiongroup = Gtk.ActionGroup.new("WsnameActions")
actiongroup.add_actions(actions, None)
applet.setup_menu(menuxml, actiongroup)
self.init()
def _display_about(self, action):
about = Gtk.AboutDialog()
about.set_program_name("Workspace Name Applet")
about.set_version(wsnamelet_globals.version)
about.set_copyright("© 2006 - 2015 Alexandre Muñiz")
about.set_comments("View and change the name of the current workspace.\n\nTo change the workspace name, click on the applet, type the new name, and press Enter.")
about.set_website("https://github.com/munizao/mate-workspace-name-applet")
about.connect ("response", lambda self, *args: self.destroy ())
about.show_all()
def _display_prefs(self, action):
self.prefs.dialog.show_all()
self.prefs.dialog.run()
self.prefs.dialog.hide()
def set_width(self, width):
self.width = width
self.button.set_size_request(width, -1)
self.button.queue_resize()
self.entry.set_size_request(width, -1)
self.entry.queue_resize()
def on_width_changed(self, settings, key):
width = settings.get_int(key)
self.set_width(width)
def init(self):
self.button = Gtk.Button()
self.button.connect("button-press-event", self._on_button_press)
self.button.connect("button-release-event", self._on_button_release)
self.label = Gtk.Label()
self.label.set_ellipsize(Pango.EllipsizeMode.END)
self.applet.add(self.button)
self.button.add(self.label)
self.entry = WSNameEntry(self)
self.entry.connect("button-press-event", self._on_entry_button_press)
try:
self.settings = Gio.Settings.new("com.puzzleapper.wsname-applet-py")
self.set_width(self.settings.get_int("width"))
self.settings.connect("changed::width", self.on_width_changed)
except:
self.set_width(100)
self.screen = Wnck.Screen.get_default()
self.workspace = really_get_active_workspace(self.screen)
self.screen.connect("active_workspace_changed", self._on_workspace_changed)
self.button.set_tooltip_text(_("Click to change the name of the current workspace"))
self._name_change_handler_id = None
self.prefs = WSNamePrefs(self)
self.show_workspace_name()
self.applet.show_all()
return True
def _on_button_press(self, button, event, data=None):
if event.button != 1:
button.stop_emission("button-press-event")
def _on_button_release(self, button, event, data=None):
if event.type == Gdk.EventType.BUTTON_RELEASE and event.button == 1:
self.editing = True
self.applet.remove(self.button)
self.applet.add(self.entry)
self.entry.set_text(self.workspace.get_name())
self.entry.set_position(-1)
self.entry.select_region(0, -1)
self.applet.request_focus(event.time)
GObject.timeout_add(0, self.entry.grab_focus)
self.applet.show_all()
def _on_entry_button_press(self, entry, event, data=None):
self.applet.request_focus(event.time)
def _on_workspace_changed(self, event, old_workspace):
if self.editing:
self.exit_editing()
if (self._name_change_handler_id):
self.workspace.disconnect(self._name_change_handler_id)
self.workspace = really_get_active_workspace(self.screen)
self._name_change_handler_id = self.workspace.connect("name-changed",
|
self._on_workspace_name_changed)
self.show_workspace_name()
def _on_workspace_name_changed(self, event):
self.show_workspace_name()
def show_workspace_name(self):
if self.workspace:
self.label.set_text(self.workspace.get_name())
self.applet.show_all()
def exit_editing(self):
self.editing = False
self.applet.remove(self.entry)
self.applet.add(self.button)
def really_get_active_workspace(screen):
    # This bit is needed because wnck is asynchronous.
while Gtk.events_pending():
Gtk.main_iteration()
return screen.get_active_workspace()
def applet_factory(applet, iid, data):
WSNameApplet(applet)
return True
MatePanelApplet.Applet.factory_main("WsnameAppletFactory",
True,
MatePanelApplet.Applet.__gtype__,
applet_factory,
None)
|
atsuyim/OpenBazaar
|
node/backuptool.py
|
Python
|
mit
| 4,817
| 0
|
import tarfile
import time
import os
import json
class BackupTool(object):
"""Simple backup utility."""
def __init__(self):
pass
@staticmethod
def backup(openbazaar_installation_path,
backup_folder_path,
on_success_callback=None,
on_error_callback=None):
"""
Creates an 'openbazaar-YYYY-MM-DD-hh-mm-ss.tar.gz' file
inside the html/backups/ folder.
@param openbazaar_installation_path: str
The path to OpenBazaar's installation folder,
where the db/ folder lives.
@param backup_folder_path: str
The folder where the backup file will reside.
Optional callback functions can be passed:
@param on_success_callback(backupFilePath: str)
@param on_error_callback(errorMessage: str)
"""
date_time = time.strftime('%Y-%h-%d-%H-%M-%S')
output_file_path = os.path.join(
backup_folder_path,
"openbazaar-%s.tar.gz" % date_time
)
# Create the folder for the backup, if it doesn't exist.
try:
os.makedirs(backup_folder_path)
except os.error:
pass
db_folder = os.path.join(openbazaar_installation_path, "db")
try:
with tarfile.open(output_file_path, "w:gz") as tar:
tar.add(db_folder, arcname=os.path.basename(db_folder))
except tarfile.TarError as exc:
# TODO: Install proper error logging.
print "Error while backing up to:", output_file_path
if on_error_callback is not None:
on_error_callback(exc)
return
if on_success_callback is not None:
on_success_callback(output_file_path)
@staticmethod
def restore(backup_tar_filepath):
raise NotImplementedError
@staticmethod
def get_installation_path():
"""Return the Project Root path."""
file_abs_path = os.path.abspath(__file__)
real_file_abs_path = os.path.realpath(file_abs_path)
return real_file_abs_path[:real_file_abs_path.find('/node')]
@classmethod
def get_backup_path(cls):
"""Return the backup path."""
# TODO: Make backup path configurable on server settings.
return os.path.join(
cls.get_installation_path(), 'html', 'backups'
)
class Backup(json.JSONEncoder):
"""
A (meant to be immutable) POPO to represent a backup.
So that we can tell our Web client about the backups available.
"""
def __init__(self,
file_name=None,
full_file_path=None,
created_timestamp_millis=None,
size_in_bytes=None):
super(Backup, self).__init__()
self.file_name = file_name
self.full_file_path = full_file_path
self.created_timestamp_millis = created_timestamp_millis
self.size_in_bytes = size_in_bytes
def to_dict(self):
"""Return a dictionary with attributes of self."""
return {
"file_name": self.file_name,
"full_file_path": self.full_file_path,
"created_timestamp_millis": self.created_timestamp_millis,
"size_in_bytes": self.size_in_bytes
}
def __repr__(self):
return repr(self.to_dict())
@classmethod
def get_backups(cls, backup_folder_path=None):
"""
Return a list of Backup objects found in the backup folder path given.
"""
if backup_folder_path is None or not os.path.isdir(backup_folder_path):
return []
result_gen = (
cls.get_backup(os.path.join(backup_folder_path, x))
for x in os.listdir(backup_folder_path)
)
result = [backup for backup in result_gen if backup is not None]
result.reverse()
return result
@classmethod
def get_backup(cls, backup_file_path):
"""
Create and return a Backup object from a backup path.
Return None if the path was invalid.
"""
try:
file_stat = os.stat(backup_file_path)
file_name = os.path.basename(backup_file_path)
except os.error:
print "Invalid backup path:", backup_file_path
return None
created_timestamp_millis = file_stat.st_ctime
size_in_bytes = file_stat.st_size
return cls(
file_name=file_name,
full_file_path=backup_file_path,
created_timestamp_millis=created_timestamp_millis,
size_in_bytes=size_in_bytes
)
class BackupJSONEncoder(json.JSONEncoder):
# pylint: disable=method-hidden
def default(self, o):
if isinstance(o, Backup):
return o.to_dict()
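# Editor's sketch (not part of the module): a minimal caller, written in the
# same Python 2 style as the code above; the callback names are illustrative.
def _on_success(backup_file_path):
    print "Backup written to:", backup_file_path
def _on_error(error_message):
    print "Backup failed:", error_message
BackupTool.backup(BackupTool.get_installation_path(),
                  BackupTool.get_backup_path(),
                  on_success_callback=_on_success,
                  on_error_callback=_on_error)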
|
zetaops/ulakbus
|
ulakbus/views/bap/bap_iletisim.py
|
Python
|
gpl-3.0
| 1,146
| 0
|
# -*- coding: utf-8 -*-
# Copyright (C) 2015 ZetaOps Inc.
#
# This file is licensed under the GNU General Public License v3
# (GPLv3). See LICENSE.txt for details.
from ulakbus.models import AbstractRole
from ulakbus.models import Personel
from ulakbus.models import Role
from zengine.views.crud import CrudView
from zengine.lib.translation import gettext as _
class BAPIletisimView(CrudView):
def iletisim_bilgilerini_goster(self):
self.current.output["meta"]["allow_search"] =
|
False
self.current.output["meta"]["allow_actions"] = False
self.output['object_title'] = _(u"BAP Koordinatörlüğü İletişim")
self.output['objects'] = [
[_(u"Ad Soyad"), _(u"Telefon"), _(u"E-posta")]
]
abstract_role = AbstractRole.objects.get(
name='Bilimsel Arastirma Projesi - Koordinasyon Birimi')
for r in Role.objects.all(abstract_role_id=abstract_role.key):
p = Personel.objects.get(user=r.user())
self.output['objects'].append({
"fields": [p.__unicode__(), p.oda_tel_no, p.e_posta],
"actions": []
})
|
will4906/PatentCrawler
|
service/__init__.py
|
Python
|
apache-2.0
| 413
| 0.002421
|
# -*- coding: utf-8 -*-
"""
Created on 2017/3/24
@author: will4906
"""
# from logbook import Logger
# from service import log
# # # from service.account import *
# # # from service.proxy import *
# #
# # logger = Logger('main')
# #
# # if __name__ == '__main__':
# # # stupid()
# # # update_proxy()
# # # notify_ip_address()
# # # update_cookies()
# from service.account import login
#
# login()
|
tomsik68/kodi-putlocker-tvshows
|
examples/putlockertest.py
|
Python
|
mit
| 357
| 0
|
from putlocker import Putlocker
putlocker = Putlocker()
series = putlocker.search('Gold Rush')
serie = series[0]
print(serie.getName())
print(serie.getImageUrl())
print(serie.url)
print('-' * 80)
seasons = serie.getSeasons()
season = seasons[0]
episodes = season.getEpisodes()
for episode in episodes:
print(episode.getName(), episode.getVideoLink())
|
quantumlib/Cirq
|
cirq-core/cirq/transformers/target_gatesets/sqrt_iswap_gateset_test.py
|
Python
|
apache-2.0
| 13,585
| 0.001914
|
# Copyright 2022 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Optional
import cirq
import pytest
import sympy
import numpy as np
def all_gates_of_type(m: cirq.Moment, g: cirq.Gateset):
for op in m:
if op not in g:
return False
return True
def assert_optimizes(before: cirq.Circuit, expected: cirq.Circuit, **kwargs):
cirq.testing.assert_same_circuits(
cirq.optimize_for_target_gateset(before, gateset=cirq.SqrtIswapTargetGateset(**kwargs)),
expected,
)
def assert_optimization_not_broken(
circuit: cirq.Circuit, required_sqrt_iswap_count: Optional[int] = None
):
c_new = cirq.optimize_for_target_gateset(
circuit,
gateset=cirq.SqrtIswapTargetGateset(required_sqrt_iswap_count=required_sqrt_iswap_count),
)
cirq.testing.assert_circuits_with_terminal_measurements_are_equivalent(
circuit, c_new, atol=1e-6
)
c_new = cirq.optimize_for_target_gateset(
circuit,
gateset=cirq.SqrtIswapTargetGateset(
use_sqrt_iswap_inv=True, required_sqrt_iswap_count=required_sqrt_iswap_count
),
)
cirq.testing.assert_circuits_with_terminal_measurements_are_equivalent(
circuit, c_new, atol=1e-6
)
def test_convert_to_sqrt_iswap_preserving_moment_structure():
q = cirq.LineQubit.range(5)
op = lambda q0, q1: cirq.H(q1).controlled_by(q0)
c_orig = cirq.Circuit(
cirq.Moment(cirq.X(q[2])),
cirq.Moment(op(q[0], q[1]), op(q[2], q[3])),
cirq.Moment(op(q[2], q[1]), op(q[4], q[3])),
cirq.Moment(op(q[1], q[2]), op(q[3], q[4])),
cirq.Moment(op(q[3], q[2]), op(q[1], q[0])),
cirq.measure(*q[:2], key="m"),
cirq.X(q[2]).with_classical_controls("m"),
cirq.CZ(*q[3:]).with_classical_controls("m"),
)
c_new = cirq.optimize_for_target_gateset(c_orig, gateset=cirq.SqrtIswapTargetGateset())
assert c_orig[-2:] == c_new[-2:]
c_orig, c_new = c_orig[:-2], c_new[:-2]
cirq.testing.assert_circuits_with_terminal_measurements_are_equivalent(c_orig, c_new, atol=1e-6)
assert all(
(
all_gates_of_type(m, cirq.Gateset(cirq.AnyUnitaryGateFamily(1)))
or all_gates_of_type(m, cirq.Gateset(cirq.SQRT_ISWAP))
)
for m in c_new
)
c_new = cirq.optimize_for_target_gateset(
c_orig, gateset=cirq.SqrtIswapTargetGateset(use_sqrt_iswap_inv=True), ignore_failures=False
)
cirq.testing.assert_circuits_with_terminal_measurements_are_equivalent(c_orig, c_new, atol=1e-6)
assert all(
(
all_gates_of_type(m, cirq.Gateset(cirq.AnyUnitaryGateFamily(1)))
or all_gates_of_type(m, cirq.Gateset(cirq.SQRT_ISWAP_INV))
)
for m in c_new
)
@pytest.mark.parametrize(
'gate',
[
cirq.CNotPowGate(exponent=sympy.Symbol('t')),
cirq.PhasedFSimGate(theta=sympy.Symbol('t'), chi=sympy.Symbol('t'), phi=sympy.Symbol('t')),
],
)
@pytest.mark.parametrize('use_sqrt_iswap_inv', [True, False])
def test_two_qubit_gates_with_symbols(gate: cirq.Gate, use_sqrt_iswap_inv: bool):
# Note that even though these gates are not natively supported by
# `cirq.parameterized_2q_op_to_sqrt_iswap_operations`, the transformation succeeds because
# `cirq.optimize_for_target_gateset` also relies on `cirq.decompose` as a fallback.
c_orig = cirq.Circuit(gate(*cirq.LineQubit.range(2)))
c_new = cirq.optimize_for_target_gateset(
c_orig, gateset=cirq.SqrtIswapTargetGateset(use_sqrt_iswap_inv=use_sqrt_iswap_inv)
)
# Check that `c_new` only contains sqrt iswap as the 2q entangling gate.
sqrt_iswap_gate = cirq.SQRT_ISWAP_INV if use_sqrt_iswap_inv else cirq.SQRT_ISWAP
for op in c_new.all_operations():
if cirq.num_qubits(op) == 2:
assert op.gate == sqrt_iswap_gate
# Check if unitaries are the same
for val in np.linspace(0, 2 * np.pi, 10):
cirq.testing.assert_circuits_with_terminal_measurements_are_equivalent(
cirq.resolve_parameters(c_orig, {'t': val}),
cirq.resolve_parameters(c_new, {'t': val}),
atol=1e-6,
)
def test_sqrt_iswap_gateset_raises():
with pytest.raises(ValueError, match="`required_sqrt_iswap_count` must be 0, 1, 2, or 3"):
_ = cirq.SqrtIswapTargetGateset(required_sqrt_iswap_count=4)
def test_sqrt_iswap_gateset_eq():
eq = cirq.testing.EqualsTester()
eq.add_equality_group(
cirq.SqrtIswapTargetGateset(), cirq.SqrtIswapTargetGateset(use_sqrt_iswap_inv=False)
)
eq.add_equality_group(
cirq.SqrtIswapTargetGateset(atol=1e-6, required_sqrt_iswap_count=0, use_sqrt_iswap_inv=True)
)
eq.add_equality_group(
cirq.SqrtIswapTargetGateset(atol=1e-6, required_sqrt_iswap_count=3, use_sqrt_iswap_inv=True)
)
@pytest.mark.parametrize(
'gateset',
[
cirq.SqrtIswapTargetGateset(),
cirq.SqrtIswapTargetGateset(
atol=1e-6, required_sqrt_iswap_count=2, use_sqrt_iswap_inv=True
),
],
)
def test_sqrt_iswap_gateset_repr(gateset):
cirq.testing.assert_equivalent_repr(gateset)
def test_simplifies_sqrt_iswap():
a, b = cirq.LineQubit.range(2)
assert_optimizes(
before=cirq.Circuit(
[
# SQRT_ISWAP**8 == Identity
cirq.Moment([cirq.SQRT_ISWAP(a, b)]),
cirq.Moment([cirq.SQRT_ISWAP(a, b)]),
cirq.Moment([cirq.SQRT_ISWAP(a, b)]),
cirq.Moment([cirq.SQRT_ISWAP(a, b)]),
cirq.Moment([cirq.SQRT_ISWAP(a, b)]),
cirq.Moment([cirq.SQRT_ISWAP(a, b)]),
cirq.Moment([cirq.SQRT_ISWAP(a, b)]),
cirq.Moment([cirq.SQRT_ISWAP(a, b)]),
cirq.Moment([cirq.SQRT_ISWAP(a, b)]),
]
),
expected=cirq.Circuit(
[
cirq.Moment([cirq.SQRT_ISWAP(a, b)]),
]
),
)
def test_simplifies_sqrt_iswap_inv():
a, b = cirq.LineQubit.range(2)
assert_optimizes(
use_sqrt_iswap_inv=True,
before=cirq.Circuit(
[
# SQRT_ISWAP**8 == Identity
cirq.Moment([cirq.SQRT_ISWAP(a, b)]),
cirq.Moment([cirq.SQRT_ISWAP(a, b)]),
cirq.Moment([cirq.SQRT_ISWAP(a, b)]),
cirq.Moment([cirq.SQRT_ISWAP(a, b)]),
cirq.Moment([cirq.SQRT_ISWAP(a, b)]),
                cirq.Moment([cirq.SQRT_ISWAP_INV(a, b)]),
cirq.Moment([cirq.SQRT_ISWAP(a, b)]),
                cirq.Moment([cirq.SQRT_ISWAP(a, b)]),
cirq.Moment([cirq.SQRT_ISWAP(a, b)]),
]
),
expected=cirq.Circuit(
[
cirq.Moment([cirq.SQRT_ISWAP_INV(a, b)]),
]
),
)
def test_works_with_tags():
a, b = cirq.LineQubit.range(2)
assert_optimizes(
before=cirq.Circuit(
[
cirq.Moment([cirq.SQRT_ISWAP(a, b).with_tags('mytag1')]),
cirq.Moment([cirq.SQRT_ISWAP(a, b).with_tags('mytag2')]),
cirq.Moment([cirq.SQRT_ISWAP_INV(a, b).with_tags('mytag3')]),
]
),
expected=cirq.Circuit(
[
cirq.Moment([cirq.SQRT_ISWAP(a, b)]),
]
),
)
def test_no_touch_single_sqrt_iswap():
a, b = cirq.LineQubit.range(2)
circuit = cirq.Circuit(
[
cirq.Moment(
[cirq.ISwapPowGate(exponent=0.5, global_shift=-0.5).on(a, b).with_tags('mytag')]
),
]
)
    assert_optimizes(before=circuit, expected=circuit)
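# Editor's sketch (not one of the repository's tests): the API exercised above,
# compiling an arbitrary two-qubit circuit into the sqrt-iSWAP gateset.
def example_compile_to_sqrt_iswap() -> cirq.Circuit:
    q0, q1 = cirq.LineQubit.range(2)
    circuit = cirq.Circuit(cirq.CNOT(q0, q1))
    return cirq.optimize_for_target_gateset(
        circuit, gateset=cirq.SqrtIswapTargetGateset()
    )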
|
junranhe/tf-faster-rcnn
|
tools/test_net.py
|
Python
|
mit
| 3,912
| 0.016616
|
# --------------------------------------------------------
# Tensorflow Faster R-CNN
# Licensed under The MIT License [see LICENSE for details]
# Written by Zheqi he, Xinlei Chen, based on code from Ross Girshick
# --------------------------------------------------------
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import _init_paths
from model.test import test_net
from model.config import cfg, cfg_from_file, cfg_from_list
from datasets.factory import get_imdb
import argparse
import pprint
import time, os, sys
import tensorflow as tf
from nets.mult_vgg16 import vgg16
from nets.resnet_v1 import resnetv1
from nets.mult_mobilenet import mobilenet
def parse_args():
"""
Parse input arguments
"""
parser = argparse.ArgumentParser(description='Test a Fast R-CNN network')
parser.add_argument('--cfg', dest='cfg_file',
help='optional config file', default=None, type=str)
    parser.add_argument('--model', dest='model',
                        help='model to test',
                        default=None, type=str)
    parser.add_argument('--weight', dest='weight',
                        help='initialization weights to test (used when no model is given)',
                        default=None, type=str)
parser.add_argument('--imdb', dest='imdb_name',
help='dataset to test',
default='voc_2007_test', type=str)
parser.add_argument('--comp', dest='comp_mode', help='competition mode',
action='store_true')
parser.add_argument('--num_dets', dest='max_per_image',
help='max number of detections per image',
default=100, type=int)
parser.add_argument('--tag', dest='tag',
help='tag of the model',
default='', type=str)
parser.add_argument('--net', dest='net',
help='vgg16, res50, res101, res152',
default='res50', type=str)
parser.add_argument('--set', dest='set_cfgs',
help='set config keys', default=None,
nargs=argparse.REMAINDER)
if len(sys.argv) == 1:
parser.print_help()
sys.exit(1)
args = parser.parse_args()
return args
if __name__ == '__main__':
args = parse_args()
print('Called with args:')
print(args)
if args.cfg_file is not None:
cfg_from_file(args.cfg_file)
if args.set_cfgs is not None:
cfg_from_list(args.set_cfgs)
print('Using config:')
pprint.pprint(cfg)
    # If a model checkpoint is given, derive the filename from it;
    # otherwise just use the initialization weights.
if args.model:
filename = os.path.splitext(os.path.basename(args.model))[0]
else:
filename = os.path.splitext(os.path.basename(args.weight))[0]
tag = args.tag
tag = tag if tag else 'default'
filename = tag + '/' + filename
imdb = get_imdb(args.imdb_name)
imdb.competition_mode(args.comp_mode)
tfconfig = tf.ConfigProto(allow_soft_placement=True)
tfconfig.gpu_options.allow_growth=True
# init session
sess = tf.Session(config=tfconfig)
# load network
if args.net == 'vgg16':
net = vgg16(batch_size=1)
elif args.net == 'res50':
net = resnetv1(batch_size=1, num_layers=50)
elif args.net == 'res101':
net = resnetv1(batch_size=1, num_layers=101)
elif args.net == 'res152':
net = resnetv1(batch_size=1, num_layers=152)
elif args.net == 'mobilenet':
net = mobilenet(batch_size=1)
else:
raise NotImplementedError
# load model
net.create_mult_architecture(sess, "TEST", [imdb.num_classes], tag='default',
anchor_scales=cfg.ANCHOR_SCALES,
anchor_ratios=cfg.ANCHOR_RATIOS)
if args.model:
print(('Loading model check point from {:s}').format(args.model))
saver = tf.train.Saver()
saver.restore(sess, args.model)
print('Loaded.')
else:
print(('Loading initial weights from {:s}').format(args.weight))
sess.run(tf.global_variables_initializer())
print('Loaded.')
test_net(sess, net.get_task_net(0), imdb, filename, max_per_image=args.max_per_image)
sess.close()
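# Editor's note: example invocation (config and checkpoint paths illustrative):
#     python tools/test_net.py --cfg experiments/cfgs/res50.yml \
#         --model output/res50_faster_rcnn_iter_70000.ckpt \
#         --imdb voc_2007_test --net res50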
|
SepehrMN/nest-simulator
|
pynest/nest/tests/test_weights_as_lists.py
|
Python
|
gpl-2.0
| 4,995
| 0.002002
|
# -*- coding: utf-8 -*-
#
# test_weights_as_lists.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
"""
Weights given as lists with the different connection rules
"""
import unittest
import nest
@nest.ll_api.check_stack
class WeightsAsListTestCase(unittest.TestCase):
"""Test weights given as lists"""
def setUp(self):
nest.ResetKernel()
def test_OneToOneWeight(self):
"""Weight given as list, when connection rule is one_to_one"""
src = nest.Create('iaf_psc_alpha', 3)
tgt = nest.Create('iaf_psc_delta', 3)
# weight has to be a list with dimension (n_sources x 1) when one_to_one is used
ref_weights = [1.2, -3.5, 0.4]
conn_dict = {'rule': 'one_to_one'}
syn_dict = {'weight': ref_weights}
nest.Connect(src, tgt, conn_dict, syn_dict)
conns = nest.GetConnections()
weights = conns.weight
self.assertEqual(weights, ref_weights)
def test_AllToAllWeight(self):
"""Weight given as list of lists, when connection rule is all_to_all"""
src = nest.Create('iaf_psc_alpha', 3)
tgt = nest.Create('iaf_psc_delta', 2)
# weight has to be a list of lists with dimension (n_target x n_sources) when all_to_all is used
ref_weights = [[1.2, -3.5, 2.5], [0.4, -0.2, 0.7]]
conn_dict = {'rule': 'all_to_all'}
syn_dict = {'weight': ref_weights}
nest.Connect(src, tgt, conn_dict, syn_dict)
conns = nest.GetConnections()
weights = conns.weight
# Need to flatten ref_weights in order to compare with the weights given by the SynapseCollection.
ref_weights = [w for sub_weights in ref_weights for w in sub_weights]
        self.assertEqual(sorted(weights), sorted(ref_weights))
def test_FixedIndegreeWeight(self):
"""Weight given as list of list, when connection rule is fixed_indegree"""
src = nest.Create('iaf_psc_alpha', 5)
tgt = nest.Create('iaf_psc_delta', 3)
# weight has to be a list of lists with dimension (n_target x indegree) when fixed_indegree is used
ref_weights = [[1.2, -3.5], [0.4, -0.2], [0.6, 2.2]]
conn_dict = {'rule': 'fixed_indegree', 'indegree': 2}
syn_dict = {'weight': ref_weights}
nest.Connect(src, tgt, conn_dict, syn_dict)
conns = nest.GetConnections()
weights = conns.weight
# Need to flatten ref_weights in order to compare with the weights given by the SynapseCollection.
ref_weights = [w for sub_weights in ref_weights for w in sub_weights]
        self.assertEqual(sorted(weights), sorted(ref_weights))
def test_FixedOutdegreeWeight(self):
"""Weight given as list of lists, when connection rule is fixed_outdegree"""
src = nest.Create('iaf_psc_alpha', 2)
tgt = nest.Create('iaf_psc_delta', 5)
        # weight has to be a list of lists with dimension (n_source x outdegree) when fixed_outdegree is used
ref_weights = [[1.2, -3.5, 0.4], [-0.2, 0.6, 2.2]]
conn_dict = {'rule': 'fixed_outdegree', 'outdegree': 3}
syn_dict = {'weight': ref_weights}
nest.Connect(src, tgt, conn_dict, syn_dict)
conns = nest.GetConnections()
weights = conns.weight
# Need to flatten ref_weights in order to compare with the weights given by the SynapseCollection.
        ref_weights = [w for sub_weights in ref_weights for w in sub_weights]
        self.assertEqual(sorted(weights), sorted(ref_weights))
def test_FixedTotalNumberWeight(self):
"""Weight given as list, when connection rule is fixed_total_number"""
src = nest.Create('iaf_psc_alp
|
ha', 3)
tgt = nest.Create('iaf_psc_delta', 4)
conn_dict = {'rule': 'fixed_total_number', 'N': 4}
# weight has to be a list with dimension (n_conns x 1) when fixed_total_number is used
ref_weights = [1.2, -3.5, 0.4, -0.2]
syn_dict = {'weight': ref_weights}
nest.Connect(src, tgt, conn_dict, syn_dict)
conns = nest.GetConnections()
weights = conns.weight
self.assertEqual(weights, ref_weights)
def suite():
suite = unittest.makeSuite(WeightsAsListTestCase, 'test')
return suite
def run():
runner = unittest.TextTestRunner(verbosity=2)
runner.run(suite())
if __name__ == "__main__":
run()
|
Teagan42/home-assistant
|
tests/components/mqtt/test_lock.py
|
Python
|
apache-2.0
| 20,567
| 0.000194
|
"""The tests for the MQTT lock platform."""
import json
from unittest.mock import ANY
from homeassistant.components import lock, mqtt
from homeassistant.components.mqtt.discovery import async_start
from homeassistant.const import (
ATTR_ASSUMED_STATE,
STATE_LOCKED,
STATE_UNAVAILABLE,
STATE_UNLOCKED,
)
from homeassistant.setup import async_setup_component
from tests.common import (
MockConfigEntry,
async_fire_mqtt_message,
async_mock_mqtt_component,
mock_registry,
)
from tests.components.lock import common
async def test_controlling_state_via_topic(hass, mqtt_mock):
"""Test the controlling state via topic."""
assert await async_setup_component(
hass,
lock.DOMAIN,
{
lock.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"payload_lock": "LOCK",
"payload_unlock": "UNLOCK",
"state_locked": "LOCKED",
"state_unlocked": "UNLOCKED",
}
},
)
state = hass.states.get("lock.test")
assert state.state is STATE_UNLOCKED
assert not state.attributes.get(ATTR_ASSUMED_STATE)
async_fire_mqtt_message(hass, "state-topic", "LOCKED")
state = hass.states.get("lock.test")
assert state.state is STATE_LOCKED
async_fire_mqtt_message(hass, "state-topic", "UNLOCKED")
state = hass.states.get("lock.test")
assert state.state is STATE_UNLOCKED
async def test_controlling_non_default_state_via_topic(hass, mqtt_mock):
"""Test the controlling state via topic."""
assert await async_setup_component(
hass,
lock.DOMAIN,
{
lock.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"payload_lock": "LOCK",
"payload_unlock": "UNLOCK",
"state_locked": "closed",
"state_unlocked": "open",
}
},
)
state = hass.states.get("lock.test")
assert state.state is STATE_UNLOCKED
assert not state.attributes.get(ATTR_ASSUMED_STATE)
async_fire_mqtt_message(hass, "state-topic", "closed")
state = hass.states.get("lock.test")
assert state.state is STATE_LOCKED
async_fire_mqtt_message(hass, "state-topic", "open")
state = hass.states.get("lock.test")
assert state.state is STATE_UNLOCKED
async def test_controlling_state_via_topic_and_json_message(hass, mqtt_mock):
"""Test the controlling state via topic and JSON message."""
assert await async_setup_component(
hass,
lock.DOMAIN,
{
lock.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"payload_lock": "LOCK",
"payload_unlock": "UNLOCK",
"state_locked": "LOCKED",
"state_unlocked": "UNLOCKED",
"value_template": "{{ value_json.val }}",
}
},
)
state = hass.states.get("lock.test")
assert state.state is STATE_UNLOCKED
async_fire_mqtt_message(hass, "state-topic", '{"val":"LOCKED"}')
state = hass.states.get("lock.test")
assert state.state is STATE_LOCKED
async_fire_mqtt_message(hass, "state-topic", '{"val":"UNLOCKED"}')
state = hass.states.get("lock.test")
assert state.state is STATE_UNLOCKED
async def test_controlling_non_default_state_via_topic_and_json_message(
hass, mqtt_mock
):
"""Test the controlling state via topic and JSON message."""
assert await async_setup_component(
hass,
lock.DOMAIN,
{
lock.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"payload_lock": "LOCK",
"payload_unlock": "UNLOCK",
"state_locked": "closed",
"state_unlocked": "open",
"value_template": "{{ value_json.val }}",
}
},
)
state = hass.states.get("lock.test")
assert state.state is STATE_UNLOCKED
async_fire_mqtt_message(hass, "state-topic", '{"val":"closed"}')
state = hass.states.get("lock.test")
assert state.state is STATE_LOCKED
async_fire_mqtt_message(hass, "state-topic", '{"val":"open"}')
state = hass.states.get("lock.test")
assert state.state is STATE_UNLOCKED
async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock):
"""Test optimistic mode without state topic."""
assert await async_setup_component(
hass,
lock.DOMAIN,
{
lock.DOMAIN: {
"platform": "mqtt",
"name": "test",
"command_topic": "command-topic",
"payload_lock": "LOCK",
"payload_unlock": "UNLOCK",
"state_locked": "LOCKED",
"state_unlocked": "UNLOCKED",
}
},
)
state = hass.states.get("lock.test")
assert state.state is STATE_UNLOCKED
assert state.attributes.get(ATTR_ASSUMED_STATE)
await common.async_lock(hass, "lock.test")
mqtt_mock.async_publish.assert_called_once_with("command-topic", "LOCK", 0, False)
mqtt_mock.async_publish.reset_mock()
state = hass.states.get("lock.test")
assert state.state is STATE_LOCKED
assert state.attributes.get(ATTR_ASSUMED_STATE)
await common.async_unlock(hass, "lock.test")
mqtt_mock.async_publish.assert_called_once_with("command-topic", "UNLOCK", 0, False)
mqtt_mock.async_publish.reset_mock()
state = hass.states.get("lock.test")
assert state.state is STATE_UNLOCKED
assert state.attributes.get(ATTR_ASSUMED_STATE)
async def test_sending_mqtt_commands_and_explicit_optimistic(hass, mqtt_mock):
"
|
""Test optimistic mode without state topic."""
assert await async_setup_component(
hass,
lock.DOMAIN,
{
lock.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic
|
",
"payload_lock": "LOCK",
"payload_unlock": "UNLOCK",
"state_locked": "LOCKED",
"state_unlocked": "UNLOCKED",
"optimistic": True,
}
},
)
state = hass.states.get("lock.test")
assert state.state is STATE_UNLOCKED
assert state.attributes.get(ATTR_ASSUMED_STATE)
await common.async_lock(hass, "lock.test")
mqtt_mock.async_publish.assert_called_once_with("command-topic", "LOCK", 0, False)
mqtt_mock.async_publish.reset_mock()
state = hass.states.get("lock.test")
assert state.state is STATE_LOCKED
assert state.attributes.get(ATTR_ASSUMED_STATE)
await common.async_unlock(hass, "lock.test")
mqtt_mock.async_publish.assert_called_once_with("command-topic", "UNLOCK", 0, False)
mqtt_mock.async_publish.reset_mock()
state = hass.states.get("lock.test")
assert state.state is STATE_UNLOCKED
assert state.attributes.get(ATTR_ASSUMED_STATE)
async def test_default_availability_payload(hass, mqtt_mock):
"""Test availability by default payload with defined topic."""
assert await async_setup_component(
hass,
lock.DOMAIN,
{
lock.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"payload_lock": "LOCK",
"payload_unlock": "UNLOCK",
"availability_topic": "availability-topic",
}
},
)
state = hass.states.get("lock.test")
assert state.state is STATE_UNAVAILABLE
async_fire_mqtt_message(hass, "availability-topic", "
|
strogo/turbion
|
turbion/bits/utils/tests/merging.py
|
Python
|
bsd-3-clause
| 2,375
| 0.002947
|
from datetime import date
from django.db import models
from django.test import TestCase
from django.contrib.auth.models import User
from turbion.bits.utils import merging
class MyProfile(models.Model):
user_ptr = models.ForeignKey(User, unique=True)
nickname = models.CharField(max_length=100)
www = models.URLField()
birth = models.DateField()
class Meta:
app_label="turbion"
class OtherProfile(models.Model):
user = models.ForeignKey(User, unique=True)
nickname = models.CharField(max_length=100)
website = models.URLField()
dob = models.DateField()
class Meta:
app_label="turbion"
class MyProfileLayer(merging.ModelLayer):
model = MyProfile
fields = ["nickname"]
aliases = {
"site": "www",
"day_of_birth": "birth"
}
key = 'user_ptr'
class OtherProfileLayer(merging.ModelLayer):
model = OtherProfile
fields = ["nickname"]
aliases = {
"site": "website",
"day_of_birth": "dob"
}
key = 'user'
create = True
merging.track([MyProfileLayer, OtherProfileLayer])
class Merge(TestCase):
def setUp(self):
self.user = User.objects.create_user(
"test",
"foobar@foo.bar"
)
self.my_profile = MyProfile.objects.create(
user_ptr=self.user,
nickname="test_foo",
www="http://foo.bar",
birth=date.today(),
)
def _test_objects(self, other):
my_profile = MyProfile.objects.get(pk=self.my_profile.pk)
self.assertEqual(other.nickname, my_profile.nickname)
self.assertEqual(other.website, my_profile.www)
self.assertEqual(other.dob, my_profile.birth)
def test_other_profile_existance(self):
self.assertEqual(
OtherProfile.objects.filter(user=self.user).count(),
1
)
other = OtherProfile.objects.get(user=self.user)
self._test_objects(other)
def test_other_change(self):
other = OtherProfile.objects.get(user=self.user)
other.website = "http://bar.foo"
other.save()
self._test_objects(other)
def test_my_change(self):
self.my_profile.website = "http://bar.foo"
self.my_profile.save()
other = OtherProfile.objects.get(user=self.user)
self._test_objects(other)
|
LeereNix/2chParser
|
optional.py
|
Python
|
bsd-3-clause
| 553
| 0.003617
|
import os.path
def writeToFile(element):
if not os.path.exists("interestingThreads.txt"):
outF = open("interestingThreads.txt", "w")
        outF.write(element)
outF.close()
else:
outF = open("interestingThreads.txt", "a")
        outF.write(element)
outF.close()
def countThreads(string):
number_threads = string.count("comment")
return number_threads
def write_thread(bytes, number):
name = "thread" + number + ".html"
f = open(name, 'wb')
f.write(bytes)
    f.close()
|
ravibhure/ansible
|
lib/ansible/modules/network/aci/aci_epg_monitoring_policy.py
|
Python
|
gpl-3.0
| 4,278
| 0.002338
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: aci_epg_monitoring_policy
short_description: Manage monitoring policies on Cisco ACI fabrics (mon:EPGPol)
description:
- Manage monitoring policies on Cisco ACI fabrics.
- More information from the internal APIC class I(mon:EPGPol) at
U(https://developer.cisco.com/docs/apic-mim-ref/).
author:
- Dag Wieers (@dagwieers)
version_added: '2.4'
requirements:
- ACI Fabric 1.0(3f)+
notes:
- The C(tenant) used must exist before using this module in your playbook.
The M(aci_tenant) module can be used for this.
options:
monitoring_policy:
description:
- The name of the monitoring policy.
required: yes
aliases: [ name ]
description:
description:
- Description for the monitoring policy.
aliases: [ descr ]
tenant:
description:
- The name of the tenant.
required: yes
aliases: [ tenant_name ]
state:
description:
- Use C(present) or C(absent) for adding or removing.
- Use C(query) for listing an object or multiple objects.
choices: [ absent, present, query ]
default: present
extends_documentation_fragment: aci
'''
# FIXME: Add more, better examples
EXAMPLES = r'''
- aci_epg_monitoring_policy:
hostname: '{{ hostname }}'
username: '{{ username }}'
password: '{{ password }}'
monitoring_policy: '{{ monitoring_policy }}'
description: '{{ description }}'
tenant: '{{ tenant }}'
'''
RETURN = r'''
#
'''
from ansible.module_utils.network.aci.aci import ACIModule, aci_argument_spec
from ansible.module_utils.basic import AnsibleModule
def main():
argument_spec = aci_argument_spec()
argument_spec.update(
monitoring_policy=dict(type='str', required=False, aliases=['name']), # Not required for querying all objects
tenant=dict(type='str', required=False, aliases=['tenant_name']), # Not required for querying all objects
description=dict(type='str', aliases=['descr']),
state=dict(type='str', default='present', choices=['absent', 'present', 'query']),
method=dict(type='str', choices=['delete', 'get', 'post'], aliases=['action'], removed_in_version='2.6'), # Deprecated starting from v2.6
protocol=dict(type='str', removed_in_version='2.6'), # Deprecated in v2.6
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
required_if=[
['state', 'absent', ['monitoring_policy', 'tenant']],
['state', 'present', ['monitoring_policy', 'tenant']],
],
)
monitoring_policy = module.params['monitoring_policy']
description = module.params['description']
state = module.params['state']
tenant = module.params['tenant']
aci = ACIModule(module)
aci.construct_url(
root_class=dict(
aci_class='fvTenant',
aci_rn='tn-{0}'.format(tenant),
filter_target='eq(fvTenant.name, "{0}")'.format(tenant),
module_object=tenant,
),
subclass_1=dict(
aci_class='monEPGPol',
aci_rn='monepg-{0}'.format(monitoring_policy),
filter_target='eq(monEPGPol.name, "{0}")'.format(monitoring_policy),
module_object=monitoring_policy,
),
)
aci.get_existing()
if state == 'present':
# Filter out module parameters with null values
aci.payload(
aci_class='monEPGPol',
class_config=dict(
name=monitoring_policy,
descr=description,
),
)
# Generate config diff which will be used as POST request body
aci.get_diff(aci_class='monEPGPol')
# Submit changes if module not in check_mode and the proposed is different than existing
aci.post_config()
elif state == 'absent':
aci.delete_config()
module.exit_json(**aci.result)
if __name__ == "__main__":
main()
|
dizzy54/fplbot
|
fplbot/settings.py
|
Python
|
mit
| 4,120
| 0.001214
|
"""
Django settings for fplbot project.
Generated by 'django-admin startproject' using Django 1.10.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
import sys
import dj_database_url
try:
from ignored import keys
except ImportError:
import keys
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
'''
# Adding custom library to python path
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
sys.path.append(os.path.join(PROJECT_DIR, 'lib'))
'''
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = os.getenv('DJANGO_SECRET_KEY', default=keys.keys['DJANGO_SECRET_KEY'])
# custom keys
PAGE_ACCESS_TOKEN = os.getenv('FB_PAGE_TOKEN', default='')
VERIFY_TOKEN = os.getenv('FB_VERIFY_TOKEN', default='')
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# Allow all host headers
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'analysis',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'ratelimit.middleware.RatelimitMiddleware',
]
RATELIMIT_VIEW = 'analysis.views.limit_response'
ROOT_URLCONF = 'fplbot.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'fplbot.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'fplbot_db',
'USER': 'fplbot',
'PASSWORD': 'fplbot0',
'HOST': '127.0.0.1',
'PORT': '5432',
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Update database configuration with $DATABASE_URL.
db_from_env = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(db_from_env)
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
STATIC_ROOT = os.path.join(PROJECT_DIR, 'static')
STATIC_URL = '/static/'
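# Editor's note: with e.g. DATABASE_URL=postgres://user:pass@host:5432/name set
# in the environment (URL illustrative), dj_database_url.config() above parses
# it into the usual Django keys (ENGINE, NAME, USER, PASSWORD, HOST, PORT), and
# the update() call overrides the hard-coded default connection.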
|
PixelDragon/pixeldragon
|
MARC21relaxed.py
|
Python
|
apache-2.0
| 30,253
| 0.007074
|
# ./MARC21relaxed.py
# -*- coding: utf-8 -*-
# PyXB bindings for NM:5e592dacc0cf5bbbe827fb7d980f3324ca92c3dc
# Generated 2016-12-21 00:24:34.092428 by PyXB version 1.2.4 using Python 2.7.12.final.0
# Namespace http://www.loc.gov/MARC21/slim
from __future__ import unicode_literals
import pyxb
import pyxb.binding
import pyxb.binding.saxer
import io
import pyxb.utils.utility
import pyxb.utils.domutils
import sys
import pyxb.utils.six as _six
# Unique identifier for bindings created at the same time
_GenerationUID = pyxb.utils.utility.UniqueIdentifier('urn:uuid:773ffeee-c70b-11e6-9daf-00e1020040ea')
# Version of PyXB used to generate the bindings
_PyXBVersion = '1.2.4'
# Generated bindings are not compatible across PyXB versions
#if pyxb.__version__ != _PyXBVersion:
# raise pyxb.PyXBVersionError(_PyXBVersion)
# Import bindings for namespaces imported into schema
import pyxb.binding.datatypes
# NOTE: All namespace declarations are reserved within the binding
Namespace = pyxb.namespace.NamespaceForURI('http://www.loc.gov/MARC21/slim', create_if_missing=True)
Namespace.configureCategories(['typeBinding', 'elementBinding'])
def CreateFromDocument (xml_text, default_namespace=None, location_base=None):
"""Parse the given XML and use the document element to create a
Python instance.
@param xml_text An XML document. This should be data (Python 2
str or Python 3 bytes), or a text (Python 2 unicode or Python 3
str) in the L{pyxb._InputEncoding} encoding.
@keyword default_namespace The L{pyxb.Namespace} instance to use as the
default namespace where there is no default namespace in scope.
If unspecified or C{None}, the namespace of the module containing
this function will be used.
@keyword location_base: An object to be recorded as the base of all
L{pyxb.utils.utility.Location} instances associated with events and
objects handled by the parser. You might pass the URI from which
the document was obtained.
"""
if pyxb.XMLStyle_saxer != pyxb._XMLStyle:
dom = pyxb.utils.domutils.StringToDOM(xml_text)
return CreateFromDOM(dom.documentElement, default_namespace=default_namespace)
if default_namespace is None:
default_namespace = Namespace.fallbackNamespace()
saxer = pyxb.binding.saxer.make_parser(fallback_namespace=default_namespace, location_base=location_base)
handler = saxer.getContentHandler()
xmld = xml_text
if isinstance(xmld, _six.text_type):
xmld = xmld.encode(pyxb._InputEncoding)
saxer.parse(io.BytesIO(xmld))
instance = handler.rootObject()
return instance
def CreateFromDOM (node, default_namespace=None):
"""Create a Python instance from the given DOM node.
The node tag must correspond to an element declaration
in this module.
@deprecated: Forcing use of DOM interface is unnecessary; use L{CreateFromDocument}."""
if default_namespace is None:
default_namespace = Namespace.fallbackNamespace()
    return pyxb.binding.basis.element.AnyCreateFromDOM(node, default_namespace)
# Atomic simple type: {http://www.loc.gov/MARC21/slim}recordTypeType
class recordTypeType (pyxb.binding.datatypes.NMTOKEN, pyxb.binding.basis.enumeration_mixin):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'recordTypeType')
_XSDLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 63, 2)
_Documentation = None
recordTypeType._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=recordTypeType, enum_prefix=None)
recordTypeType.Bibliographic = recordTypeType._CF_enumeration.addEnumeration(unicode_value='Bibliographic', tag='Bibliographic')
recordTypeType.Authority = recordTypeType._CF_enumeration.addEnumeration(unicode_value='Authority', tag='Authority')
recordTypeType.Holdings = recordTypeType._CF_enumeration.addEnumeration(unicode_value='Holdings', tag='Holdings')
recordTypeType.Classification = recordTypeType._CF_enumeration.addEnumeration(unicode_value='Classification', tag='Classification')
recordTypeType.Community = recordTypeType._CF_enumeration.addEnumeration(unicode_value='Community', tag='Community')
recordTypeType._InitializeFacetMap(recordTypeType._CF_enumeration)
Namespace.addCategoryObject('typeBinding', 'recordTypeType', recordTypeType)
# Atomic simple type: {http://www.loc.gov/MARC21/slim}leaderDataType
class leaderDataType (pyxb.binding.datatypes.string):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'leaderDataType')
_XSDLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 82, 2)
_Documentation = None
leaderDataType._CF_pattern = pyxb.binding.facets.CF_pattern()
leaderDataType._CF_pattern.addPattern(pattern='[\\dA-Za-z\\.| ]{24}')
leaderDataType._CF_whiteSpace = pyxb.binding.facets.CF_whiteSpace(value=pyxb.binding.facets._WhiteSpace_enum.preserve)
leaderDataType._InitializeFacetMap(leaderDataType._CF_pattern,
leaderDataType._CF_whiteSpace)
Namespace.addCategoryObject('typeBinding', 'leaderDataType', leaderDataType)
# Atomic simple type: {http://www.loc.gov/MARC21/slim}controlDataType
class controlDataType (pyxb.binding.datatypes.string):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'controlDataType')
_XSDLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 99, 2)
_Documentation = None
controlDataType._CF_whiteSpace = pyxb.binding.facets.CF_whiteSpace(value=pyxb.binding.facets._WhiteSpace_enum.preserve)
controlDataType._InitializeFacetMap(controlDataType._CF_whiteSpace)
Namespace.addCategoryObject('typeBinding', 'controlDataType', controlDataType)
# Atomic simple type: {http://www.loc.gov/MARC21/slim}controltagDataType
class controltagDataType (pyxb.binding.datatypes.string):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'controltagDataType')
_XSDLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 104, 2)
_Documentation = None
controltagDataType._CF_pattern = pyxb.binding.facets.CF_pattern()
controltagDataType._CF_pattern.addPattern(pattern='[0-9A-Za-z]{3}')
controltagDataType._CF_whiteSpace = pyxb.binding.facets.CF_whiteSpace(value=pyxb.binding.facets._WhiteSpace_enum.preserve)
controltagDataType._InitializeFacetMap(controltagDataType._CF_pattern,
controltagDataType._CF_whiteSpace)
Namespace.addCategoryObject('typeBinding', 'controltagDataType', controltagDataType)
# Atomic simple type: {http://www.loc.gov/MARC21/slim}tagDataType
class tagDataType (pyxb.binding.datatypes.string):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'tagDataType')
_XSDLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 122, 2)
_Documentation = None
tagDataType._CF_pattern = pyxb.binding.facets.CF_pattern()
tagDataType._CF_pattern.addPattern(pattern='(0([0-9A-Z][0-9A-Z])|0([1-9a-z][0-9a-z]))|(([1-9A-Z][0-9A-Z]{2})|([1-9a-z][0-9a-z]{2}))')
tagDataType._CF_whiteSpace = pyxb.binding.facets.CF_whiteSpace(value=pyxb.binding.facets._WhiteSpace_enum.preserve)
tagDataType._InitializeFacetMap(tagDataType._CF_pattern,
tagDataType._CF_whiteSpace)
Namespace.addCategoryObject('typeBinding', 'tagDataType', tagDataType)
# Atomic simple type: {http://www.loc.gov/MARC21/slim}indicatorDataType
class indicatorDataType (pyxb.binding.datatypes.string):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'indicatorDataType')
_XSDLocation = pyxb.utils.utility.Location('/data/code/pyMARC/xsd/MARC21relaxed.xsd', 128, 2)
_Documentation = None
indicatorDataType._CF_pattern = pyxb.binding.facets.CF_pattern()
indicatorDataType._CF_pattern.addPattern(pattern='[\\da-zA-Z_ ]{1}')
indicatorDataType._CF_whiteSpace = pyxb.binding.facets.CF_whiteSpace(value=pyxb.binding.facets._WhiteSpace_enum.preserve)
indicatorDataType._InitializeFacetMap(indicatorDataType._CF_pattern,
indicatorDataType._CF_whiteSpace)
Namespace.addCategoryObject('typeBinding', 'indicatorDataType', indicatorDataType)
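# Editor's sketch: parsing a MARCXML string with the binding above (the XML
# payload here is illustrative, not a complete record):
#     xml_text = '<record xmlns="http://www.loc.gov/MARC21/slim">...</record>'
#     record = CreateFromDocument(xml_text)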
|
domenicosolazzo/jroc
|
tests/tasks/tokenizers/__init__.py
|
Python
|
gpl-3.0
| 89
| 0.011236
|
from jroc.tasks.tokenizers.TokenizerTask import SentenceTokenizerTask, WordTokenizerTask
|
thrisp/flails
|
flask_flails/flinf.py
|
Python
|
mit
| 2,087
| 0
|
import pprint
class Flinf(object):
"""Information about a generated flask application. By default, provides
the url map and configuration variables of the generated application. To
return additional information pass a list to requested.
:param requested: a list of information items to extract from the
provided app, default includes a url map and listing of
                      configuration variables
"""
def __init__(self, flail, app=None, requested=None):
self.flail = flail
self.app = app
self.provide_information = ['url_map', 'config_vars']
if requested:
self.provide_information.extend(requested)
self.printer = pprint.PrettyPrinter(indent=4)
@property
def config_vars(self):
return {k: v for k, v in self.app.config.iteritems()}
@property
def url_map(self):
return [r for r in self.app.url_map.iter_rules()]
@property
def jinja_env(self):
        return self.app.jinja_env.__dict__
@property
def list_templates(self):
return self.app.jinja_env.list_templates()
@property
def asset_env(self):
return self.jinja_env.get('assets_environment').__dict__
@property
def asset_bundles(self):
return self.asset_env['_named_bundles']
def return_basic(self, item):
return getattr(self.app, item, None)
@property
def app_information(self):
"""Returns a dict containing parameters in cls.provide_information
list attribute. This will first attempt to resolve the parameter in the
list as an attribute/property on this class, then as an attribute on
the current associated application."""
to_return = {}
for item in self.provide_information:
to_return[item] = getattr(self, item, self.return_basic(item))
return to_return
@property
def info_applog(self):
self.app.logger.info(self.app_information)
@property
def info_out(self):
self.printer.pprint(self.app_information)
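# Editor's sketch (hypothetical `flail` and Flask `app` objects):
#     flinf = Flinf(flail, app=app, requested=['list_templates'])
#     flinf.info_out  # pretty-prints url_map, config_vars and list_templates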
|