| text (stringlengths 6-947k) | repo_name (stringlengths 5-100) | path (stringlengths 4-231) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int64, 6-947k) | score (float64, 0-0.34) |
|---|---|---|---|---|---|---|
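Each row below pairs a source file (`text`) with its repository metadata. As orientation, here is a minimal sketch of how a dump with this schema could be streamed and filtered; the dataset name is a hypothetical placeholder, not the actual source of these rows:

```python
# Minimal sketch, assuming the dump is published on the Hugging Face Hub.
# "user/python-files" is a placeholder name, not the real dataset.
from datasets import load_dataset

ds = load_dataset("user/python-files", split="train", streaming=True)
for row in ds:
    # keep small, permissively licensed, low-score files
    if row["license"] == "mit" and row["score"] < 0.01 and row["size"] < 10_000:
        print(row["repo_name"], row["path"], row["size"])
```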
from typing import Optional, Dict, Union, List
from .singleton import Singleton
class Arguments(metaclass=Singleton):
"""
Arguments singleton
"""
class Name:
SETTINGS_FILE: str = 'settings_file'
SETTINGS: str = 'settings'
INIT: str = 'init'
VERBOSE: str = 'verbose'
QUIET: str = 'quiet'
PREVIEW: str = 'preview'
FORMATS: str = 'formats'
VERSION: str = 'version'
OUTPUT: str = 'output'
CLIENT_ID: str = 'client_id'
CLIENT_SECRET: str = 'client_secret'
CHANNEL: str = 'channel'
USER: str = 'user'
INCLUDES: str = 'includes'
FIRST: str = 'first'
VIDEO: str = 'video'
FORMAT: str = 'format'
TIMEZONE: str = 'timezone'
DEBUG: str = 'debug'
LOG: str = 'log'
def __init__(self, arguments: Optional[Dict[str, Union[str, bool, int]]] = None):
"""
Initialize arguments
        :param arguments: arguments from the CLI (optional, so the singleton instance can be retrieved without parameters)
"""
if arguments is None:
print('Error: arguments were not provided')
exit()
# Required arguments and booleans
self.settings_file: str = arguments[Arguments.Name.SETTINGS_FILE]
self.settings: str = arguments[Arguments.Name.SETTINGS]
self.init: bool = arguments[Arguments.Name.INIT]
self.verbose: bool = arguments[Arguments.Name.VERBOSE]
self.debug: bool = arguments[Arguments.Name.DEBUG]
self.quiet: bool = arguments[Arguments.Name.QUIET]
self.preview: bool = arguments[Arguments.Name.PREVIEW]
self.print_formats: bool = arguments[Arguments.Name.FORMATS]
self.print_version: bool = arguments[Arguments.Name.VERSION]
self.output: str = arguments[Arguments.Name.OUTPUT]
self.log: bool = arguments[Arguments.Name.LOG]
# Optional or prompted arguments
self.client_id: Optional[str] = arguments[Arguments.Name.CLIENT_ID]
self.client_secret: Optional[str] = arguments[Arguments.Name.CLIENT_SECRET]
self.oauth_token: Optional[str] = None
self.first: Optional[int] = arguments[Arguments.Name.FIRST]
self.timezone: Optional[str] = arguments[Arguments.Name.TIMEZONE]
self.includes: Optional[str] = arguments[Arguments.Name.INCLUDES]
# Arguments that require some formatting
self.video_ids: List[int] = []
self.formats: List[str] = []
self.channels: List[str] = []
self.users: List[str] = []
# Videos
if arguments[Arguments.Name.VIDEO]:
self.video_ids = [int(video_id) for video_id in arguments[Arguments.Name.VIDEO].lower().split(',')]
# Formats
if arguments[Arguments.Name.FORMAT]:
            self.formats = arguments[Arguments.Name.FORMAT].lower().split(',')
# Channels
if arguments[Arguments.Name.CHANNEL]:
self.channels = arguments[Arguments.Name.CHANNEL].lower().split(',')
# Users
if arguments[Arguments.Name.USER]:
self.users = arguments[Arguments.Name.USER].lower().split(',')
|
PetterKraabol/Twitch-Chat-Downloader
|
tcd/arguments.py
|
Python
|
mit
| 3,199
| 0.001876
|
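The `Singleton` metaclass imported from `.singleton` is not included in this row. A plausible minimal sketch of what it could look like (an assumption about the imported module, not its actual code), so that every `Arguments(...)` call after the first returns the same instance:

```python
# Hypothetical sketch of the imported Singleton metaclass: the first
# instantiation is cached, and later calls return the cached instance.
class Singleton(type):
    _instances = {}

    def __call__(cls, *args, **kwargs):
        if cls not in cls._instances:
            cls._instances[cls] = super().__call__(*args, **kwargs)
        return cls._instances[cls]
```

With such a metaclass only the first call runs `__init__`, so the `arguments is None` guard above only trips if the singleton is asked for before it has ever been constructed with the parsed CLI dictionary.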
##############################################################################
# adaptiveMD: A Python Framework to Run Adaptive Molecular Dynamics (MD)
# Simulations on HPC Resources
# Copyright 2017 FU Berlin and the Authors
#
# Authors: Jan-Hendrik Prinz
# Contributors:
#
# `adaptiveMD` is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 2.1
# of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
##############################################################################
# part of the code below was taken from `openpathsampling` see
# <http://www.openpathsampling.org> or
# <http://github.com/openpathsampling/openpathsampling>
# for details and license
import inspect
import logging
import time
import uuid
logger = logging.getLogger(__name__)
class StorableMixin(object):
"""Mixin that allows objects of the class to to be stored using netCDF+
"""
_base = None
_args = None
_ignore = False
_find_by = []
INSTANCE_UUID = list(uuid.uuid1().fields[:-1])
CREATION_COUNT = 0L
ACTIVE_LONG = int(uuid.UUID(
fields=tuple(
INSTANCE_UUID +
[CREATION_COUNT]
)
))
@staticmethod
def get_uuid():
"""
Create a new unique ID
Returns
-------
long
the unique number for an object in the project
"""
StorableMixin.ACTIVE_LONG += 2
return StorableMixin.ACTIVE_LONG
def __init__(self):
# set the universal ID
self.__uuid__ = StorableMixin.get_uuid()
# set the creation time
self.__time__ = int(time.time())
self.__store__ = None
def __eq__(self, other):
if isinstance(other, StorableMixin):
return self.__uuid__ == other.__uuid__
return NotImplemented
def named(self, name):
"""
Attach a .name property to an object
Parameters
----------
name : str
the name of the object
Returns
-------
self
the object itself for chaining
"""
self.name = name
return self
def idx(self, store):
"""
Return the index which is used for the object in the given store.
Once you store a storable object in a store it gets assigned a unique
number that can be used to retrieve the object back from the store. This
        function will ask the given store whether the object is stored and, if
        so, what index it was assigned.
Parameters
----------
store : :class:`ObjectStore`
the store in which to ask for the index
Returns
-------
int or None
            the integer index for the object if it exists, or None otherwise
"""
if hasattr(store, 'index'):
return store.index.get(self, None)
else:
return store.idx(self)
@property
def cls(self):
"""
Return the class name as a string
Returns
-------
str
the class name
"""
return self.__class__.__name__
@classmethod
def base(cls):
"""
        Return the most basal ancestor class that is still derived from
        StorableMixin. This is important for determining which store should
        be used for storage.
Returns
-------
type
the base class
"""
if cls._base is None:
if cls is not StorableMixin:
if StorableMixin in cls.__bases__:
cls._base = cls
else:
if hasattr(cls.__base__, 'base'):
cls._base = cls.__base__.base()
else:
cls._base = cls
return cls._base
def __hash__(self):
return hash(self.__uuid__)
@property
def base_cls_name(self):
"""
Return the name of the base class
Returns
-------
str
the string representation of the base class
"""
return self.base().__name__
@property
def base_cls(self):
"""
Return the base class
Returns
-------
type
the base class
See Also
--------
:func:`base()`
"""
return self.base()
@classmethod
def descendants(cls):
"""
Return a list of all subclassed objects
Returns
-------
list of type
list of subclasses of a storable object
"""
return cls.__subclasses__() + \
[g for s in cls.__subclasses__() for g in s.descendants()]
@staticmethod
def objects():
"""
Returns a dictionary of all storable objects
Returns
-------
dict of str : type
a dictionary of all subclassed objects from StorableMixin.
The name points to the class
"""
subclasses = StorableMixin.descendants()
return {subclass.__name__: subclass for subclass in subclasses}
@classmethod
def args(cls):
"""
Return a list of args of the `__init__` function of a class
Returns
-------
list of str
the list of argument names. No information about defaults is
included.
"""
try:
args = inspect.getargspec(cls.__init__)
except TypeError:
return []
return args[0]
_excluded_attr = []
_included_attr = []
_exclude_private_attr = True
_restore_non_initial_attr = True
_restore_name = True
def to_dict(self):
"""
Convert object into a dictionary representation
Used to convert the dictionary into JSON string for serialization
Returns
-------
dict
the dictionary representing the (immutable) state of the object
"""
excluded_keys = ['idx', 'json', 'identifier']
keys_to_store = {
key for key in self.__dict__
if key in self._included_attr or (
key not in excluded_keys and
key not in self._excluded_attr and
not (key.startswith('_') and self._exclude_private_attr)
)
}
return {
key: self.__dict__[key] for key in keys_to_store
}
@classmethod
def from_dict(cls, dct):
"""
Reconstruct an object from a dictionary representation
Parameters
----------
dct : dict
the dictionary containing a state representation of the class.
Returns
-------
:class:`StorableMixin`
the reconstructed storable object
"""
if dct is None:
dct = {}
if hasattr(cls, 'args'):
args = cls.args()
init_dct = {key: dct[key] for key in dct if key in args}
try:
obj = cls(**init_dct)
if cls._restore_non_initial_attr:
non_init_dct = {
key: dct[key] for key in dct if key not in args}
if len(non_init_dct) > 0:
for key, value in non_init_dct.iteritems():
setattr(obj, key, value)
return obj
except TypeError as e:
if hasattr(cls, 'args'):
err = (
'Could not reconstruct the object of class `%s`. '
'\nStored parameters: %s \n'
'\nCall parameters: %s \n'
'\nSignature parameters: %s \n'
'\nActual message: %s'
) % (
cls.__name__,
str(dct),
str(init_dct),
                    str(args),
str(e)
)
raise TypeError(err)
else:
raise
else:
return cls(**dct)
def create_to_dict(keys_to_store):
"""
Create a to_dict function from a list of attributes
Parameters
----------
keys_to_store : list of str
the attributes used in { attr: getattr(self, attr) }
Returns
-------
function
the `to_dict` function
"""
def to_dict(self):
return {key: getattr(self, key) for key in keys_to_store}
return to_dict
|
thempel/adaptivemd
|
adaptivemd/mongodb/base.py
|
Python
|
lgpl-2.1
| 9,063
| 0.00011
|
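A short usage sketch of the mixin's serialization contract (Python 2, matching the module above, and assuming `StorableMixin` is importable): constructor arguments recovered via `args()` are fed back to `__init__`, and everything else is restored with `setattr`.

```python
# Usage sketch: round-trip an object through to_dict()/from_dict().
class Sample(StorableMixin):
    def __init__(self, value):
        super(Sample, self).__init__()
        self.value = value

s = Sample(42).named('first')
state = s.to_dict()               # {'value': 42, 'name': 'first'}
clone = Sample.from_dict(state)   # value=42 goes to __init__, name via setattr
assert clone.value == 42 and clone.name == 'first'
```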
# -*- coding: utf-8 -*-
# Scrape NetEase Open Course pages and download each episode's video with the proper name and format
# By : obnjis@163.com
# Python 2.7 + BeautifulSoup 4
# 2014-12-18
# Resumable downloads will be added in a future version
#eg: python 163—video.py http://v.163.com/special/opencourse/ios7.html
from bs4 import BeautifulSoup
import re
import sys,os
import urllib
import codecs
# Display the download progress as a percentage
def rpb(blocknum, blocksize, totalsize):
percent = 100.0 * blocknum * blocksize / totalsize
if percent > 100:percent = 100
    # Format and print the download progress
    sys.stdout.write("[%.2f%%] \r" % percent)
    # Keep refreshing the percentage on the same line instead of printing a newline
sys.stdout.flush()
def parser(url):
    # Fetch the page
    html = urllib.urlopen(url).read()
    # The document encoding is known, so decode to unicode first, then
    # re-encode as utf-8 before passing it to BeautifulSoup
    htm = unicode(html, 'gb2312', 'ignore').encode('utf-8', 'ignore')
    # Load it with BeautifulSoup
    soup = BeautifulSoup(htm)
    # Get the real MP4 download URL of each episode
    detail = soup.find('div', {"class": 'f-pa menu j-downitem'})
    downlink = detail.findAll('a')[2]
    downlink1 = downlink.attrs.get('href')
    print downlink1
    return downlink1
#def clearData(self):
# For some NetEase Open Course albums, the links of the first 10 episodes
# come back duplicated, so duplicate links need to be removed
def download(url):
    # Fetch the page
    html = urllib.urlopen(url).read()
    # The document encoding is known, so decode to unicode first, then
    # re-encode as utf-8 before passing it to BeautifulSoup
    htm = unicode(html, 'gb2312', 'ignore').encode('utf-8', 'ignore')
    # Load it with BeautifulSoup
    soup = BeautifulSoup(htm)
    # Get the detailed course listing
    detail = soup.findAll('tr', {'class': ['u-odd', 'u-even']})
    for i in detail:
        # Get the course title
        linkurl = i.find('td', {"class": "u-ctitle"})
        downLink = linkurl.a['href']
        fileName = linkurl.contents[0].strip().lstrip().rstrip('>') + linkurl.a.string.strip().lstrip().rstrip('<')
        print fileName
        print downLink
#L=[]
#L.append(fileName)
if not os.path.exists(fileName):
downLink1=parser(downLink)
urllib.urlretrieve(downLink1,fileName+".mp4",rpb)
def main(argv):
if len(argv)>=2:
        download(argv[1])
if __name__=="__main__":
main(sys.argv)
|
frankeen/163_Opencourse_download
|
163_download.py
|
Python
|
mit
| 2,432
| 0.033838
|
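The decode-then-re-encode step above is the classic Python 2 idiom for feeding a known-encoding page to BeautifulSoup. For contrast, a sketch of the same normalization in Python 3, where a single decode suffices because `str` is already unicode (same example URL as the script's usage comment):

```python
# Python 3 sketch of the gb2312 normalization done in parser()/download().
from urllib.request import urlopen
from bs4 import BeautifulSoup

raw = urlopen("http://v.163.com/special/opencourse/ios7.html").read()
html = raw.decode("gb2312", "ignore")      # bytes -> str, bad bytes dropped
soup = BeautifulSoup(html, "html.parser")  # no re-encode step needed
```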
"""
The "travel from home to the park" example from my lectures.
Author: Dana Nau <nau@cs.umd.edu>, May 31, 2013
This file should work correctly in both Python 2.7 and Python 3.2.
"""
import pyhop
import random
import sys
from utils_plan import *
from util_plot import *
# state variables
state1 = pyhop.State('state')
state1.status = 3
state1.concepts = ["Concept A", "Concept B", "Concept C"]
state1.relations = ["Relation A", "Relation B", "Relation C"]
state1.concepts_heard_count = [0,0,0]
state1.relations_heard_count = [0,0,0]
state1.variables = pyhop.State('variables')
state1.variables.affect = pyhop.State('affect')
state1.variables.affect.skill = 0
state1.variables.affect.challenge = 0
state1.variables.affect.boredom = 2
state1.variables.affect.frustration = -2
state1.variables.affect.confidence = DictEx(2)
# end - state variables
# Operators
# TEST: what if we do not take state as an argument?
# Heard(C, C_HearCount), Frustration--, Boredom++, Skill++
def print_a_concept_1(state, c):
state.status -= 1
state.concepts_heard_count[state.concepts.index(c)] += 1
state.variables.affect.frustration -= 1
state.variables.affect.boredom += 1
state.variables.affect.skill += 1
# print "Style 1", c
return state # TEST: what if we do not return anything?
def print_a_concept_2(state, c):
state.concepts_heard_count[state.concepts.index(c)] += 1
state.status -= 1
state.variables.affect.frustration -= 1
state.variables.affect.boredom += 1
state.variables.affect.skill += 1
# print "Style 2", c
return state # TEST: what if we do not return anything?
def print_a_relation_1(state, r):
state.relations_heard_count[state.relations.index(r)] += 1
state.status -= 1
state.variables.affect.frustration -= 1
state.variables.affect.boredom += 1
state.variables.affect.skill += 1
# print r
return state
def print_a_relation_2(state, r):
state.relations_heard_count[state.relations.index(r)] += 1
state.status -= 1
state.variables.affect.frustration -= 1
state.variables.affect.boredom += 1
state.variables.affect.skill += 1
# print r
return state
# Learned(R*) or Not(Learned(R*)), ((Confidence(C)++) or (Frustration++, Confidence(C)--)), Challenge++, Boredom--
def ask_true_false_on_concept(state, c):
state.status += 1
state.variables.affect.frustration += 1 # how to model user's success rate on a particular question
state.variables.affect.challenge += 1
state.variables.affect.boredom -= 1
# print "Is it true?\n", c
return state
def ask_true_false_on_relation(state, r):
state.status += 1
state.variables.affect.frustration += 1 # how to model user's success rate on a particular question
state.variables.affect.challenge += 1
state.variables.affect.boredom -= 1
# print "Is it true?\n", r
return state
def show_congrats(state, a = 0):
return state
pyhop.declare_operators(print_a_concept_1, print_a_concept_2, print_a_relation_1, print_a_relation_2, ask_true_false_on_concept, ask_true_false_on_relation, show_congrats)
# pyhop.print_operators()
# End - Operators
# Methods
def present_a_concept(state, c):
if state.variables.affect.boredom < 3:
if random.randint(0,100) < 50:
return [('print_a_concept_1', c)]
else:
return [('print_a_concept_2', c)]
return []
def present_a_relation(state, r):
if state.concepts_heard_count[state.relations.index(r)] > 0:
if state.variables.affect.boredom < 3:
if random.randint(0,100) < 50:
return [('print_a_relation_1', r)]
else:
return [('print_a_relation_2', r)]
return []
def quest_on_concept(state, c):
if state.concepts_heard_count[state.concepts.index(c)] > 0:
if state.variables.affect.frustration < 3:
return [('ask_true_false_on_concept', c)]
return []
def quest_on_relation(state, r):
if state.relations_heard_count[state.relations.index(r)] > 0:
if state.variables.affect.frustration < 3:
return [('ask_true_false_on_relation', r)]
return []
def get_random_entry(collection):
return collection[random.randint(0, len(collection)-1)]
def next_step(state, dummy):
r = random.randint(0,100)
if r < 25:
return [("present_a_concept", get_random_entry(state.concepts))]
elif r < 50:
return [("present_a_relation", get_random_entry(state.relations))]
elif r < 75:
return [("quest_on_concept", get_random_entry(state.concepts))]
else:
return [("quest_on_relation", get_random_entry(state.relations))]
def done(state, dummy):
return [("show_congrats", dummy)]
def teach_knowledge(state, target_heard_count):
for hc in state.concepts_heard_count:
if hc < target_heard_count:
return [('next_step', target_heard_count), ('teach', target_heard_count)]
for hc in state.relations_heard_count:
if hc < target_heard_count:
return [('next_step', target_heard_count), ('teach', target_heard_count)]
return [('done', target_heard_count)]
# have to specify the data structure of map and then how that is to be disseminated using the existing methods
pyhop.declare_methods('present_a_concept',present_a_concept)
pyhop.declare_methods('present_a_relation',present_a_relation)
pyhop.declare_methods('quest_on_concept',quest_on_concept)
pyhop.declare_methods('quest_on_relation',quest_on_relation)
pyhop.declare_methods('next_step',next_step)
pyhop.declare_methods('done',done)
pyhop.declare_methods('teach',teach_knowledge)
# print('')
# pyhop.print_methods()
# End - Methods
#result format: ('print_a_concept_1', 'Concept B'), ('print_a_concept_2', 'Concept A'),
# query
verbose_level = 1
target_heard_count = 5
if len(sys.argv) > 1:
    if sys.argv[1] == "--help":
        print "args: target_heard_count verbose_level"
        exit(0)
    target_heard_count = int(sys.argv[1])
if len(sys.argv) > 2:
    verbose_level = int(sys.argv[2])
print "planning for target_heard_count:", target_heard_count, " with verbose_level:", verbose_level
result = pyhop.pyhop(state1, [('teach', target_heard_count)], verbose=verbose_level)
# plot_plan(result)
state_data = simulate_plan_execute(result, state1)
plot_plan(result, state_data, ["/affect/challenge", "/affect/boredom", "/affect/skill", '/affect/frustration'])
#end - query
|
mdasifhasan/Experiments_HTN_Planner
|
PyHop/exp_1.py
|
Python
|
gpl-3.0
| 6,445
| 0.007913
|
# -*- coding: utf-8 -*-
from Scraping4blog import Scraping4blog
import sys,os
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + '/../util')
from settings import SettingManager
def main():
conf = SettingManager()
instance = Scraping4blog(conf)
instance.run()
if __name__ == "__main__":
main()
|
yamanakahirofumi/mokobot
|
Scraping4blog/run.py
|
Python
|
mit
| 323
| 0.01548
|
'''OpenGL extension OES.blend_subtract
This module customises the behaviour of the
OpenGL.raw.GLES1.OES.blend_subtract to provide a more
Python-friendly API
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/OES/blend_subtract.txt
'''
from OpenGL import platform, constant, arrays
from OpenGL import extensions, wrapper
import ctypes
from OpenGL.raw.GLES1 import _types, _glgets
from OpenGL.raw.GLES1.OES.blend_subtract import *
from OpenGL.raw.GLES1.OES.blend_subtract import _EXTENSION_NAME
def glInitBlendSubtractOES():
'''Return boolean indicating whether this extension is available'''
from OpenGL import extensions
return extensions.hasGLExtension( _EXTENSION_NAME )
### END AUTOGENERATED SECTION
|
stack-of-tasks/rbdlpy
|
tutorial/lib/python2.7/site-packages/OpenGL/GLES1/OES/blend_subtract.py
|
Python
|
lgpl-3.0
| 768
| 0.009115
|
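All of PyOpenGL's generated `glInit*` helpers follow the pattern above: they just test the runtime extension string. A usage sketch, assuming a current GLES1 context exists (extension queries are only meaningful once a context is bound):

```python
# Usage sketch: gate subtract-blending on the availability check.
from OpenGL.GLES1.OES.blend_subtract import glInitBlendSubtractOES

if glInitBlendSubtractOES():
    # GL_FUNC_SUBTRACT_OES / GL_FUNC_REVERSE_SUBTRACT_OES may be used here
    print("OES_blend_subtract is available")
else:
    print("OES_blend_subtract is not supported by this context")
```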
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# This module is Copyright (c) 2009-2013 General Solutions (http://gscom.vn) All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name" : "Vietnam Chart of Accounts",
"version" : "1.0",
"author" : "General Solutions",
'website': 'http://gscom.vn',
"category" : "Localization/Account Charts",
"description": """
This is the module to manage the accounting chart for Vietnam in OpenERP.
=========================================================================
This module applies to companies based in Vietnamese Accounting Standard (VAS).
**Credits:** General Solutions.
""",
"depends" : ["account","base_vat","base_iban"],
"data" : ["account_tax_code.xml","account_chart.xml","account_tax.xml","l10n_vn_wizard.xml"],
"demo" : [],
'auto_install': False,
"installable": True,
}
|
addition-it-solutions/project-all
|
addons/l10n_vn/__openerp__.py
|
Python
|
agpl-3.0
| 1,696
| 0.008255
|
# -*- coding: utf-8 -*-
# Copyright (C) 2019 - 2020 by Pedro Mendes, Rector and Visitors of the
# University of Virginia, University of Heidelberg, and University
# of Connecticut School of Medicine.
# All rights reserved.
# Copyright (C) 2017 - 2018 by Pedro Mendes, Virginia Tech Intellectual
# Properties, Inc., University of Heidelberg, and University of
# of Connecticut School of Medicine.
# All rights reserved.
# Copyright (C) 2010 - 2016 by Pedro Mendes, Virginia Tech Intellectual
# Properties, Inc., University of Heidelberg, and The University
# of Manchester.
# All rights reserved.
# Copyright (C) 2008 - 2009 by Pedro Mendes, Virginia Tech Intellectual
# Properties, Inc., EML Research, gGmbH, University of Heidelberg,
# and The University of Manchester.
# All rights reserved.
# Copyright (C) 2007 by Pedro Mendes, Virginia Tech Intellectual
# Properties, Inc. and EML Research, gGmbH.
# All rights reserved.
import COPASI
import unittest
from types import *
class Test_CReport(unittest.TestCase):
def setUp(self):
self.datamodel=COPASI.CRootContainer.addDatamodel()
self.task=self.datamodel.getTask(0)
self.report=self.task.getReport()
self.assert_(isinstance(self.report, COPASI.CReport))
def test_getReportDefinition(self):
reportDefinition=self.report.getReportDefinition()
self.assert_(reportDefinition!=None)
self.assert_(reportDefinition.__class__==COPASI.CReportDefinition)
def test_setReportDefinition(self):
listOfReportDefinitions=self.datamodel.getReportDefinitionList()
reportDefinition=listOfReportDefinitions.createReportDefinition("MyReportDefinition","No Comment")
self.assert_(reportDefinition!=None)
self.report.setReportDefinition(reportDefinition)
self.assert_(self.report.getReportDefinition().getKey()==reportDefinition.getKey())
def test_getTarget(self):
target=self.report.getTarget()
self.assert_(target!=None)
self.assert_(type(target)==StringType)
def test_setTarget(self):
target="MyTaget.txt"
self.report.setTarget(target)
t=self.report.getTarget()
self.assert_(t!=None)
self.assert_(t==target)
def test_append(self):
append=self.report.append
self.assert_(type(append)==BooleanType)
def test_setAppend(self):
self.report.setAppend(True)
append=self.report.append
self.assert_(append==True)
self.report.setAppend(False)
append=self.report.append
self.assert_(append==False)
def suite():
tests=[
'test_getReportDefinition'
,'test_setReportDefinition'
,'test_getTarget'
,'test_setTarget'
,'test_append'
,'test_setAppend'
]
return unittest.TestSuite(map(Test_CReport,tests))
if(__name__ == '__main__'):
unittest.TextTestRunner(verbosity=2).run(suite())
|
copasi/COPASI
|
copasi/bindings/python/unittests/Test_CReport.py
|
Python
|
artistic-2.0
| 3,011
| 0.026901
|
import os, sys
from mod_python import util, Cookie
def _path(): return '/'.join(os.path.realpath(__file__).split('/')[:-1])
if _path() not in sys.path: sys.path.insert(0, _path())
def _load(req, page, dev= False):
"""
if not dev:
branch= 'html'
page= 'maintenance'
f= open('%s/%s/%s.html' %(_path(), branch, page), 'rb')
html= f.read()
f.close()
return html
"""
if page in (
'panel', 'driver', 'campaign'
):
c= Cookie.get_cookies(req, Cookie.Cookie)
if c.has_key('username')== False:
if page== 'campaign':
try:
page+= '&'+ req.subprocess_env['QUERY_STRING']
except:
pass
util.redirect(req, 'token' if page== 'broadcast' else 'login?page='+ page)
return
branch= 'html' if dev else 'html_'
f= open('%s/%s/%s.html' %(_path(), branch, page), 'rb')
html= f.read()
f.close()
return html
def header(req): return _load(req, 'header')
def footer(req): return _load(req, 'footer')
def register(req): return _load(req, 'register')
def registered(req): return _load(req, 'registered')
def login(req): return _load(req, 'login')
def token(req): return _load(req, 'token')
def request(req): return _load(req, 'request')
def requested(req): return _load(req, 'requested')
def change(req): return _load(req, 'change')
def changed(req): return _load(req, 'changed')
def driver(req): return _load(req, 'driver')
def panel(req): return _load(req, 'panel')
def campaign(req): return _load(req, 'campaign')
def counter(req): return _load(req, 'counter')
def counter_(req):
import urllib, json
form= util.FieldStorage(req, keep_blank_values= 1).items()
data= dict(json.loads(json.dumps(form, ensure_ascii=False)))
url= 'http://moon.greenpeace.org/c/print.php?a=%s' %data['a']
struct= {'n': urllib.urlopen(url).read()}
if "callback" in data: callback= data["callback"]
else: callback= None
req.content_type= "application/json"
if(callback): response= "%s(%s)" %(callback, json.dumps(struct, ensure_ascii=False))
else: response= struct
return response
def index(req):
util.redirect(req, 'panel')
return
def dev(req):
params= dict((item.split('=') for item in req.subprocess_env['QUERY_STRING'].split('&')))
page= params['page']
return _load(req, page, dev= True)
#def client(req): return _load(req, 'client')
|
Sth0nian/aurora
|
index.py
|
Python
|
mit
| 2,289
| 0.046308
|
#
# Author: Henrique Pereira Coutada Miranda
# Example script to plot the weight of the atomic species in the bandstructure
#
from qepy import *
import sys
import argparse
import matplotlib.pyplot as plt
folder = 'bands'
npoints = 20
p = Path([ [[0.0, 0.0, 0.0],'G'],
[[0.5, 0.0, 0.0],'M'],
[[1./3,1./3,0.0],'K'],
[[0.0, 0.0, 0.0],'G']], [int(npoints*2),int(npoints),int(sqrt(5)*npoints)])
#parse options
parser = argparse.ArgumentParser(description='Test the yambopy script.')
parser.add_argument('-c' ,'--calc', action="store_true", help='Project orbitals')
parser.add_argument('-a' ,'--analyse', action="store_true", help='Analyse data')
parser.add_argument('-p1' ,'--plot_size', action="store_true", help='Analyse data')
parser.add_argument('-p2' ,'--plot_orbital', action="store_true", help='Analyse data')
args = parser.parse_args()
if len(sys.argv)==1:
parser.print_help()
sys.exit(1)
if args.calc:
f = open('proj.in','w')
projwfc = ProjwfcIn('mos2')
projwfc.write(folder=folder)
projwfc.run(folder=folder)
if args.analyse:
pxml = ProjwfcXML('mos2',path=folder)
# obtain the list of orbitals and quantum numbers
print pxml
print "Writting projections"
pxml.write_proj()
print "done!"
if args.plot_size:
pxml = ProjwfcXML('mos2',path=folder)
print pxml
# select orbitals to plot
# example1 mo, s2 and mos2
mo = list(xrange(16)) #list containing the indexes of all the orbitals of mo
s = list(xrange(16,48)) #list containing the indexes of all the orbitals of s
fig = plt.figure(figsize=(30,10))
for n,(orb,title) in enumerate(zip([mo,s,mo+s],['mo','s','mos2'])):
ax = plt.subplot(1,3,n+1)
plt.title(title)
pxml.plot_eigen(ax,path=p,selected_orbitals=orb,size=40)
ax.set_ylim([-7,6])
plt.show()
if args.plot_orbital:
pxml = ProjwfcXML('mos2',path=folder)
print pxml
# select orbitals to plot
# example1 mo, s2
mo = list(xrange(16)) #list containing the indexes of all the orbitals of mo
s = list(xrange(16,48)) #list containing the indexes of all the orbitals of s
fig = plt.figure(figsize=(8,10))
ax = plt.subplot(1,1,1)
pxml.plot_eigen(ax,path=p,selected_orbitals=mo,selected_orbitals_2=s,size=40,cmap='RdBu')
ax.set_ylim([-7,6])
plt.show()
|
palful/yambopy
|
tutorial/mos2/proj_mos2.py
|
Python
|
bsd-3-clause
| 2,375
| 0.026947
|
#!/usr/bin/env python3
# Copyright (C) 2020-2021 The btclib developers
#
# This file is part of btclib. It is subject to the license terms in the
# LICENSE file found in the top-level directory of this distribution.
#
# No part of btclib including this file, may be copied, modified, propagated,
# or distributed except according to the terms contained in the LICENSE file.
"""btclib.bip32 non-regression tests."""
|
fametrano/BitcoinBlockchainTechnology
|
tests/bip32/__init__.py
|
Python
|
mit
| 417
| 0
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Sun Mar 27 01:20:46 2016
@author: caioau
"""
import matplotlib.pyplot as plt
import networkx as nx
from networkx.drawing.nx_agraph import graphviz_layout
def main():
    G = nx.DiGraph()  # G is a directed graph
    # build the graph from its edges
    G.add_weighted_edges_from([(1,2,2.0),(1,3,1.0),(2,3,3.0),(2,4,3.0),(3,5,1.0),(4,6,2.0),(5,4,2.0),(5,6,5.0)])
    for i in G.edges():
        # print i[0], i[1]
        G[i[0]][i[1]]["color"] = "black"
    # G[1][2]["color"] = "red"
    maiorCaminho = nx.dag_longest_path(G)
    print maiorCaminho
    for i in range(1, len(maiorCaminho)):
        G[maiorCaminho[i-1]][maiorCaminho[i]]["color"] = "red"
    desenhaGrafo(G, "grafo-3.png")
def desenhaGrafo(G, pngfilename):  # draw the graph and save it to a png image
    edge_labels = dict([((u,v,), d['weight'])  # build the edge labels
                        for u,v,d in G.edges(data=True)])
    colors = [G[u][v]['color'] for u,v in G.edges()]
    pos = graphviz_layout(G, prog='neato')  # get the node positions (for drawing)  # TODO: disable this?
    nx.draw_networkx_edges(G, pos, edge_color=colors)  # draw the edges
    nx.draw_networkx_labels(G, pos)  # draw the node labels
    nx.draw_networkx_edge_labels(G, pos, edge_labels=edge_labels)  # draw the edge labels
    nx.draw_networkx_nodes(G, pos, node_color='w')  # draw the nodes
    plt.axis('off')  # turn off the axes
plt.savefig(pngfilename)
plt.close("all")
if __name__ == "__main__":
main()
|
caioau/caioau-personal
|
fluxos/lista 2/grafo-3.py
|
Python
|
gpl-3.0
| 1,558
| 0.032734
|
import builtins
# exec being part of builtins is Python 3 only
builtins.exec("print(42)") # $getCode="print(42)"
|
github/codeql
|
python/ql/test/library-tests/frameworks/stdlib-py3/CodeExecution.py
|
Python
|
mit
| 115
| 0
|
#!/usr/bin/env python
from __future__ import print_function
from molmod import *
# 0) Load the molecule and set the default graph
mol = Molecule.from_file("dopamine.xyz")
mol.set_default_graph()
# 1) Build a list of atom indexes involved in angles.
angles = []
# First loop over all atoms on the molecule.
for i1 in range(mol.size):
# For each atom we will find all bending angles centered at the current
# atom. For this we construct (an ordered!) list of all bonded neighbors.
n = list(mol.graph.neighbors[i1])
# The second loop iterates over all neighbors. The enumerate function is
# used to assign a counter value to the variable index.
for index, i0 in enumerate(n):
# The third loop iterates over all other neighbors that came before i1.
for i2 in n[:index]:
# Each triple is stored as an item in the list angles.
angles.append((i0, i1, i2))
# 2) Iterate over all angles, compute and print.
print("An overview of all bending angles in dopamine:")
for i0, i1, i2 in angles:
# Notice again the [0] at the end.
angle = bend_angle(mol.coordinates[[i0, i1, i2]])[0]
# Python formatting of the indexes, symbols, and the angle in degrees.
print("%2i %2i %2i %2s %2s %2s %5.1f" % (
i0, i1, i2, mol.symbols[i0], mol.symbols[i1], mol.symbols[i2], angle/deg
))
|
molmod/molmod
|
molmod/examples/003_internal_coordinates/b_bending_angles.py
|
Python
|
gpl-3.0
| 1,360
| 0.000735
|
import sys
from typing import List
from .device import Device
from .enumeratehelper import EnumerateHelper
class DeviceManager(object):
__instance = None
@classmethod
def instance(cls):
if cls.__instance is None:
helper = EnumerateHelper()
cls.__instance = cls(helper)
return cls.__instance
def __init__(self, helper: EnumerateHelper):
self.helper = helper
self.devices: List[Device] = []
if "--add-dummy-joystick" in sys.argv:
self.add_joystick_device(
{
"axes": 2,
"hats": 0,
"balls": 0,
"buttons": 1,
"name": "Dummy Joystick",
"id": "Dummy Joystick",
}
)
def get_devices(self, refresh: bool = False) -> List[Device]:
print("DeviceManager.get_devices")
if self.helper is not None:
if refresh:
self.helper.update()
else:
self.helper.init()
return self.helper.devices
print("DeviceManager.get_devices ->", self.devices)
return self.devices
def add_device_from_event(self, event):
if event["type"] == "joy-device-added":
return self.add_joystick_device(event)
elif event["type"] == "mouse-device-added":
return self.add_mouse_device(event)
elif event["type"] == "keyboard-device-added":
return self.add_keyboard_device(event)
else:
assert False
def add_joystick_device(self, event):
device = Device()
device.axes = event["axes"]
device.balls = event["balls"]
device.hats = event["hats"]
device.buttons = event["buttons"]
device.name = event["name"]
device.id = event["id"]
device.type = Device.TYPE_JOYSTICK
self.devices.append(device)
print("[INPUT] Joystick device added:", device.name)
return device
def add_keyboard_device(self, event):
device = Device()
device.name = event["name"]
device.id = event["id"]
device.type = Device.TYPE_KEYBOARD
self.devices.append(device)
print("[INPUT] Keyboard device added:", device.name)
return device
def add_mouse_device(self, event):
device = Device()
device.name = event["name"]
device.id = event["id"]
device.type = Device.TYPE_MOUSE
self.devices.append(device)
print("[INPUT] Mouse device added:", device.name)
return device
def remove_all_devices(self):
for device in self.devices:
print("[INPUT] Removing device", device.name)
self.devices.clear()
|
FrodeSolheim/fs-uae-launcher
|
fsgamesys/input/devicemanager.py
|
Python
|
gpl-2.0
| 2,793
| 0
|
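`DeviceManager` mixes a lazily built class-level singleton with constructor injection of the enumeration helper, so code paths that need isolation can still build their own instance. A usage sketch (passing `None` as the helper is an assumption for illustration; `get_devices` already tolerates it):

```python
# Usage sketch: shared singleton vs. an isolated instance for testing.
manager = DeviceManager.instance()       # helper-backed shared instance
devices = manager.get_devices(refresh=True)

isolated = DeviceManager(helper=None)    # falls back to its own device list
isolated.add_joystick_device({
    "axes": 2, "hats": 0, "balls": 0, "buttons": 1,
    "name": "Test Pad", "id": "Test Pad",
})
assert isolated.get_devices()[0].name == "Test Pad"
```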
# $Id$
"""Mixins that are useful for classes using vtk_kit.
@author: Charl P. Botha <http://cpbotha.net/>
"""
from external.vtkPipeline.ConfigVtkObj import ConfigVtkObj
from external.vtkPipeline.vtkMethodParser import VtkMethodParser
from module_base import ModuleBase
from module_mixins import IntrospectModuleMixin # temporary
import module_utils # temporary, most of this should be in utils.
import re
import types
import utils
#########################################################################
class PickleVTKObjectsModuleMixin(object):
"""This mixin will pickle the state of all vtk objects whose binding
attribute names have been added to self._vtkObjects, e.g. if you have
a self._imageMath, '_imageMath' should be in the list.
Your module has to derive from module_base as well so that it has a
self._config!
Remember to call the __init__ of this class with the list of attribute
strings representing vtk objects that you want pickled. All the objects
have to exist and be initially configured by then.
Remember to call close() when your child class close()s.
"""
def __init__(self, vtkObjectNames):
# you have to add the NAMES of the objects that you want pickled
# to this list.
self._vtkObjectNames = vtkObjectNames
self.statePattern = re.compile ("To[A-Z0-9]")
# make sure that the state of the vtkObjectNames objects is
# encapsulated in the initial _config
self.logic_to_config()
def close(self):
# make sure we get rid of these bindings as well
del self._vtkObjectNames
def logic_to_config(self):
parser = VtkMethodParser()
for vtkObjName in self._vtkObjectNames:
# pickled data: a list with toggle_methods, state_methods and
# get_set_methods as returned by the vtkMethodParser. Each of
# these is a list of tuples with the name of the method (as
# returned by the vtkMethodParser) and the value; in the case
# of the stateMethods, we use the whole stateGroup instead of
# just a single name
vtkObjPD = [[], [], []]
vtkObj = getattr(self, vtkObjName)
parser.parse_methods(vtkObj)
# parser now has toggle_methods(), state_methods() and
# get_set_methods();
# toggle_methods: ['BlaatOn', 'AbortExecuteOn']
# state_methods: [['SetBlaatToOne', 'SetBlaatToTwo'],
# ['SetMaatToThree', 'SetMaatToFive']]
# get_set_methods: ['NumberOfThreads', 'Progress']
for method in parser.toggle_methods():
# if you query ReleaseDataFlag on a filter with 0 outputs,
# VTK yields an error
if vtkObj.GetNumberOfOutputPorts() == 0 and \
method == 'ReleaseDataFlagOn':
continue
# we need to snip the 'On' off
val = eval("vtkObj.Get%s()" % (method[:-2],))
vtkObjPD[0].append((method, val))
for stateGroup in parser.state_methods():
# we search up to the To
end = self.statePattern.search (stateGroup[0]).start ()
# so we turn SetBlaatToOne to GetBlaat
get_m = 'G'+stateGroup[0][1:end]
# we're going to have to be more clever when we set_config...
# use a similar trick to get_state in vtkMethodParser
val = eval('vtkObj.%s()' % (get_m,))
vtkObjPD[1].append((stateGroup, val))
for method in parser.get_set_methods():
val = eval('vtkObj.Get%s()' % (method,))
vtkObjPD[2].append((method, val))
# finally set the pickle data in the correct position
setattr(self._config, vtkObjName, vtkObjPD)
def config_to_logic(self):
# go through at least the attributes in self._vtkObjectNames
for vtkObjName in self._vtkObjectNames:
try:
vtkObjPD = getattr(self._config, vtkObjName)
vtkObj = getattr(self, vtkObjName)
except AttributeError:
print "PickleVTKObjectsModuleMixin: %s not available " \
"in self._config OR in self. Skipping." % (vtkObjName,)
else:
for method, val in vtkObjPD[0]:
if val:
eval('vtkObj.%s()' % (method,))
else:
# snip off the On
eval('vtkObj.%sOff()' % (method[:-2],))
for stateGroup, val in vtkObjPD[1]:
# keep on calling the methods in stategroup until
# the getter returns a value == val.
end = self.statePattern.search(stateGroup[0]).start()
getMethod = 'G'+stateGroup[0][1:end]
for i in range(len(stateGroup)):
m = stateGroup[i]
eval('vtkObj.%s()' % (m,))
tempVal = eval('vtkObj.%s()' % (getMethod,))
if tempVal == val:
# success! break out of the for loop
break
for method, val in vtkObjPD[2]:
try:
eval('vtkObj.Set%s(val)' % (method,))
except TypeError:
if type(val) in [types.TupleType, types.ListType]:
# sometimes VTK wants the separate elements
# and not the tuple / list
eval("vtkObj.Set%s(*val)"%(method,))
else:
# re-raise the exception if it wasn't a
# tuple/list
raise
#########################################################################
# note that the pickle mixin comes first, as its config_to_logic/logic_to_config
# should be chosen over that of noConfig
class SimpleVTKClassModuleBase(PickleVTKObjectsModuleMixin,
IntrospectModuleMixin,
ModuleBase):
"""Use this base to make a DeVIDE module that wraps a single VTK
object. The state of the VTK object will be saved when the network
is.
You only have to override the __init__ method and call the __init__
of this class with the desired parameters.
The __doc__ string of your module class will be replaced with the
__doc__ string of the encapsulated VTK class (and will thus be
shown if the user requests module help). If you don't want this,
call the ctor with replaceDoc=False.
inputFunctions is a list of the complete methods that have to be called
on the encapsulated VTK class, e.g. ['SetInput1(inputStream)',
    'SetInput2(inputStream)']. The same goes for outputFunctions, except that
there's no inputStream involved. Use None in both cases if you want
the default to be used (SetInput(), GetOutput()).
"""
def __init__(self, module_manager, vtkObjectBinding, progressText,
inputDescriptions, outputDescriptions,
replaceDoc=True,
inputFunctions=None, outputFunctions=None):
self._viewFrame = None
self._configVtkObj = None
# first these two mixins
ModuleBase.__init__(self, module_manager)
self._theFilter = vtkObjectBinding
if replaceDoc:
myMessage = "<em>"\
"This is a special DeVIDE module that very simply " \
"wraps a single VTK class. In general, the " \
"complete state of the class will be saved along " \
"with the rest of the network. The documentation " \
"below is that of the wrapped VTK class:</em>"
self.__doc__ = '%s\n\n%s' % (myMessage, self._theFilter.__doc__)
# now that we have the object, init the pickle mixin so
# that the state of this object will be saved
PickleVTKObjectsModuleMixin.__init__(self, ['_theFilter'])
# make progress hooks for the object
module_utils.setup_vtk_object_progress(self, self._theFilter,
progressText)
self._inputDescriptions = inputDescriptions
self._outputDescriptions = outputDescriptions
self._inputFunctions = inputFunctions
self._outputFunctions = outputFunctions
def _createViewFrame(self):
parentWindow = self._module_manager.get_module_view_parent_window()
import resources.python.defaultModuleViewFrame
reload(resources.python.defaultModuleViewFrame)
dMVF = resources.python.defaultModuleViewFrame.defaultModuleViewFrame
viewFrame = module_utils.instantiate_module_view_frame(
self, self._module_manager, dMVF)
# ConfigVtkObj parent not important, we're passing frame + panel
# this should populate the sizer with a new sizer7
# params: noParent, noRenwin, vtk_obj, frame, panel
self._configVtkObj = ConfigVtkObj(None, None,
self._theFilter,
viewFrame, viewFrame.viewFramePanel)
module_utils.create_standard_object_introspection(
self, viewFrame, viewFrame.viewFramePanel,
{'Module (self)' : self}, None)
# we don't want the Execute button to be default... else stuff gets
# executed with every enter in the command window (at least in Doze)
module_utils.create_eoca_buttons(self, viewFrame,
viewFrame.viewFramePanel,
False)
self._viewFrame = viewFrame
return viewFrame
def close(self):
# we play it safe... (the graph_editor/module_manager should have
# disconnected us by now)
for input_idx in range(len(self.get_input_descriptions())):
self.set_input(input_idx, None)
PickleVTKObjectsModuleMixin.close(self)
IntrospectModuleMixin.close(self)
if self._viewFrame is not None:
self._configVtkObj.close()
self._viewFrame.Destroy()
ModuleBase.close(self)
# get rid of our binding to the vtkObject
del self._theFilter
def get_output_descriptions(self):
return self._outputDescriptions
def get_output(self, idx):
        # this will only ever be invoked if your get_output_descriptions has
# 1 or more elements
if self._outputFunctions:
return eval('self._theFilter.%s' % (self._outputFunctions[idx],))
else:
return self._theFilter.GetOutput()
def get_input_descriptions(self):
return self._inputDescriptions
def set_input(self, idx, inputStream):
# this will only be called for a certain idx if you've specified that
# many elements in your get_input_descriptions
if self._inputFunctions:
exec('self._theFilter.%s' %
(self._inputFunctions[idx]))
else:
if idx == 0:
self._theFilter.SetInput(inputStream)
else:
self._theFilter.SetInput(idx, inputStream)
def execute_module(self):
# it could be a writer, in that case, call the Write method.
if hasattr(self._theFilter, 'Write') and \
callable(self._theFilter.Write):
self._theFilter.Write()
else:
self._theFilter.Update()
def streaming_execute_module(self):
"""All VTK classes should be streamable.
"""
# it could be a writer, in that case, call the Write method.
if hasattr(self._theFilter, 'Write') and \
callable(self._theFilter.Write):
self._theFilter.Write()
else:
self._theFilter.Update()
def view(self):
if self._viewFrame is None:
# we have an initial config populated with stuff and in sync
# with theFilter. The viewFrame will also be in sync with the
# filter
self._viewFrame = self._createViewFrame()
self._viewFrame.Show(True)
self._viewFrame.Raise()
def config_to_view(self):
# the pickleVTKObjectsModuleMixin does logic <-> config
# so when the user clicks "sync", logic_to_config is called
# which transfers picklable state from the LOGIC to the CONFIG
# then we do double the work and call update_gui, which transfers
# the same state from the LOGIC straight up to the VIEW
self._configVtkObj.update_gui()
def view_to_config(self):
# same thing here: user clicks "apply", view_to_config is called which
# zaps UI changes straight to the LOGIC. Then we have to call
# logic_to_config explicitly which brings the info back up to the
# config... i.e. view -> logic -> config
# after that, config_to_logic is called which transfers all state AGAIN
# from the config to the logic
self._configVtkObj.apply_changes()
self.logic_to_config()
#########################################################################
|
nagyistoce/devide
|
module_kits/vtk_kit/mixins.py
|
Python
|
bsd-3-clause
| 13,612
| 0.002424
|
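The `SimpleVTKClassModuleBase` docstring spells out the whole recipe: override `__init__` only, pass the VTK object and the input/output descriptions through. A hedged sketch of such a wrapper module (`vtkImageGaussianSmooth` is an arbitrary example; `module_manager` is supplied by the DeVIDE framework at load time):

```python
# Hypothetical DeVIDE wrapper following the documented recipe above.
import vtk

class gaussianSmooth(SimpleVTKClassModuleBase):
    def __init__(self, module_manager):
        SimpleVTKClassModuleBase.__init__(
            self, module_manager,
            vtk.vtkImageGaussianSmooth(),
            'Smoothing image with a Gaussian kernel.',
            ('vtkImageData',), ('Smoothed vtkImageData',))
```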
from bluepy.btle import *
import time
import serial
from pyqtgraph.Qt import QtGui, QtCore
import pyqtgraph as pg
start_time = time.time()
data = []
data2 = []
data3 = []
data4 = []
angles = []
pg.setConfigOption('background', 'w')
pg.setConfigOption('foreground', 'k')
pen = pg.mkPen('k', width=8)
app = QtGui.QApplication([])
plotWidget = pg.plot(title='biomechanics')
plotWidget.setWindowTitle('elbow angle')
plotWidget.setLabels(left=('angle', 'degrees'))
plotWidget.plotItem.getAxis('left').setPen(pen)
plotWidget.plotItem.getAxis('bottom').setPen(pen)
curve = plotWidget.plot(pen=pen)
plotWidget.setYRange(20, 210)
data = [0]
ser = serial.Serial("/dev/rfcomm0", 9600, timeout=0.5)
t = [0]
# from calibration
arm_straight = 957
arm_bent = 987
class MyDelegate(DefaultDelegate):
def __init__(self):
DefaultDelegate.__init__(self)
def handleNotification(self, cHandle, data):
global data2, data3, data4, angle
if cHandle == 37:
data = data.decode("utf-8")
data2.append(data)
data3 = ''.join(data2)
data4 = data3.splitlines()
angle = 180 - (float(data4[-1]) - arm_straight) / (arm_bent - arm_straight) * 135
print(data4[-1])
angles.append(angle)
# print(data4[-1], angle)
else:
print('received an unexpected handle')
print('Attempting to connect...')
mac1 = 'a4:d5:78:0d:1c:53'
mac2 = 'a4:d5:78:0d:2e:fc'
per = Peripheral(mac1, "public")
per.setDelegate(MyDelegate())
print("Connected")
def update():
global curve, data, angles2
if per.waitForNotifications(1):
t.append(time.time() - start_time)
x = list(range(0, len(angles), 1))
angles2 = [float(i) for i in angles]
curve.setData(x[-50:-1], angles2[-50:-1])
app.processEvents()
timer = QtCore.QTimer()
timer.timeout.connect(update)
timer.start(0)
if __name__ == '__main__':
import sys
if (sys.flags.interactive != 1) or not hasattr(QtCore, 'PYQT_VERSION'):
QtGui.QApplication.instance().exec_()
|
ac769/continuum_technologies
|
software/ble_live_read_graphical.py
|
Python
|
mit
| 2,073
| 0.002894
|
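The `angle` expression in `handleNotification` above is a linear calibration: the straight-arm reading (957) maps to 180 degrees, the fully bent reading (987) maps to 45 degrees, so each raw count is worth 135/30 = 4.5 degrees. The same math as a standalone worked sketch:

```python
# Standalone version of the calibration used in handleNotification above.
ARM_STRAIGHT = 957.0   # raw reading with the arm fully extended
ARM_BENT = 987.0       # raw reading with the arm fully bent

def raw_to_degrees(raw):
    # 957 -> 180 deg, 987 -> 45 deg, linear in between
    return 180 - (float(raw) - ARM_STRAIGHT) / (ARM_BENT - ARM_STRAIGHT) * 135

assert raw_to_degrees(957) == 180.0
assert raw_to_degrees(987) == 45.0
assert raw_to_degrees(972) == 112.5   # halfway between the calibration points
```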
import os
import urllib2
import json
import re
##### Convert a name to a variable name
def cleanName(name):
return re.sub('[\W_]+', '', name.replace('&','And'))
##### Load the categories endpoint
file = urllib2.urlopen('http://api.simplegeo.com/1.0/features/categories.json')
contents = json.loads(file.read())
##### Instantiate the category lists
types = []
fcats = []
fsubcats = []
pcats = []
psubcats = []
##### Form the lists
for entry in contents:
thisType = entry['type']
thisCat = entry['category']
thisSubcat = entry['subcategory']
if thisType and thisType != '':
try:
types.index(thisType)
except:
types.append(thisType)
cats = pcats
subcats = psubcats
if thisType == 'Region':
cats = fcats
subcats = fsubcats
if thisCat and thisCat != '':
try:
cats.index(thisCat)
except:
cats.append(thisCat)
if thisSubcat and thisSubcat != '':
try:
subcats.index(thisSubcat)
except:
subcats.append(thisSubcat)
##### Generate the file
output = 'typedef NSString * SGFeatureType;\n\
typedef NSString * SGFeatureCategory;\n\
typedef NSString * SGFeatureSubcategory;\n'
# Feature types
output += '\n#pragma mark Feature Types\n\n'
for typ in types:
output += '#define SGFeatureType' + cleanName(typ) + ' @\"' + typ + '\"\n'
# Feature categories (Context)
output += '\n#pragma mark Feature Categories (Context)\n\n'
for cat in fcats:
output += '#define SGFeatureCategory' + cleanName(cat) + ' @\"' + cat + '\"\n'
output += '\n#pragma mark Feature Subcategories (Context)\n\n'
for subcat in fsubcats:
output += '#define SGFeatureSubcategory' + cleanName(subcat) + ' @\"' + subcat + '\"\n'
# Feature categories (Places 1.0)
output += '\n#pragma mark Place Categories (Places 1.0)\n\n'
for cat in pcats:
output += '#define SGPlaceCategory' + cleanName(cat) + ' @\"' + cat + '\"\n'
for subcat in psubcats:
output += '#define SGPlaceCategory' + cleanName(subcat) + ' @\"' + subcat + '\"\n'
##### Write file
output = '\
//\n\
// SGCategories.h\n\
// SimpleGeo.framework\n\
//\n\
// Copyright (c) 2010, SimpleGeo Inc.\n\
// All rights reserved.\n\
//\n\
// Redistribution and use in source and binary forms, with or without\n\
// modification, are permitted provided that the following conditions are met:\n\
// * Redistributions of source code must retain the above copyright\n\
// notice, this list of conditions and the following disclaimer.\n\
// * Redistributions in binary form must reproduce the above copyright\n\
// notice, this list of conditions and the following disclaimer in the\n\
// documentation and/or other materials provided with the distribution.\n\
// * Neither the name of the <organization> nor the\n\
// names of its contributors may be used to endorse or promote products\n\
// derived from this software without specific prior written permission.\n\
//\n\
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND\n\
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED\n\
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n\
// DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY\n\
// DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES\n\
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;\n\
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND\n\
// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n\
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS\n\
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\
//\n\n' + output
script_path = os.path.realpath(__file__)
base_path = script_path.split('Scripts')[0]
output_path = base_path + 'Project/Shared/Classes/Client/Params/SGCategories.h'
outputFile = open(output_path,'w+')
outputFile.write(output)
outputFile.close()
|
simplegeo/SimpleGeo.framework
|
scripts/update-sg-categories.py
|
Python
|
bsd-3-clause
| 4,093
| 0.007085
|
import praw
import codecs
import unidecode
import os
import requests
import datetime
import regex as re
from collections import Counter
import RAKE
Rake = RAKE.Rake('foxstoplist.txt')
try:
requests.packages.urllib3.disable_warnings()
except:
pass
subreddits = ["earthporn", "japanpics"]
class redditLogger:
def __init__(self, logfile):
self.logfile = logfile
def logEntry(self, entry, level):
with codecs.open(self.logfile, mode='a', encoding='utf-8') as log:
log.write(entry + '\n')
if 'progress' in level:
print unidecode.unidecode(entry)
class reddit:
def __init__(self, logobj):
self.logger = logobj
self.ua = "Python:ReddWall:v0.1 (by /u/verstandinvictus)"
self.r = praw.Reddit(user_agent=self.ua)
self.dest = os.path.join(os.getcwdu(), 'images')
if not os.path.exists(self.dest):
os.makedirs(self.dest)
self.results = None
self.resetResults()
def resetResults(self):
self.results = dict(
count=0,
skipped=0,
failed=0,
succeeded=0,
nonexistent=0,
)
def getTopLinks(self, sub):
atop = self.r.get_subreddit(sub).get_top_from_all(limit=None)
submissions = set()
for s in (atop,):
for link in s:
submissions.add(link)
titles = list()
for each in submissions:
t = self.generateName(each)
#self.upGoerFive(titles)
def generateName(self, sub):
rawtitle = unidecode.unidecode(sub.title.lower())
spacesubs = ['\n',]
blanksubs = [
'[\[\(].*[\)\]]',
'/r/.*',
'[0-9]',
'photo.* ',
'o[cs]']
for pattern in spacesubs:
rawtitle = re.sub(pattern, ' ', rawtitle)
for pattern in blanksubs:
rawtitle = re.sub(pattern, '', rawtitle)
rawtitle = " ".join(rawtitle.split())
print rawtitle
print (Rake.run(rawtitle))
def upGoerFive(self, titles):
c = Counter()
for t in titles:
c.update(t)
for word, count in c.most_common(50):
print "{0} :: {1}".format(count, word)
def downloadImage(self, imgurl, imgname, dest=None):
if not dest:
rdest = self.dest
else:
rdest = dest
try:
imgwrite = os.path.join(rdest, imgname)
if not os.path.exists(imgwrite):
r = requests.get(imgurl)
with open(imgwrite, "wb") as code:
code.write(r.content)
self.logger.logEntry(('downloaded ' + imgname), 'progress')
self.results['succeeded'] += 1
return True
else:
self.logger.logEntry(('already have ' + imgname),
'verbose')
self.results['skipped'] += 1
return True
except:
self.logger.logEntry('failed to get: {0} from {1}'.format(
imgurl, imgname), 'verbose')
self.results['failed'] += 1
return None
if __name__ == "__main__":
dt = datetime.date.today().strftime('%m-%d-%y')
logfile = os.path.join('logs', str('reddwall ' + dt + '.log'))
logger = redditLogger(logfile)
site = reddit(logger)
logfile = os.path.join('logs', )
for target in subreddits:
site.getTopLinks(target)
|
VerstandInvictus/ReddWall
|
reddwall.py
|
Python
|
mit
| 3,528
| 0.003118
|
from __future__ import absolute_import
from django.conf.urls import patterns, include, url
from .views import Handler
handler = Handler.as_view()
urlpatterns = patterns('',
url(r'^$', handler, name='feincms_home'),
url(r'^(.*)/$', handler, name='feincms_handler'),
)
|
pjdelport/feincms
|
feincms/views/cbv/urls.py
|
Python
|
bsd-3-clause
| 278
| 0.003597
|
# TODO: replace RPSGame with this class (for clarity)
__author__ = "Paul Council, Joseph Gonzoph, Anand Patel"
__version__ = "sprint1"
__credits__ = ["Greg Richards"]
# imports
from ServerPackage import Game
class RockPaperScissors(Game.Game):
""" this class simulates two players playing a game of rock, paper, scissors """
def __init__(self):
super(RockPaperScissors, self).__init__()
self.name = "Rock-Paper-Scissors"
def get_result(self, moves):
"""
unpack the tuple that was passed as a parameter
:param moves: the tuple of moves that were played between the two players
:type moves: tuple
        :return result: the points earned by each player, respectively
:rtype: tuple
"""
player_one_move, player_two_move = moves
move_one_legal = self.is_legal(player_one_move)
move_two_legal = self.is_legal(player_two_move)
if move_one_legal and move_two_legal:
if player_one_move == player_two_move:
result = (0, 0)
elif (player_one_move == 0 and player_two_move != 1) \
or (player_one_move == 1 and player_two_move != 2) \
or (player_one_move == 2 and player_two_move != 0):
# result is tuple with points each player has earned respectively
result = (1, 0)
else:
result = (0, 1)
elif move_one_legal and not move_two_legal:
result = (1, 0)
elif not move_one_legal and move_two_legal:
result = (0, 1)
else:
result = (0, 0)
return result
def is_legal(self, move):
"""
Checks if the move provided is within the legal list
        :param move: the move that was played
        :type move: int
:return: the result of checking if the moves are legal
:rtype: bool
"""
return isinstance(move, int) and (move in (0, 1, 2))
|
PaulieC/sprint2-Council
|
AvailableGames/RockPaperScissors.py
|
Python
|
apache-2.0
| 2,006
| 0.001994
|
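A quick usage sketch pins down the conventions in `get_result`: moves are the integers 0, 1, 2 (the winner logic matches rock, paper, scissors respectively), the result tuple holds each player's points, and an illegal move forfeits the point to the opponent (assuming the `ServerPackage` base class is importable):

```python
# Usage sketch of the scoring rules implemented above.
game = RockPaperScissors()

assert game.get_result((0, 2)) == (1, 0)    # rock (0) beats scissors (2)
assert game.get_result((1, 1)) == (0, 0)    # identical moves draw
assert game.get_result(("x", 2)) == (0, 1)  # illegal move forfeits the point
```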
import os
import sys
class Widget(object):
"""
Widget is a User Interface (UI) component object. A widget
    object claims a rectangular region for its content and is responsible
for all drawing within that region.
"""
def __init__(self, name, width=50, height=50):
self.name = name
self.resize(width, height)
def size(self):
return (self.width, self.height)
def resize(self, width, height):
self.width, self.height = width, height
|
ishikawa/modipyd
|
examples/widget/002/widget.py
|
Python
|
mit
| 491
| 0.004073
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2011-2012 Charles E. Vejnar
#
# This is free software, licensed under the GNU General Public License v3.
# See /LICENSE for more information.
#
"""
Interface classes with the `Vienna RNA <http://www.tbi.univie.ac.at/RNA>`
executable programs.
"""
import re
import subprocess
import tempfile
try:
from shutil import which
except ImportError:
#: Workaround for Python2.
#: http://stackoverflow.com/a/9877856
import os
def which(pgm):
path = os.getenv('PATH')
for p in path.split(os.path.pathsep):
p = os.path.join(p, pgm)
if os.path.exists(p) and os.access(p, os.X_OK):
return p
class RNAvienna(object):
"""Interface class for RNA programs from Vienna."""
def __init__(self):
if not which("RNAfold"):
raise EnvironmentError("RNAfold Vienna is required for Thermodynamics.")
def fold(self, seq, **kwargs):
return self._fold([seq], 'RNAfold', **kwargs)
def cofold(self, seq1, seq2, **kwargs):
return self._fold([seq1, seq2], 'RNAcofold', **kwargs)
def _fold(self, seqs, prog, **kwargs):
cmd = [format(prog), "--noPS"]
regex = r'.+\n(?P<mfe_structure>\S+) \((?P<mfe>.+)\)'
if 'constraints' in kwargs:
cmd.append('--constraint')
if kwargs.get('partfunc', False):
cmd.append('--partfunc')
if prog == 'RNAfold':
regex += (
r'\n *(?P<efe_structure>\S+) \[(?P<efe>.+)\]'
r'\n(?P<cfe_structure>\S+) \{ *(?P<cfe>\S+) '
r'd=(?P<dist>\S+)\}\n frequency of mfe struc'
r'ture in ensemble (?P<mfe_frequency>\S+); e'
r'nsemble diversity (?P<ensemble_diversity>\S+)'
)
elif prog == 'RNAcofold':
regex += (
r'\n *(?P<efe_structure>\S+) \[(?P<efe>.+)\]'
r'\n frequency of mfe structure in ensemble '
r'(?P<mfe_frequency>\S+) , delta G binding= '
r'*(?P<efe_binding>\S+)'
)
if 'temperature' in kwargs:
cmd.append('--temp=' + str(kwargs.get('temperature')))
p = subprocess.Popen(
cmd,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
cwd=tempfile.gettempdir()
)
stdout, stderr = p.communicate(
'\n'.join(['&'.join(seqs), kwargs.get('constraints', '')]).encode()
)
decoded = re.match(regex, stdout.decode())
result = {}
for k, v in decoded.groupdict().items():
if 'structure' in k:
result[k] = v
else:
result[k] = float(v)
return result
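# --- Usage sketch (editor addition; requires the RNAfold/RNAcofold binaries
# from the Vienna RNA package on PATH) ---
#   rna = RNAvienna()
#   res = rna.fold('GGGAAAUCC', partfunc=True, temperature=37)
#   res['mfe'], res['mfe_structure']    # keys follow the regex groups above
#   res2 = rna.cofold('GGGAAA', 'UUUCCC', partfunc=True)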
|
PrashntS/miRmap
|
mirmap/vienna.py
|
Python
|
gpl-3.0
| 2,499
| 0.008003
|
from rlpy.Representations import IndependentDiscretization
from rlpy.Domains import GridWorld, InfiniteTrackCartPole
import numpy as np
from rlpy.Tools import __rlpy_location__
import os
def test_number_of_cells():
""" Ensure create appropriate # of cells (despite ``discretization``) """
mapDir = os.path.join(__rlpy_location__, "Domains", "GridWorldMaps")
    mapname = os.path.join(mapDir, "4x5.txt")  # 4x5 grid -> 4 + 5 = 9 independent features
domain = GridWorld(mapname=mapname)
rep = IndependentDiscretization(domain, discretization=100)
assert rep.features_num == 9
rep = IndependentDiscretization(domain, discretization=5)
assert rep.features_num == 9
def test_phi_cells():
""" Ensure correct features are activated for corresponding state """
mapDir = os.path.join(__rlpy_location__, "Domains", "GridWorldMaps")
    mapname = os.path.join(mapDir, "4x5.txt")  # 4x5 grid -> 4 + 5 = 9 independent features
domain = GridWorld(mapname=mapname)
rep = IndependentDiscretization(domain)
for r in np.arange(4):
for c in np.arange(5):
phiVec = rep.phi(np.array([r,c]), terminal=False)
assert sum(phiVec) == 2 # 1 for each dimension
assert phiVec[r] == 1 # correct row activated
assert phiVec[4+c] == 1 # correct col activated
def test_continuous_discr():
""" Ensure correct discretization in continuous state spaces """
# NOTE - if possible, test a domain with mixed discr/continuous
domain = InfiniteTrackCartPole.InfTrackCartPole() #2 continuous dims
rep = IndependentDiscretization(domain, discretization=20)
assert rep.features_num == 40
rep = IndependentDiscretization(domain, discretization=50)
assert rep.features_num == 100
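# --- Editor note: these tests are plain functions; they can be collected and
# run with a test runner such as pytest, e.g.
#   pytest tests/test_representations/test_IndependentDiscretization.py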
|
imanolarrieta/RL
|
tests/test_representations/test_IndependentDiscretization.py
|
Python
|
bsd-3-clause
| 1,739
| 0.008626
|
from mieli.api import organization
from django.db import transaction
from django.conf import settings
from agora.api import link
@transaction.atomic
def create(user, **kwargs):
    org = organization.get_by_username(user.username)
    if org is None:
        raise Exception("unknown organization for user '%s'" % user.username)
    lnk = link.get(organization=org, user='agora')
    if lnk is None:
        raise Exception("no Agora Voting's admin link for organization '%s'" % org.domain)
    # Incoming kwargs are intentionally discarded and rebuilt from scratch.
    kwargs = {}
kwargs['username'] = get_agora_username(user)
kwargs['password1'] = kwargs['password2'] = settings.AGORA_DEFAULT_KEY
kwargs['email'] = user.email
kwargs['first_name'] = 'Mieli user'
kwargs['__auth'] = True
r = lnk.post('user/register', **kwargs)
if 'errors' in r:
raise Exception(r['errors'])
login_kwargs = {}
login_kwargs['identification'] = kwargs['username']
login_kwargs['password'] = kwargs['password2']
login_ = login(lnk, **login_kwargs)
link_kwargs = {}
link_kwargs['user'] = kwargs['username']
link_kwargs['token'] = login_['apikey']
link.create(org.domain, **link_kwargs)
def login(lnk, identification, password=settings.AGORA_DEFAULT_KEY):
return lnk.post('user/login', identification=identification, password=password, __session=True)
@transaction.atomic
def delete(user, **kwargs):
    org = organization.get_by_username(user.username)
    if org is None:
        raise Exception("unknown organization for user '%s'" % user.username)
link_kwargs = {}
link_kwargs['user'] = get_agora_username(user)
link.delete(org.domain, **link_kwargs)
def get_agora_username(user):
return user.username.replace('@', '_at_')
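# --- Usage sketch (editor addition; assumes a Django User whose username maps
# to a registered organization, and AGORA_DEFAULT_KEY configured in settings) ---
#   create(some_user)              # registers the matching Agora user and stores a link
#   delete(some_user)              # removes the Agora-side link again
#   get_agora_username(some_user)  # 'alice@example.org' -> 'alice_at_example.org'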
|
pirata-cat/mieli
|
agora/api/user.py
|
Python
|
agpl-3.0
| 1,722
| 0.005226
|
import multiprocessing
import numpy as np
def configure(num_jobs=8, TEST=False, subtract=0, num_proc=None, num_thread_per_proc=None):
'''
num_jobs is typically the # of genes we are parallelizing over
'''
if num_proc is None:
num_proc = multiprocessing.cpu_count() - subtract
if num_jobs > num_proc:
num_jobs = num_proc
if num_thread_per_proc is None:
num_thread_per_proc = int(np.floor(num_proc/num_jobs))
if TEST:
num_jobs = 1
num_thread_per_proc = 1
try:
import mkl
mkl.set_num_threads(num_thread_per_proc)
except ImportError:
print "MKL not available, so I'm not adjusting the number of threads"
print "Launching %d jobs with %d MKL threads each" % (num_jobs, num_thread_per_proc)
return num_jobs
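# --- Usage sketch (editor addition) ---
#   num_jobs = configure(num_jobs=16)
#   # On an 8-core machine this caps num_jobs at 8 and gives each job
#   # floor(8 / 8) = 1 MKL thread; with num_jobs=2 each job would get 4.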
|
mayavanand/RMMAFinalProject
|
build/lib/azimuth/local_multiprocessing.py
|
Python
|
bsd-3-clause
| 821
| 0.003654
|
"""Unit tests for the copy module."""
import copy
import copyreg
import weakref
import abc
from operator import le, lt, ge, gt, eq, ne
import unittest
from test import support
order_comparisons = le, lt, ge, gt
equality_comparisons = eq, ne
comparisons = order_comparisons + equality_comparisons
class TestCopy(unittest.TestCase):
# Attempt full line coverage of copy.py from top to bottom
def test_exceptions(self):
self.assertIs(copy.Error, copy.error)
self.assertTrue(issubclass(copy.Error, Exception))
# The copy() method
def test_copy_basic(self):
x = 42
y = copy.copy(x)
self.assertEqual(x, y)
def test_copy_copy(self):
class C(object):
def __init__(self, foo):
self.foo = foo
def __copy__(self):
return C(self.foo)
x = C(42)
y = copy.copy(x)
self.assertEqual(y.__class__, x.__class__)
self.assertEqual(y.foo, x.foo)
def test_copy_registry(self):
class C(object):
def __new__(cls, foo):
obj = object.__new__(cls)
obj.foo = foo
return obj
def pickle_C(obj):
return (C, (obj.foo,))
x = C(42)
self.assertRaises(TypeError, copy.copy, x)
copyreg.pickle(C, pickle_C, C)
y = copy.copy(x)
def test_copy_reduce_ex(self):
class C(object):
def __reduce_ex__(self, proto):
c.append(1)
return ""
def __reduce__(self):
self.fail("shouldn't call this")
c = []
x = C()
y = copy.copy(x)
self.assertIs(y, x)
self.assertEqual(c, [1])
def test_copy_reduce(self):
class C(object):
def __reduce__(self):
c.append(1)
return ""
c = []
x = C()
y = copy.copy(x)
self.assertIs(y, x)
self.assertEqual(c, [1])
def test_copy_cant(self):
class C(object):
def __getattribute__(self, name):
if name.startswith("__reduce"):
raise AttributeError(name)
return object.__getattribute__(self, name)
x = C()
self.assertRaises(copy.Error, copy.copy, x)
# Type-specific _copy_xxx() methods
def test_copy_atomic(self):
class Classic:
pass
class NewStyle(object):
pass
def f():
pass
class WithMetaclass(metaclass=abc.ABCMeta):
pass
tests = [None, 42, 2**100, 3.14, True, False, 1j,
"hello", "hello\u1234", f.__code__,
b"world", bytes(range(256)),
NewStyle, range(10), Classic, max, WithMetaclass]
for x in tests:
self.assertIs(copy.copy(x), x)
def test_copy_list(self):
x = [1, 2, 3]
self.assertEqual(copy.copy(x), x)
def test_copy_tuple(self):
x = (1, 2, 3)
self.assertEqual(copy.copy(x), x)
def test_copy_dict(self):
x = {"foo": 1, "bar": 2}
self.assertEqual(copy.copy(x), x)
def test_copy_inst_vanilla(self):
class C:
def __init__(self, foo):
self.foo = foo
def __eq__(self, other):
return self.foo == other.foo
x = C(42)
self.assertEqual(copy.copy(x), x)
def test_copy_inst_copy(self):
class C:
def __init__(self, foo):
self.foo = foo
def __copy__(self):
return C(self.foo)
def __eq__(self, other):
return self.foo == other.foo
x = C(42)
self.assertEqual(copy.copy(x), x)
def test_copy_inst_getinitargs(self):
class C:
def __init__(self, foo):
self.foo = foo
def __getinitargs__(self):
return (self.foo,)
def __eq__(self, other):
return self.foo == other.foo
x = C(42)
self.assertEqual(copy.copy(x), x)
def test_copy_inst_getstate(self):
class C:
def __init__(self, foo):
self.foo = foo
def __getstate__(self):
return {"foo": self.foo}
def __eq__(self, other):
return self.foo == other.foo
x = C(42)
self.assertEqual(copy.copy(x), x)
def test_copy_inst_setstate(self):
class C:
def __init__(self, foo):
self.foo = foo
def __setstate__(self, state):
self.foo = state["foo"]
def __eq__(self, other):
return self.foo == other.foo
x = C(42)
self.assertEqual(copy.copy(x), x)
def test_copy_inst_getstate_setstate(self):
class C:
def __init__(self, foo):
self.foo = foo
def __getstate__(self):
return self.foo
def __setstate__(self, state):
self.foo = state
def __eq__(self, other):
return self.foo == other.foo
x = C(42)
self.assertEqual(copy.copy(x), x)
# The deepcopy() method
def test_deepcopy_basic(self):
x = 42
y = copy.deepcopy(x)
self.assertEqual(y, x)
def test_deepcopy_memo(self):
# Tests of reflexive objects are under type-specific sections below.
# This tests only repetitions of objects.
x = []
x = [x, x]
y = copy.deepcopy(x)
self.assertEqual(y, x)
self.assertIsNot(y, x)
self.assertIsNot(y[0], x[0])
self.assertIs(y[0], y[1])
def test_deepcopy_issubclass(self):
# XXX Note: there's no way to test the TypeError coming out of
# issubclass() -- this can only happen when an extension
# module defines a "type" that doesn't formally inherit from
# type.
class Meta(type):
pass
class C(metaclass=Meta):
pass
self.assertEqual(copy.deepcopy(C), C)
def test_deepcopy_deepcopy(self):
class C(object):
def __init__(self, foo):
self.foo = foo
def __deepcopy__(self, memo=None):
return C(self.foo)
x = C(42)
y = copy.deepcopy(x)
self.assertEqual(y.__class__, x.__class__)
self.assertEqual(y.foo, x.foo)
def test_deepcopy_registry(self):
class C(object):
def __new__(cls, foo):
obj = object.__new__(cls)
obj.foo = foo
return obj
def pickle_C(obj):
return (C, (obj.foo,))
x = C(42)
self.assertRaises(TypeError, copy.deepcopy, x)
copyreg.pickle(C, pickle_C, C)
y = copy.deepcopy(x)
def test_deepcopy_reduce_ex(self):
class C(object):
def __reduce_ex__(self, proto):
c.append(1)
return ""
def __reduce__(self):
self.fail("shouldn't call this")
c = []
x = C()
y = copy.deepcopy(x)
self.assertIs(y, x)
self.assertEqual(c, [1])
def test_deepcopy_reduce(self):
class C(object):
def __reduce__(self):
c.append(1)
return ""
c = []
x = C()
y = copy.deepcopy(x)
self.assertIs(y, x)
self.assertEqual(c, [1])
def test_deepcopy_cant(self):
class C(object):
def __getattribute__(self, name):
if name.startswith("__reduce"):
raise AttributeError(name)
return object.__getattribute__(self, name)
x = C()
self.assertRaises(copy.Error, copy.deepcopy, x)
# Type-specific _deepcopy_xxx() methods
def test_deepcopy_atomic(self):
class Classic:
pass
class NewStyle(object):
pass
def f():
pass
tests = [None, 42, 2**100, 3.14, True, False, 1j,
"hello", "hello\u1234", f.__code__,
NewStyle, range(10), Classic, max]
for x in tests:
self.assertIs(copy.deepcopy(x), x)
def test_deepcopy_list(self):
x = [[1, 2], 3]
y = copy.deepcopy(x)
self.assertEqual(y, x)
self.assertIsNot(x, y)
self.assertIsNot(x[0], y[0])
def test_deepcopy_reflexive_list(self):
x = []
x.append(x)
y = copy.deepcopy(x)
for op in comparisons:
self.assertRaises(RuntimeError, op, y, x)
self.assertIsNot(y, x)
self.assertIs(y[0], y)
self.assertEqual(len(y), 1)
def test_deepcopy_empty_tuple(self):
x = ()
y = copy.deepcopy(x)
self.assertIs(x, y)
def test_deepcopy_tuple(self):
x = ([1, 2], 3)
y = copy.deepcopy(x)
self.assertEqual(y, x)
self.assertIsNot(x, y)
self.assertIsNot(x[0], y[0])
def test_deepcopy_tuple_of_immutables(self):
x = ((1, 2), 3)
y = copy.deepcopy(x)
self.assertIs(x, y)
def test_deepcopy_reflexive_tuple(self):
x = ([],)
x[0].append(x)
y = copy.deepcopy(x)
for op in comparisons:
self.assertRaises(RuntimeError, op, y, x)
self.assertIsNot(y, x)
self.assertIsNot(y[0], x[0])
self.assertIs(y[0][0], y)
def test_deepcopy_dict(self):
x = {"foo": [1, 2], "bar": 3}
y = copy.deepcopy(x)
self.assertEqual(y, x)
self.assertIsNot(x, y)
self.assertIsNot(x["foo"], y["foo"])
def test_deepcopy_reflexive_dict(self):
x = {}
x['foo'] = x
y = copy.deepcopy(x)
for op in order_comparisons:
self.assertRaises(TypeError, op, y, x)
for op in equality_comparisons:
self.assertRaises(RuntimeError, op, y, x)
self.assertIsNot(y, x)
self.assertIs(y['foo'], y)
self.assertEqual(len(y), 1)
def test_deepcopy_keepalive(self):
memo = {}
x = []
y = copy.deepcopy(x, memo)
self.assertIs(memo[id(memo)][0], x)
def test_deepcopy_dont_memo_immutable(self):
memo = {}
x = [1, 2, 3, 4]
y = copy.deepcopy(x, memo)
self.assertEqual(y, x)
# There's the entry for the new list, and the keep alive.
self.assertEqual(len(memo), 2)
memo = {}
x = [(1, 2)]
y = copy.deepcopy(x, memo)
self.assertEqual(y, x)
# Tuples with immutable contents are immutable for deepcopy.
self.assertEqual(len(memo), 2)
def test_deepcopy_inst_vanilla(self):
class C:
def __init__(self, foo):
self.foo = foo
def __eq__(self, other):
return self.foo == other.foo
x = C([42])
y = copy.deepcopy(x)
self.assertEqual(y, x)
self.assertIsNot(y.foo, x.foo)
def test_deepcopy_inst_deepcopy(self):
class C:
def __init__(self, foo):
self.foo = foo
def __deepcopy__(self, memo):
return C(copy.deepcopy(self.foo, memo))
def __eq__(self, other):
return self.foo == other.foo
x = C([42])
y = copy.deepcopy(x)
self.assertEqual(y, x)
self.assertIsNot(y, x)
self.assertIsNot(y.foo, x.foo)
def test_deepcopy_inst_getinitargs(self):
class C:
def __init__(self, foo):
self.foo = foo
def __getinitargs__(self):
return (self.foo,)
def __eq__(self, other):
return self.foo == other.foo
x = C([42])
y = copy.deepcopy(x)
self.assertEqual(y, x)
self.assertIsNot(y, x)
self.assertIsNot(y.foo, x.foo)
def test_deepcopy_inst_getstate(self):
class C:
def __init__(self, foo):
self.foo = foo
def __getstate__(self):
return {"foo": self.foo}
def __eq__(self, other):
return self.foo == other.foo
x = C([42])
y = copy.deepcopy(x)
self.assertEqual(y, x)
self.assertIsNot(y, x)
self.assertIsNot(y.foo, x.foo)
def test_deepcopy_inst_setstate(self):
class C:
def __init__(self, foo):
self.foo = foo
def __setstate__(self, state):
self.foo = state["foo"]
def __eq__(self, other):
return self.foo == other.foo
x = C([42])
y = copy.deepcopy(x)
self.assertEqual(y, x)
self.assertIsNot(y, x)
self.assertIsNot(y.foo, x.foo)
def test_deepcopy_inst_getstate_setstate(self):
class C:
def __init__(self, foo):
self.foo = foo
def __getstate__(self):
return self.foo
def __setstate__(self, state):
self.foo = state
def __eq__(self, other):
return self.foo == other.foo
x = C([42])
y = copy.deepcopy(x)
self.assertEqual(y, x)
self.assertIsNot(y, x)
self.assertIsNot(y.foo, x.foo)
def test_deepcopy_reflexive_inst(self):
class C:
pass
x = C()
x.foo = x
y = copy.deepcopy(x)
self.assertIsNot(y, x)
self.assertIs(y.foo, y)
# _reconstruct()
def test_reconstruct_string(self):
class C(object):
def __reduce__(self):
return ""
x = C()
y = copy.copy(x)
self.assertIs(y, x)
y = copy.deepcopy(x)
self.assertIs(y, x)
def test_reconstruct_nostate(self):
class C(object):
def __reduce__(self):
return (C, ())
x = C()
x.foo = 42
y = copy.copy(x)
self.assertIs(y.__class__, x.__class__)
y = copy.deepcopy(x)
self.assertIs(y.__class__, x.__class__)
def test_reconstruct_state(self):
class C(object):
def __reduce__(self):
return (C, (), self.__dict__)
def __eq__(self, other):
return self.__dict__ == other.__dict__
x = C()
x.foo = [42]
y = copy.copy(x)
self.assertEqual(y, x)
y = copy.deepcopy(x)
self.assertEqual(y, x)
self.assertIsNot(y.foo, x.foo)
def test_reconstruct_state_setstate(self):
class C(object):
def __reduce__(self):
return (C, (), self.__dict__)
def __setstate__(self, state):
self.__dict__.update(state)
def __eq__(self, other):
return self.__dict__ == other.__dict__
x = C()
x.foo = [42]
y = copy.copy(x)
self.assertEqual(y, x)
y = copy.deepcopy(x)
self.assertEqual(y, x)
self.assertIsNot(y.foo, x.foo)
def test_reconstruct_reflexive(self):
class C(object):
pass
x = C()
x.foo = x
y = copy.deepcopy(x)
self.assertIsNot(y, x)
self.assertIs(y.foo, y)
# Additions for Python 2.3 and pickle protocol 2
def test_reduce_4tuple(self):
class C(list):
def __reduce__(self):
return (C, (), self.__dict__, iter(self))
def __eq__(self, other):
return (list(self) == list(other) and
self.__dict__ == other.__dict__)
x = C([[1, 2], 3])
y = copy.copy(x)
self.assertEqual(x, y)
self.assertIsNot(x, y)
self.assertIs(x[0], y[0])
y = copy.deepcopy(x)
self.assertEqual(x, y)
self.assertIsNot(x, y)
self.assertIsNot(x[0], y[0])
def test_reduce_5tuple(self):
class C(dict):
def __reduce__(self):
return (C, (), self.__dict__, None, self.items())
def __eq__(self, other):
return (dict(self) == dict(other) and
self.__dict__ == other.__dict__)
x = C([("foo", [1, 2]), ("bar", 3)])
y = copy.copy(x)
self.assertEqual(x, y)
self.assertIsNot(x, y)
self.assertIs(x["foo"], y["foo"])
y = copy.deepcopy(x)
self.assertEqual(x, y)
self.assertIsNot(x, y)
self.assertIsNot(x["foo"], y["foo"])
def test_copy_slots(self):
class C(object):
__slots__ = ["foo"]
x = C()
x.foo = [42]
y = copy.copy(x)
self.assertIs(x.foo, y.foo)
def test_deepcopy_slots(self):
class C(object):
__slots__ = ["foo"]
x = C()
x.foo = [42]
y = copy.deepcopy(x)
self.assertEqual(x.foo, y.foo)
self.assertIsNot(x.foo, y.foo)
def test_deepcopy_dict_subclass(self):
class C(dict):
def __init__(self, d=None):
if not d:
d = {}
self._keys = list(d.keys())
super().__init__(d)
def __setitem__(self, key, item):
super().__setitem__(key, item)
if key not in self._keys:
self._keys.append(key)
x = C(d={'foo':0})
y = copy.deepcopy(x)
self.assertEqual(x, y)
self.assertEqual(x._keys, y._keys)
self.assertIsNot(x, y)
x['bar'] = 1
self.assertNotEqual(x, y)
self.assertNotEqual(x._keys, y._keys)
def test_copy_list_subclass(self):
class C(list):
pass
x = C([[1, 2], 3])
x.foo = [4, 5]
y = copy.copy(x)
self.assertEqual(list(x), list(y))
self.assertEqual(x.foo, y.foo)
self.assertIs(x[0], y[0])
self.assertIs(x.foo, y.foo)
def test_deepcopy_list_subclass(self):
class C(list):
pass
x = C([[1, 2], 3])
x.foo = [4, 5]
y = copy.deepcopy(x)
self.assertEqual(list(x), list(y))
self.assertEqual(x.foo, y.foo)
self.assertIsNot(x[0], y[0])
self.assertIsNot(x.foo, y.foo)
def test_copy_tuple_subclass(self):
class C(tuple):
pass
x = C([1, 2, 3])
self.assertEqual(tuple(x), (1, 2, 3))
y = copy.copy(x)
self.assertEqual(tuple(y), (1, 2, 3))
def test_deepcopy_tuple_subclass(self):
class C(tuple):
pass
x = C([[1, 2], 3])
self.assertEqual(tuple(x), ([1, 2], 3))
y = copy.deepcopy(x)
self.assertEqual(tuple(y), ([1, 2], 3))
self.assertIsNot(x, y)
self.assertIsNot(x[0], y[0])
def test_getstate_exc(self):
class EvilState(object):
def __getstate__(self):
raise ValueError("ain't got no stickin' state")
self.assertRaises(ValueError, copy.copy, EvilState())
def test_copy_function(self):
self.assertEqual(copy.copy(global_foo), global_foo)
def foo(x, y): return x+y
self.assertEqual(copy.copy(foo), foo)
bar = lambda: None
self.assertEqual(copy.copy(bar), bar)
def test_deepcopy_function(self):
self.assertEqual(copy.deepcopy(global_foo), global_foo)
def foo(x, y): return x+y
self.assertEqual(copy.deepcopy(foo), foo)
bar = lambda: None
self.assertEqual(copy.deepcopy(bar), bar)
def _check_weakref(self, _copy):
class C(object):
pass
obj = C()
x = weakref.ref(obj)
y = _copy(x)
self.assertIs(y, x)
del obj
y = _copy(x)
self.assertIs(y, x)
def test_copy_weakref(self):
self._check_weakref(copy.copy)
def test_deepcopy_weakref(self):
self._check_weakref(copy.deepcopy)
def _check_copy_weakdict(self, _dicttype):
class C(object):
pass
a, b, c, d = [C() for i in range(4)]
u = _dicttype()
u[a] = b
u[c] = d
v = copy.copy(u)
self.assertIsNot(v, u)
self.assertEqual(v, u)
self.assertEqual(v[a], b)
self.assertEqual(v[c], d)
self.assertEqual(len(v), 2)
del c, d
self.assertEqual(len(v), 1)
x, y = C(), C()
# The underlying containers are decoupled
v[x] = y
self.assertNotIn(x, u)
def test_copy_weakkeydict(self):
self._check_copy_weakdict(weakref.WeakKeyDictionary)
def test_copy_weakvaluedict(self):
self._check_copy_weakdict(weakref.WeakValueDictionary)
def test_deepcopy_weakkeydict(self):
class C(object):
def __init__(self, i):
self.i = i
a, b, c, d = [C(i) for i in range(4)]
u = weakref.WeakKeyDictionary()
u[a] = b
u[c] = d
# Keys aren't copied, values are
v = copy.deepcopy(u)
self.assertNotEqual(v, u)
self.assertEqual(len(v), 2)
self.assertIsNot(v[a], b)
self.assertIsNot(v[c], d)
self.assertEqual(v[a].i, b.i)
self.assertEqual(v[c].i, d.i)
del c
self.assertEqual(len(v), 1)
def test_deepcopy_weakvaluedict(self):
class C(object):
def __init__(self, i):
self.i = i
a, b, c, d = [C(i) for i in range(4)]
u = weakref.WeakValueDictionary()
u[a] = b
u[c] = d
# Keys are copied, values aren't
v = copy.deepcopy(u)
self.assertNotEqual(v, u)
self.assertEqual(len(v), 2)
(x, y), (z, t) = sorted(v.items(), key=lambda pair: pair[0].i)
self.assertIsNot(x, a)
self.assertEqual(x.i, a.i)
self.assertIs(y, b)
self.assertIsNot(z, c)
self.assertEqual(z.i, c.i)
self.assertIs(t, d)
del x, y, z, t
del d
self.assertEqual(len(v), 1)
def test_deepcopy_bound_method(self):
class Foo(object):
def m(self):
pass
f = Foo()
f.b = f.m
g = copy.deepcopy(f)
self.assertEqual(g.m, g.b)
self.assertIs(g.b.__self__, g)
g.b()
def global_foo(x, y): return x+y
def test_main():
support.run_unittest(TestCopy)
if __name__ == "__main__":
test_main()
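# --- Editor note: the shallow/deep distinction these tests exercise, in brief ---
#   import copy
#   a = [[1, 2], 3]
#   b = copy.copy(a);     b[0] is a[0]    # True  -- shallow copy shares members
#   c = copy.deepcopy(a); c[0] is a[0]    # False -- deep copy duplicates them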
|
ArcherSys/ArcherSys
|
Lib/test/test_copy.py
|
Python
|
mit
| 67,430
| 0.002625
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import hashlib, os
from .main import read, translate
from .jvm.optimization import options
# Hash outputs of all tests in order to easily detect changes between versions
fullhash = b''
for i in range(1, 8):
name = 'test{}'.format(i)
print(name)
    testdir = os.path.join('tests', name)
    rawdex = read(os.path.join(testdir, 'classes.dex'), 'rb')
for bits in range(256):
opts = options.Options(*[bool(bits & (1 << b)) for b in range(8)])
classes, errors = translate(rawdex, opts=opts)
assert not errors
for cls in classes.values():
print('{:08b}'.format(bits), hashlib.sha256(cls).hexdigest())
fullhash = hashlib.sha256(fullhash + cls).digest()
print('done!')
print('Final hash:', hashlib.sha256(fullhash).hexdigest())
|
xtiankisutsa/MARA_Framework
|
tools/enjarify/enjarify/hashtests.py
|
Python
|
lgpl-3.0
| 1,384
| 0.000723
|
#
# BitBake (No)TTY UI Implementation
#
# Handling output to TTYs or files (no TTY)
#
# Copyright (C) 2006-2012 Richard Purdie
#
# SPDX-License-Identifier: GPL-2.0-only
#
from __future__ import division
import os
import sys
import xmlrpc.client as xmlrpclib
import logging
import progressbar
import signal
import bb.msg
import time
import fcntl
import struct
import copy
import atexit
from bb.ui import uihelper
featureSet = [bb.cooker.CookerFeatures.SEND_SANITYEVENTS]
logger = logging.getLogger("BitBake")
interactive = sys.stdout.isatty()
class BBProgress(progressbar.ProgressBar):
def __init__(self, msg, maxval, widgets=None, extrapos=-1, resize_handler=None):
self.msg = msg
self.extrapos = extrapos
if not widgets:
widgets = [progressbar.Percentage(), ' ', progressbar.Bar(), ' ',
progressbar.ETA()]
self.extrapos = 4
if resize_handler:
self._resize_default = resize_handler
else:
self._resize_default = signal.getsignal(signal.SIGWINCH)
progressbar.ProgressBar.__init__(self, maxval, [self.msg + ": "] + widgets, fd=sys.stdout)
def _handle_resize(self, signum=None, frame=None):
progressbar.ProgressBar._handle_resize(self, signum, frame)
if self._resize_default:
self._resize_default(signum, frame)
def finish(self):
progressbar.ProgressBar.finish(self)
if self._resize_default:
signal.signal(signal.SIGWINCH, self._resize_default)
def setmessage(self, msg):
self.msg = msg
self.widgets[0] = msg
def setextra(self, extra):
if self.extrapos > -1:
if extra:
extrastr = str(extra)
if extrastr[0] != ' ':
extrastr = ' ' + extrastr
else:
extrastr = ''
self.widgets[self.extrapos] = extrastr
def _need_update(self):
# We always want the bar to print when update() is called
return True
class NonInteractiveProgress(object):
fobj = sys.stdout
def __init__(self, msg, maxval):
self.msg = msg
self.maxval = maxval
self.finished = False
def start(self, update=True):
self.fobj.write("%s..." % self.msg)
self.fobj.flush()
return self
def update(self, value):
pass
def finish(self):
if self.finished:
return
self.fobj.write("done.\n")
self.fobj.flush()
self.finished = True
def new_progress(msg, maxval):
if interactive:
return BBProgress(msg, maxval)
else:
return NonInteractiveProgress(msg, maxval)
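# Example of the factory in use (values illustrative): both progress classes
# expose the same start/update/finish surface, so call sites never branch on
# interactivity:
#   p = new_progress("Parsing recipes", 100).start()
#   p.update(50)
#   p.finish()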
def pluralise(singular, plural, qty):
    if qty == 1:
        return singular % qty
    else:
        return plural % qty
class InteractConsoleLogFilter(logging.Filter):
def __init__(self, tf, format):
self.tf = tf
self.format = format
def filter(self, record):
if record.levelno == self.format.NOTE and (record.msg.startswith("Running") or record.msg.startswith("recipe ")):
return False
self.tf.clearFooter()
return True
class TerminalFilter(object):
rows = 25
columns = 80
def sigwinch_handle(self, signum, frame):
self.rows, self.columns = self.getTerminalColumns()
if self._sigwinch_default:
self._sigwinch_default(signum, frame)
def getTerminalColumns(self):
def ioctl_GWINSZ(fd):
try:
cr = struct.unpack('hh', fcntl.ioctl(fd, self.termios.TIOCGWINSZ, '1234'))
except:
return None
return cr
cr = ioctl_GWINSZ(sys.stdout.fileno())
if not cr:
try:
fd = os.open(os.ctermid(), os.O_RDONLY)
cr = ioctl_GWINSZ(fd)
os.close(fd)
except:
pass
if not cr:
try:
                cr = (int(os.environ['LINES']), int(os.environ['COLUMNS']))
except:
cr = (25, 80)
return cr
def __init__(self, main, helper, console, errconsole, format, quiet):
self.main = main
self.helper = helper
self.cuu = None
self.stdinbackup = None
self.interactive = sys.stdout.isatty()
self.footer_present = False
self.lastpids = []
self.lasttime = None
self.quiet = quiet
if not self.interactive:
return
try:
import curses
except ImportError:
sys.exit("FATAL: The knotty ui could not load the required curses python module.")
import termios
self.curses = curses
self.termios = termios
try:
fd = sys.stdin.fileno()
self.stdinbackup = termios.tcgetattr(fd)
new = copy.deepcopy(self.stdinbackup)
new[3] = new[3] & ~termios.ECHO
termios.tcsetattr(fd, termios.TCSADRAIN, new)
curses.setupterm()
if curses.tigetnum("colors") > 2:
format.enable_color()
self.ed = curses.tigetstr("ed")
if self.ed:
self.cuu = curses.tigetstr("cuu")
try:
self._sigwinch_default = signal.getsignal(signal.SIGWINCH)
signal.signal(signal.SIGWINCH, self.sigwinch_handle)
except:
pass
self.rows, self.columns = self.getTerminalColumns()
except:
self.cuu = None
if not self.cuu:
self.interactive = False
bb.note("Unable to use interactive mode for this terminal, using fallback")
return
if console:
console.addFilter(InteractConsoleLogFilter(self, format))
if errconsole:
errconsole.addFilter(InteractConsoleLogFilter(self, format))
self.main_progress = None
def clearFooter(self):
if self.footer_present:
lines = self.footer_present
sys.stdout.buffer.write(self.curses.tparm(self.cuu, lines))
sys.stdout.buffer.write(self.curses.tparm(self.ed))
sys.stdout.flush()
self.footer_present = False
def elapsed(self, sec):
hrs = int(sec / 3600.0)
sec -= hrs * 3600
min = int(sec / 60.0)
sec -= min * 60
if hrs > 0:
return "%dh%dm%ds" % (hrs, min, sec)
elif min > 0:
return "%dm%ds" % (min, sec)
else:
return "%ds" % (sec)
def keepAlive(self, t):
if not self.cuu:
print("Bitbake still alive (%ds)" % t)
sys.stdout.flush()
def updateFooter(self):
if not self.cuu:
return
activetasks = self.helper.running_tasks
failedtasks = self.helper.failed_tasks
runningpids = self.helper.running_pids
currenttime = time.time()
if not self.lasttime or (currenttime - self.lasttime > 5):
self.helper.needUpdate = True
self.lasttime = currenttime
if self.footer_present and not self.helper.needUpdate:
return
self.helper.needUpdate = False
if self.footer_present:
self.clearFooter()
if (not self.helper.tasknumber_total or self.helper.tasknumber_current == self.helper.tasknumber_total) and not len(activetasks):
return
tasks = []
for t in runningpids:
progress = activetasks[t].get("progress", None)
if progress is not None:
pbar = activetasks[t].get("progressbar", None)
rate = activetasks[t].get("rate", None)
start_time = activetasks[t].get("starttime", None)
if not pbar or pbar.bouncing != (progress < 0):
if progress < 0:
pbar = BBProgress("0: %s (pid %s) " % (activetasks[t]["title"], t), 100, widgets=[progressbar.BouncingSlider(), ''], extrapos=2, resize_handler=self.sigwinch_handle)
pbar.bouncing = True
else:
pbar = BBProgress("0: %s (pid %s) " % (activetasks[t]["title"], t), 100, widgets=[progressbar.Percentage(), ' ', progressbar.Bar(), ''], extrapos=4, resize_handler=self.sigwinch_handle)
pbar.bouncing = False
activetasks[t]["progressbar"] = pbar
tasks.append((pbar, progress, rate, start_time))
else:
start_time = activetasks[t].get("starttime", None)
if start_time:
tasks.append("%s - %s (pid %s)" % (activetasks[t]["title"], self.elapsed(currenttime - start_time), t))
else:
tasks.append("%s (pid %s)" % (activetasks[t]["title"], t))
if self.main.shutdown:
content = "Waiting for %s running tasks to finish:" % len(activetasks)
print(content)
else:
if self.quiet:
content = "Running tasks (%s of %s)" % (self.helper.tasknumber_current, self.helper.tasknumber_total)
elif not len(activetasks):
content = "No currently running tasks (%s of %s)" % (self.helper.tasknumber_current, self.helper.tasknumber_total)
else:
content = "Currently %2s running tasks (%s of %s)" % (len(activetasks), self.helper.tasknumber_current, self.helper.tasknumber_total)
maxtask = self.helper.tasknumber_total
if not self.main_progress or self.main_progress.maxval != maxtask:
widgets = [' ', progressbar.Percentage(), ' ', progressbar.Bar()]
self.main_progress = BBProgress("Running tasks", maxtask, widgets=widgets, resize_handler=self.sigwinch_handle)
self.main_progress.start(False)
self.main_progress.setmessage(content)
progress = self.helper.tasknumber_current - 1
if progress < 0:
progress = 0
content = self.main_progress.update(progress)
print('')
lines = 1 + int(len(content) / (self.columns + 1))
if self.quiet == 0:
for tasknum, task in enumerate(tasks[:(self.rows - 2)]):
if isinstance(task, tuple):
pbar, progress, rate, start_time = task
if not pbar.start_time:
pbar.start(False)
if start_time:
pbar.start_time = start_time
pbar.setmessage('%s:%s' % (tasknum, pbar.msg.split(':', 1)[1]))
pbar.setextra(rate)
if progress > -1:
content = pbar.update(progress)
else:
content = pbar.update(1)
print('')
else:
content = "%s: %s" % (tasknum, task)
print(content)
lines = lines + 1 + int(len(content) / (self.columns + 1))
self.footer_present = lines
self.lastpids = runningpids[:]
self.lastcount = self.helper.tasknumber_current
def finish(self):
if self.stdinbackup:
fd = sys.stdin.fileno()
self.termios.tcsetattr(fd, self.termios.TCSADRAIN, self.stdinbackup)
def print_event_log(event, includelogs, loglines, termfilter):
# FIXME refactor this out further
logfile = event.logfile
if logfile and os.path.exists(logfile):
termfilter.clearFooter()
bb.error("Logfile of failure stored in: %s" % logfile)
if includelogs and not event.errprinted:
print("Log data follows:")
f = open(logfile, "r")
lines = []
while True:
l = f.readline()
if l == '':
break
l = l.rstrip()
if loglines:
lines.append(' | %s' % l)
if len(lines) > int(loglines):
lines.pop(0)
else:
print('| %s' % l)
f.close()
if lines:
for line in lines:
print(line)
def _log_settings_from_server(server, observe_only):
# Get values of variables which control our output
includelogs, error = server.runCommand(["getVariable", "BBINCLUDELOGS"])
if error:
logger.error("Unable to get the value of BBINCLUDELOGS variable: %s" % error)
raise BaseException(error)
loglines, error = server.runCommand(["getVariable", "BBINCLUDELOGS_LINES"])
if error:
logger.error("Unable to get the value of BBINCLUDELOGS_LINES variable: %s" % error)
raise BaseException(error)
if observe_only:
cmd = 'getVariable'
else:
cmd = 'getSetVariable'
consolelogfile, error = server.runCommand([cmd, "BB_CONSOLELOG"])
if error:
logger.error("Unable to get the value of BB_CONSOLELOG variable: %s" % error)
raise BaseException(error)
return includelogs, loglines, consolelogfile
_evt_list = [ "bb.runqueue.runQueueExitWait", "bb.event.LogExecTTY", "logging.LogRecord",
"bb.build.TaskFailed", "bb.build.TaskBase", "bb.event.ParseStarted",
"bb.event.ParseProgress", "bb.event.ParseCompleted", "bb.event.CacheLoadStarted",
"bb.event.CacheLoadProgress", "bb.event.CacheLoadCompleted", "bb.command.CommandFailed",
"bb.command.CommandExit", "bb.command.CommandCompleted", "bb.cooker.CookerExit",
"bb.event.MultipleProviders", "bb.event.NoProvider", "bb.runqueue.sceneQueueTaskStarted",
"bb.runqueue.runQueueTaskStarted", "bb.runqueue.runQueueTaskFailed", "bb.runqueue.sceneQueueTaskFailed",
"bb.event.BuildBase", "bb.build.TaskStarted", "bb.build.TaskSucceeded", "bb.build.TaskFailedSilent",
"bb.build.TaskProgress", "bb.event.ProcessStarted", "bb.event.ProcessProgress", "bb.event.ProcessFinished"]
def main(server, eventHandler, params, tf = TerminalFilter):
if not params.observe_only:
params.updateToServer(server, os.environ.copy())
includelogs, loglines, consolelogfile = _log_settings_from_server(server, params.observe_only)
if sys.stdin.isatty() and sys.stdout.isatty():
log_exec_tty = True
else:
log_exec_tty = False
helper = uihelper.BBUIHelper()
console = logging.StreamHandler(sys.stdout)
errconsole = logging.StreamHandler(sys.stderr)
format_str = "%(levelname)s: %(message)s"
format = bb.msg.BBLogFormatter(format_str)
if params.options.quiet == 0:
forcelevel = None
elif params.options.quiet > 2:
forcelevel = bb.msg.BBLogFormatter.ERROR
else:
forcelevel = bb.msg.BBLogFormatter.WARNING
bb.msg.addDefaultlogFilter(console, bb.msg.BBLogFilterStdOut, forcelevel)
bb.msg.addDefaultlogFilter(errconsole, bb.msg.BBLogFilterStdErr)
console.setFormatter(format)
errconsole.setFormatter(format)
if not bb.msg.has_console_handler(logger):
logger.addHandler(console)
logger.addHandler(errconsole)
bb.utils.set_process_name("KnottyUI")
if params.options.remote_server and params.options.kill_server:
server.terminateServer()
return
consolelog = None
if consolelogfile and not params.options.show_environment and not params.options.show_versions:
bb.utils.mkdirhier(os.path.dirname(consolelogfile))
conlogformat = bb.msg.BBLogFormatter(format_str)
consolelog = logging.FileHandler(consolelogfile)
bb.msg.addDefaultlogFilter(consolelog)
consolelog.setFormatter(conlogformat)
logger.addHandler(consolelog)
loglink = os.path.join(os.path.dirname(consolelogfile), 'console-latest.log')
bb.utils.remove(loglink)
try:
os.symlink(os.path.basename(consolelogfile), loglink)
except OSError:
pass
llevel, debug_domains = bb.msg.constructLogOptions()
server.runCommand(["setEventMask", server.getEventHandle(), llevel, debug_domains, _evt_list])
universe = False
if not params.observe_only:
params.updateFromServer(server)
cmdline = params.parseActions()
if not cmdline:
print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
return 1
if 'msg' in cmdline and cmdline['msg']:
logger.error(cmdline['msg'])
return 1
if cmdline['action'][0] == "buildTargets" and "universe" in cmdline['action'][1]:
universe = True
ret, error = server.runCommand(cmdline['action'])
if error:
logger.error("Command '%s' failed: %s" % (cmdline, error))
return 1
elif ret != True:
logger.error("Command '%s' failed: returned %s" % (cmdline, ret))
return 1
parseprogress = None
cacheprogress = None
main.shutdown = 0
interrupted = False
return_value = 0
errors = 0
warnings = 0
taskfailures = []
printinterval = 5000
lastprint = time.time()
termfilter = tf(main, helper, console, errconsole, format, params.options.quiet)
atexit.register(termfilter.finish)
while True:
try:
if (lastprint + printinterval) <= time.time():
termfilter.keepAlive(printinterval)
printinterval += 5000
event = eventHandler.waitEvent(0)
if event is None:
if main.shutdown > 1:
break
termfilter.updateFooter()
event = eventHandler.waitEvent(0.25)
if event is None:
continue
helper.eventHandler(event)
if isinstance(event, bb.runqueue.runQueueExitWait):
if not main.shutdown:
main.shutdown = 1
continue
if isinstance(event, bb.event.LogExecTTY):
if log_exec_tty:
tries = event.retries
while tries:
print("Trying to run: %s" % event.prog)
if os.system(event.prog) == 0:
break
time.sleep(event.sleep_delay)
tries -= 1
if tries:
continue
logger.warning(event.msg)
continue
if isinstance(event, logging.LogRecord):
lastprint = time.time()
printinterval = 5000
if event.levelno >= format.ERROR:
errors = errors + 1
return_value = 1
elif event.levelno == format.WARNING:
warnings = warnings + 1
if event.taskpid != 0:
# For "normal" logging conditions, don't show note logs from tasks
# but do show them if the user has changed the default log level to
# include verbose/debug messages
if event.levelno <= format.NOTE and (event.levelno < llevel or (event.levelno == format.NOTE and llevel != format.VERBOSE)):
continue
# Prefix task messages with recipe/task
if event.taskpid in helper.running_tasks and event.levelno != format.PLAIN:
taskinfo = helper.running_tasks[event.taskpid]
event.msg = taskinfo['title'] + ': ' + event.msg
if hasattr(event, 'fn'):
event.msg = event.fn + ': ' + event.msg
logger.handle(event)
continue
if isinstance(event, bb.build.TaskFailedSilent):
logger.warning("Logfile for failed setscene task is %s" % event.logfile)
continue
if isinstance(event, bb.build.TaskFailed):
return_value = 1
print_event_log(event, includelogs, loglines, termfilter)
if isinstance(event, bb.build.TaskBase):
logger.info(event._message)
continue
if isinstance(event, bb.event.ParseStarted):
if params.options.quiet > 1:
continue
if event.total == 0:
continue
parseprogress = new_progress("Parsing recipes", event.total).start()
continue
if isinstance(event, bb.event.ParseProgress):
if params.options.quiet > 1:
continue
if parseprogress:
parseprogress.update(event.current)
else:
bb.warn("Got ParseProgress event for parsing that never started?")
continue
if isinstance(event, bb.event.ParseCompleted):
if params.options.quiet > 1:
continue
if not parseprogress:
continue
parseprogress.finish()
                    parseprogress = None
if params.options.quiet == 0:
print(("Parsing of %d .bb files complete (%d cached, %d parsed). %d targets, %d skipped, %d masked, %d errors."
% ( event.total, event.cached, event.parsed, event.virtuals, event.skipped, event.masked, event.errors)))
continue
if isinstance(event, bb.event.CacheLoadStarted):
if params.options.quiet > 1:
continue
cacheprogress = new_progress("Loading cache", event.total).start()
continue
if isinstance(event, bb.event.CacheLoadProgress):
if params.options.quiet > 1:
continue
cacheprogress.update(event.current)
continue
if isinstance(event, bb.event.CacheLoadCompleted):
if params.options.quiet > 1:
continue
cacheprogress.finish()
if params.options.quiet == 0:
print("Loaded %d entries from dependency cache." % event.num_entries)
continue
if isinstance(event, bb.command.CommandFailed):
return_value = event.exitcode
if event.error:
errors = errors + 1
logger.error(str(event))
main.shutdown = 2
continue
if isinstance(event, bb.command.CommandExit):
if not return_value:
return_value = event.exitcode
continue
if isinstance(event, (bb.command.CommandCompleted, bb.cooker.CookerExit)):
main.shutdown = 2
continue
if isinstance(event, bb.event.MultipleProviders):
logger.info(str(event))
continue
if isinstance(event, bb.event.NoProvider):
# For universe builds, only show these as warnings, not errors
if not universe:
return_value = 1
errors = errors + 1
logger.error(str(event))
else:
logger.warning(str(event))
continue
if isinstance(event, bb.runqueue.sceneQueueTaskStarted):
logger.info("Running setscene task %d of %d (%s)" % (event.stats.completed + event.stats.active + event.stats.failed + 1, event.stats.total, event.taskstring))
continue
if isinstance(event, bb.runqueue.runQueueTaskStarted):
if event.noexec:
tasktype = 'noexec task'
else:
tasktype = 'task'
logger.info("Running %s %d of %d (%s)",
tasktype,
event.stats.completed + event.stats.active +
event.stats.failed + 1,
event.stats.total, event.taskstring)
continue
if isinstance(event, bb.runqueue.runQueueTaskFailed):
return_value = 1
taskfailures.append(event.taskstring)
logger.error(str(event))
continue
if isinstance(event, bb.runqueue.sceneQueueTaskFailed):
logger.warning(str(event))
continue
if isinstance(event, bb.event.DepTreeGenerated):
continue
if isinstance(event, bb.event.ProcessStarted):
if params.options.quiet > 1:
continue
parseprogress = new_progress(event.processname, event.total)
parseprogress.start(False)
continue
if isinstance(event, bb.event.ProcessProgress):
if params.options.quiet > 1:
continue
if parseprogress:
parseprogress.update(event.progress)
else:
bb.warn("Got ProcessProgress event for someting that never started?")
continue
if isinstance(event, bb.event.ProcessFinished):
if params.options.quiet > 1:
continue
if parseprogress:
parseprogress.finish()
parseprogress = None
continue
# ignore
if isinstance(event, (bb.event.BuildBase,
bb.event.MetadataEvent,
bb.event.ConfigParsed,
bb.event.MultiConfigParsed,
bb.event.RecipeParsed,
bb.event.RecipePreFinalise,
bb.runqueue.runQueueEvent,
bb.event.OperationStarted,
bb.event.OperationCompleted,
bb.event.OperationProgress,
bb.event.DiskFull,
bb.event.HeartbeatEvent,
bb.build.TaskProgress)):
continue
logger.error("Unknown event: %s", event)
except EnvironmentError as ioerror:
termfilter.clearFooter()
# ignore interrupted io
if ioerror.args[0] == 4:
continue
sys.stderr.write(str(ioerror))
if not params.observe_only:
_, error = server.runCommand(["stateForceShutdown"])
main.shutdown = 2
except KeyboardInterrupt:
termfilter.clearFooter()
if params.observe_only:
print("\nKeyboard Interrupt, exiting observer...")
main.shutdown = 2
def state_force_shutdown():
print("\nSecond Keyboard Interrupt, stopping...\n")
_, error = server.runCommand(["stateForceShutdown"])
if error:
logger.error("Unable to cleanly stop: %s" % error)
if not params.observe_only and main.shutdown == 1:
state_force_shutdown()
if not params.observe_only and main.shutdown == 0:
print("\nKeyboard Interrupt, closing down...\n")
interrupted = True
# Capture the second KeyboardInterrupt during stateShutdown is running
try:
_, error = server.runCommand(["stateShutdown"])
if error:
logger.error("Unable to cleanly shutdown: %s" % error)
except KeyboardInterrupt:
state_force_shutdown()
main.shutdown = main.shutdown + 1
pass
except Exception as e:
import traceback
sys.stderr.write(traceback.format_exc())
if not params.observe_only:
_, error = server.runCommand(["stateForceShutdown"])
main.shutdown = 2
return_value = 1
try:
termfilter.clearFooter()
summary = ""
if taskfailures:
summary += pluralise("\nSummary: %s task failed:",
"\nSummary: %s tasks failed:", len(taskfailures))
for failure in taskfailures:
summary += "\n %s" % failure
if warnings:
summary += pluralise("\nSummary: There was %s WARNING message shown.",
"\nSummary: There were %s WARNING messages shown.", warnings)
if return_value and errors:
summary += pluralise("\nSummary: There was %s ERROR message shown, returning a non-zero exit code.",
"\nSummary: There were %s ERROR messages shown, returning a non-zero exit code.", errors)
if summary and params.options.quiet == 0:
print(summary)
if interrupted:
print("Execution was interrupted, returning a non-zero exit code.")
if return_value == 0:
return_value = 1
except IOError as e:
import errno
if e.errno == errno.EPIPE:
pass
if consolelog:
logger.removeHandler(consolelog)
consolelog.close()
return return_value
|
schleichdi2/OPENNFR-6.3-CORE
|
bitbake/lib/bb/ui/knotty.py
|
Python
|
gpl-2.0
| 29,579
| 0.002874
|
import pygame
import numpy
def add_noise(surface):
    # Keep a copy with white as the colorkey so the noise only shows through
    # where the mask is transparent once it is blitted back on top.
    mask = surface.copy()
    mask.set_colorkey((255,255,255))
    screen_array = pygame.surfarray.pixels3d(surface)
    # Random per-pixel brightness factors in [0, 1); multiplying in float and
    # casting back avoids the uint8 overflow of an in-place multiply by
    # factors scaled up to 255.
    noise = numpy.random.random((surface.get_width(), surface.get_height()))
    screen_array[...] = (screen_array * noise[:, :, numpy.newaxis]).astype(numpy.uint8)
    del screen_array  # release the lock held by pixels3d before blitting
    surface.blit(mask, (0,0))
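# Example usage (a sketch; pixels3d needs a 24- or 32-bit Surface):
#   surf = pygame.Surface((64, 48))
#   surf.fill((200, 200, 200))
#   add_noise(surf)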
|
erlehmann/noise-invaders
|
pyspaceinvaders_noise.py
|
Python
|
gpl-2.0
| 371
| 0.013477
|
from pyvows import Vows, expect
from minopt import minopt
@Vows.batch
class Minopt(Vows.Context):
def topic(self):
return minopt(['--baz', 'hello', '--foo', 'bar', '-baz',
'--riff=wobble', '--hey', '-ho', 'world'],
{'string': ['a', 'hey', 'w'],
'boolean': ['baz', 'h', 'o', 'q']})
def should_parse_long_arguments(self, topic):
expect(topic['foo']).to_equal('bar')
expect(topic['riff']).to_equal('wobble')
def should_parse_short_arguments(self, topic):
expected = (True, True)
expect((topic['b'], topic['z'])).to_equal(expected)
def should_parse_booleans(self, topic):
expect(topic['baz']).to_be_true()
expect(topic['h']).to_be_true()
expect(topic['o']).to_be_true()
def should_parse_strings(self, topic):
expect(topic['a']).to_be_empty()
expect(topic['hey']).to_be_empty()
def should_parse_unnamed_args(self, topic):
expect(topic['_']).to_equal(['hello', 'world'])
def should_set_all_args(self, topic):
expected = ('', False)
expect((topic['w'], topic['q'])).to_equal(expected)
|
AjayMT/minopt
|
minopt_vows.py
|
Python
|
mit
| 1,187
| 0
|
# Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
# pylint: disable=W0212,W0613
from twisted.internet.defer import Deferred, DeferredList
from twisted.python.failure import Failure
from twisted.trial.unittest import TestCase
import smartanthill.litemq.exchange as ex
from smartanthill.exception import LiteMQResendFailed
class LiteMQCase(TestCase):
g_resent_nums = 0
def test_declare_exchange(self):
for type_, class_ in {"direct": ex.ExchangeDirect,
"fanout": ex.ExchangeFanout}.items():
self.assertIsInstance(
ex.ExchangeFactory().newExchange("exchange_name", type_),
class_
)
self.assertRaises(
AttributeError,
lambda: ex.ExchangeFactory().newExchange("exchange_name",
"unknown-type")
)
def test_queue_ack_success(self):
message, properties = "Test message", {"foo": "bar"}
def _callback(m, p):
self.assertEqual(m, message)
self.assertEqual(p, properties)
return True
def _resback(result):
self.assertIsInstance(result, bool)
self.assertEqual(result, True)
q = ex.Queue("queue_name", "routing_key", _callback, ack=True)
d = q.put(message, properties)
self.assertIsInstance(d, Deferred)
d.addCallbacks(_resback)
return d
def test_queue_ack_fails(self):
self.g_resent_nums, resend_max = 0, 3
def _callback(m, p):
self.g_resent_nums += 1
# test exception
if self.g_resent_nums == 1:
return 1/0
# test "ack-invalid" that is equl to False
else:
return False
def _errback(result):
self.assertIsInstance(result, Failure)
self.assertTrue(result.check(LiteMQResendFailed))
self.assertEqual(resend_max, self.g_resent_nums)
q = ex.Queue("queue_name", "routing_key", _callback, ack=True)
q.RESEND_MAX = resend_max
q.RESEND_DELAY = 0
d = q.put("Test message", {"foo": "bar"})
self.assertIsInstance(d, Deferred)
d.addBoth(_errback)
return d
def test_queue_nonack(self):
self.g_resent_nums, resend_max = 0, 3
def _callback(m, p):
self.g_resent_nums += 1
return 1/0
def _errback(result):
self.assertNotIsInstance(result, Failure)
self.assertIsInstance(result, bool)
self.assertEqual(result, False)
self.assertEqual(self.g_resent_nums, 1)
q = ex.Queue("queue_name", "routing_key", _callback, ack=False)
q.RESEND_MAX = resend_max
q.RESEND_DELAY = 0
d = q.put("Test message", {"foo": "bar"})
self.assertIsInstance(d, Deferred)
d.addBoth(_errback)
return d
def test_exchange_direct(self):
message, properties = "Test message", {"foo": "bar"}
def _callback(m, p):
self.assertEqual(m, message)
self.assertEqual(p, properties)
myex = ex.ExchangeFactory().newExchange("exchange_name", "direct")
myex.bind_queue("queue_name", "routing_key", _callback)
empty_result = myex.publish("invalid_routing_key", message, properties)
self.assertEqual(empty_result, [])
result = myex.publish("routing_key", message, properties)
self.assertIsInstance(result, list)
self.assertEqual(len(result), 1)
d = result[0]
def _resback(result):
self.assertEqual(result, None)
myex.unbind_queue("queue_name")
self.assertEqual(len(myex._queues), 0)
self.assertIsInstance(d, Deferred)
d.addCallbacks(_resback)
return d
def test_exchange_fanout(self):
self.g_resent_nums = 0
message, properties = "Test message", {"foo": "bar"}
def _callback(m, p):
self.g_resent_nums += 1
self.assertEqual(m, message)
self.assertEqual(p, properties)
myex = ex.ExchangeFactory().newExchange("exchange_name", "fanout")
myex.bind_queue("queue_name", "routing_key", _callback)
result = myex.publish("invalid_routing_key", message, properties)
self.assertIsInstance(result, list)
self.assertEqual(len(result), 1)
d1 = result[0]
result = myex.publish("routing_key", message, properties)
self.assertIsInstance(result, list)
self.assertEqual(len(result), 1)
d2 = result[0]
self.assertIsInstance(d1, Deferred)
self.assertIsInstance(d2, Deferred)
dl = DeferredList([d1, d2])
def _resback(result):
self.assertEqual(result, [(True, None), (True, None)])
dl.addCallbacks(_resback)
return dl
|
smartanthill/smartanthill1_0
|
smartanthill/test/test_litemq.py
|
Python
|
mit
| 4,944
| 0
|
"""
Synchronize with OPIMD
General information about opimd is available here (L{http://wiki.openmoko.org/wiki/Opimd}).
This file is part of Pisi.
Pisi is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Pisi is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Pisi. If not, see <http://www.gnu.org/licenses/>
"""
import os.path
import sys,os,re
import dbus, e_dbus
# Allows us to import contact
sys.path.insert(0,os.path.abspath(__file__+"/../.."))
from contacts import contacts
from pisiconstants import *
import pisiprogress
BUSNAME = "org.freesmartphone.opimd"
PATH_CONTACTS = "/org/freesmartphone/PIM/Contacts"
INTERFACE_CONTACTS = "org.freesmartphone.PIM.Contacts"
INTERFACE_QUERY = "org.freesmartphone.PIM.ContactQuery"
INTERFACE_CONTACT = "org.freesmartphone.PIM.Contact"
INTERFACE_FIELDS = "org.freesmartphone.PIM.Fields"
BACKEND_TYPE_SQLITE = "SQLite-Contacts"
CONF_FIELDSUPPORT = "field_support"
"""Type assignment
Available Types are:
objectpath, phonenumber, address, email, name, date, uri, photo, text, longtext, boolean, timezone, number, integer, generic
See U{http://wiki.openmoko.org/wiki/Opimd_redesign#Changes_in_fields} for details.
"""
TYPE_DEFS = {
"Name": 'name',
"Middlename": 'name',
"Surname": 'name',
"Email": 'email',
"Phone": 'phonenumber',
"Mobile phone": 'phonenumber',
"Home phone": 'phonenumber',
"Work phone": 'phonenumber',
"HomeStreet": 'address',
"HomePostalCode": 'address',
"HomeCity": 'address',
"HomeCountry": 'address',
"HomeState": 'address',
"Organisation": 'text',
"BusinessPostalCode": 'address',
"BusinessStreet": 'address',
"BusinessCity": 'address',
"BusinessCountry": 'address',
"BusinessState": 'address',
"Fax phone": 'phonenumber',
"Title": 'text',
"Departement": 'text',
}
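# At save time each (field name, type) pair above is registered with opimd
# through the Fields interface, e.g. fields.AddField('Email', 'email');
# see _checkAndApplyTypes() below.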
class SynchronizationModule(contacts.AbstractContactSynchronizationModule):
"""
    The implementation of the interface L{contacts.AbstractContactSynchronizationModule} for the OPIMD persistence backend
"""
def __init__( self, modulesString, config, configsection, folder, verbose=False, soft=False):
"""
Constructor
Super class constructor (L{contacts.AbstractContactSynchronizationModule.__init__}) is called.
Local variables are initialized.
"""
contacts.AbstractContactSynchronizationModule.__init__(self, verbose, soft, modulesString, config, configsection, "OPIMD")
pisiprogress.getCallback().verbose('contact opimd module loaded using file')
try:
mode = config.get(configsection, CONF_FIELDSUPPORT)
self._fieldSupport = mode and mode.lower() == "true"
except:
self._fieldSupport = True
self._idMappingInternalGlobal = {}
self._idMappingGlobalInternal = {}
def _extractValue(self, atts, attName, contactObject, opimdField):
"""
Supporting function to extract a single attribute value
"""
atts[attName] = contactObject.GetContent().get(opimdField)
if not atts[attName]:
atts[attName] = ''
        if type(atts[attName]) == list:  # returned values may be lists; in that case simply take the first entry
atts[attName] = atts[attName][0]
def load(self):
"""
Loads all attributes for all contact entries from the OPIMD backend
For each entry a new L{contacts.contacts.Contact} instance is created and stored in the instance dictionary L{contacts.AbstractContactSynchronizationModule._allContacts}.
"""
pisiprogress.getCallback().verbose("OPIMD: Loading")
bus = dbus.SystemBus(mainloop = e_dbus.DBusEcoreMainLoop())
dbusObject = bus.get_object(BUSNAME, PATH_CONTACTS)
contactsInterface = dbus.Interface(dbusObject, dbus_interface= INTERFACE_CONTACTS)
query = contactsInterface.Query({})
dbusObject = bus.get_object(BUSNAME, query)
query = dbus.Interface(dbusObject, dbus_interface=INTERFACE_QUERY)
count = query.GetResultCount()
pisiprogress.getCallback().progress.setProgress(20) # we guess that the actual query took up 20 % of the time - the remaining 80 % are taken by parsing the content ...
pisiprogress.getCallback().update('Loading')
i=0
for contact in query.GetMultipleResults(count):
atts = {}
dbusObject = bus.get_object(BUSNAME, contact.get('Path'))
contactObject = dbus.Interface(dbusObject, dbus_interface= INTERFACE_CONTACT)
self._extractValue(atts, 'firstname', contactObject, 'Name')
self._extractValue(atts, 'middlename', contactObject, 'Middlename')
self._extractValue(atts, 'lastname', contactObject, 'Surname')
self._extractValue(atts, 'email', contactObject, 'E-mail')
self._extractValue(atts, 'mobile', contactObject, 'Phone')
if not atts['mobile']:
self._extractValue(atts, 'mobile', contactObject, 'Mobile phone')
self._extractValue(atts, 'phone', contactObject, 'Home phone')
self._extractValue(atts, 'officePhone', contactObject, 'Work phone')
self._extractValue(atts, 'fax', contactObject, 'Fax phone')
self._extractValue(atts, 'title', contactObject, 'Title')
self._extractValue(atts, 'businessOrganisation', contactObject, 'Organisation')
self._extractValue(atts, 'businessDepartment', contactObject, 'Departement')
self._extractValue(atts, 'businessStreet', contactObject, 'BusinessStreet')
self._extractValue(atts, 'businessPostalCode', contactObject, 'BusinessPostalCode')
self._extractValue(atts, 'businessCity', contactObject, 'BusinessCity')
self._extractValue(atts, 'businessCountry', contactObject, 'BusinessCountry')
self._extractValue(atts, 'businessState', contactObject, 'BusinessState')
self._extractValue(atts, 'homeStreet', contactObject, 'HomeStreet')
self._extractValue(atts, 'homePostalCode', contactObject, 'HomePostalCode')
self._extractValue(atts, 'homeCity', contactObject, 'HomeCity')
self._extractValue(atts, 'homeCountry', contactObject, 'HomeCountry')
self._extractValue(atts, 'homeState', contactObject, 'HomeState')
id = contacts.assembleID(atts)
c = contacts.Contact(id, atts)
self._allContacts[id] = c
self._idMappingGlobalInternal[id] = contact.get('Path')
self._idMappingInternalGlobal[contact.get('Path')] = id
i+=1
pisiprogress.getCallback().progress.setProgress(20 + ((i*80) / count))
pisiprogress.getCallback().update('Loading')
def _checkAndApplyTypes(self):
"""
Makes sure that all required types for PISI are available in OPIMD.
"""
bus = dbus.SystemBus(mainloop = e_dbus.DBusEcoreMainLoop())
dbusObject = bus.get_object(BUSNAME, PATH_CONTACTS)
fields = dbus.Interface(dbusObject, dbus_interface=INTERFACE_FIELDS)
for key, value in TYPE_DEFS.items():
if value: # do not proceed if NONE
try:
fields.AddField(key, value)
except:
pass # don't care about that - we have checked that before; can't add twice
def _saveOneEntry(self, fields, fieldName, contact, attribute):
try:
fields[fieldName] = contact.attributes[attribute]
except KeyError:
pass # opimd does not like empty fields; so skip for now
# fields[fieldName] = ""
# if not fields[fieldName]:
# fields[fieldName] = ""
def _saveOperationAdd(self, id):
"""
Making changes permanent: Add a single contact instance to backend
"""
contact = self.getContact(id)
bus = dbus.SystemBus(mainloop = e_dbus.DBusEcoreMainLoop())
dbusObject = bus.get_object(BUSNAME, PATH_CONTACTS)
contacts = dbus.Interface(dbusObject, dbus_interface=INTERFACE_CONTACTS)
if self._fieldSupport:
self._checkAndApplyTypes()
fields = {}
        self._saveOneEntry(fields, 'Name', contact, 'firstname')
        self._saveOneEntry(fields, 'Surname', contact, 'lastname')
        self._saveOneEntry(fields, 'Middlename', contact, 'middlename')
        self._saveOneEntry(fields, 'E-mail', contact, 'email')
        self._saveOneEntry(fields, 'Mobile phone', contact, 'mobile')
        self._saveOneEntry(fields, 'Work phone', contact, 'officePhone')
        self._saveOneEntry(fields, 'Home phone', contact, 'phone')
        self._saveOneEntry(fields, 'Fax phone', contact, 'fax')
        self._saveOneEntry(fields, 'Title', contact, 'title')
        self._saveOneEntry(fields, 'Organisation', contact, 'businessOrganisation')
        self._saveOneEntry(fields, 'Departement', contact, 'businessDepartment')
        self._saveOneEntry(fields, 'BusinessStreet', contact, 'businessStreet')
        self._saveOneEntry(fields, 'BusinessPostalCode', contact, 'businessPostalCode')
        self._saveOneEntry(fields, 'BusinessCity', contact, 'businessCity')
        self._saveOneEntry(fields, 'BusinessCountry', contact, 'businessCountry')
        self._saveOneEntry(fields, 'BusinessState', contact, 'businessState')
        self._saveOneEntry(fields, 'HomeStreet', contact, 'homeStreet')
        self._saveOneEntry(fields, 'HomePostalCode', contact, 'homePostalCode')
        self._saveOneEntry(fields, 'HomeCity', contact, 'homeCity')
        self._saveOneEntry(fields, 'HomeCountry', contact, 'homeCountry')
        self._saveOneEntry(fields, 'HomeState', contact, 'homeState')
contacts.Add(fields)
def _saveOperationDelete(self, id):
"""
Making changes permanent: Remove a single contact instance from backend
"""
path = self._idMappingGlobalInternal[id]
bus = dbus.SystemBus(mainloop = e_dbus.DBusEcoreMainLoop())
dbusObject = bus.get_object(BUSNAME, path)
contactObject = dbus.Interface(dbusObject, dbus_interface= INTERFACE_CONTACT)
contactObject.Delete()
def _saveOperationModify(self, id):
"""
Making changes permanent: Update a single contact instance in backend
"""
self._saveOperationDelete(id)
self._saveOperationAdd(id)
def saveModifications(self):
"""
Save whatever changes have come by
The history of actions for this data source is iterated. For each item in there the corresponding action is carried out on the item in question.
This function is just a dispatcher to one of the three functions L{_saveOperationAdd}, L{_saveOperationDelete} or L{_saveOperationModify}.
"""
pisiprogress.getCallback().verbose("OPIMD module: I apply %d changes now" %(len(self._history)))
i=0
for listItem in self._history:
action = listItem[0]
id = listItem[1]
if action == ACTIONID_ADD:
pisiprogress.getCallback().verbose( "\t\t<opimd> adding %s" %(id))
self._saveOperationAdd(id)
elif action == ACTIONID_DELETE:
pisiprogress.getCallback().verbose("\t\t<opimd> deleting %s" %(id))
self._saveOperationDelete(id)
elif action == ACTIONID_MODIFY:
pisiprogress.getCallback().verbose("\t\t<opimd> replacing %s" %(id))
self._saveOperationModify(id)
i+=1
pisiprogress.getCallback().progress.setProgress(i * 90 / len(self._history))
pisiprogress.getCallback().update('Storing')
pisiprogress.getCallback().progress.setProgress(100)
pisiprogress.getCallback().update('Storing')
|
kichkasch/pisi
|
modules/contacts_opimd.py
|
Python
|
gpl-3.0
| 12,467
| 0.010588
|
#!/usr/bin/env python
import socket
import struct
import stkuser
from stkutil import running, socklist, updateinfo
STK_SERVER_PORT = 9007
STK_MAX_CLIENTS = 30
STK_MAX_PACKET_SIZE = 65535
STK_MAGIC = 'ST'
STK_VERSION = 0x0001
STK_CLIENT_FLAG = 0x00
STK_SERVER_FLAG = 0x01
STK_END = 0x07
COMMANDS = {
'REQ_LOGIN' : 0x01,
'LOGIN' : 0x02,
'KEEPALIVE' : 0x03,
'LOGOUT' : 0x04,
'GET_USER' : 0x05,
'GET_ONLINE_USER' : 0x06,
'GET_USER_INFO' : 0x07,
'GET_GROUP' : 0x08,
'GET_GROUP_INFO' : 0x09,
'SEND_MSG' : 0x0A,
'REPLY_MSG' : 0x0B,
'SEND_GMSG' : 0x0C,
'REPLY_GMSG' : 0x0D,
'END' : 0xFF
}
# user and group information
user = stkuser.StkUsers()
group = stkuser.StkGroups()
class StkPacket:
    def __init__(self, buf):
        # Build the header dict per instance; a class-level dict would be
        # shared (and overwritten) across all packets.
        self.sp = { 'magic':'', 'version':0, 'cmd':0, 'sid':0, 'uid':0,
            'token':0, 'reserve':0, 'flag':0, 'length':0, 'data':'', 'end':0 }
        head = buf[:20]
        self.sp['data'] = buf[20:-1]
        self.sp['end'], = struct.unpack('!B', buf[-1])
self.sp['magic'], \
self.sp['version'], \
self.sp['cmd'], \
self.sp['sid'], \
self.sp['uid'], \
self.sp['token'], \
self.sp['reserve'], \
self.sp['flag'], \
self.sp['length'] = struct.unpack('!2sHHHIIBBH', head)
def check_head_valid(self):
if (self.sp['magic'] != STK_MAGIC \
or self.sp['version'] != STK_VERSION \
or self.sp['flag'] != STK_CLIENT_FLAG \
or self.sp['end'] != STK_END):
return False
else:
return True
def get_stk_uid(self):
return self.sp['uid']
def get_stk_cmd(self):
return self.sp['cmd']
def get_stk_sid(self):
return self.sp['sid']
def get_stk_len(self):
return self.sp['length']
def get_stk_data(self):
return self.sp['data']
def show_stk_head(self):
print 'Magic: %s' % self.sp['magic']
print 'Version: 0x%04x' % self.sp['version']
print 'Command: 0x%04x' % self.sp['cmd']
print 'SessionID: %u' % self.sp['sid']
print 'STKID: %u' % self.sp['uid']
print 'Token: %u' % self.sp['token']
print 'Reserved: %u' % self.sp['reserve']
if self.sp['flag'] == 0:
print 'Client Packet'
else:
print 'Server Packet'
print 'Length: %u' % self.sp['length']
print 'End: 0x%02u' % self.sp['end']
print ''
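# A minimal sketch of building and parsing one packet with the header layout
# used above ('!2sHHHIIBBH' = magic, version, cmd, sid, uid, token, reserve,
# flag, length), followed by the payload and the end byte; all values below
# are illustrative:
def _demo_packet(payload='hi'):
    buf = struct.pack('!2sHHHIIBBH', STK_MAGIC, STK_VERSION,
                      COMMANDS['KEEPALIVE'], 1, 10000, 0, 0,
                      STK_CLIENT_FLAG, len(payload))
    buf += payload + struct.pack('!B', STK_END)
    pkt = StkPacket(buf)
    assert pkt.check_head_valid() and pkt.get_stk_data() == payload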
class StkClient:
sock = None
uid = 0
sid = 0
state = 0
token = 0
reverse = 0
def __init__(self, sock):
self.sock = sock
def stk_set_client(self, uid, sid):
global socklist
self.uid = uid
self.sid = sid
        socklist[uid] = [self.sock, self.state]  # keyed by int uid to match lookups elsewhere
def stk_get_sock(self):
return self.sock
def stk_reqlog_ack(self, data):
buf = struct.pack('!2sHHHIIBBHB', STK_MAGIC, STK_VERSION, COMMANDS['REQ_LOGIN'], self.sid,
self.uid, self.token, self.reverse, STK_SERVER_FLAG, 0, STK_END)
self.sock.send(buf)
def stk_login_ack(self, data):
        global socklist, updateinfo
passwd = user.stk_get_pass(self.uid)
result = 0
passtmp, reversetmp = struct.unpack('!32s64s', data)
passnew = passtmp.strip('\000')
if passwd == 'STK_UNKNOWN_USER':
result = 2
elif self.state == 1:
result = 1
elif passwd == passnew:
print 'STK Client %s(%u) is Login in.' % (user.stk_get_nickname(self.uid), self.uid)
self.state = 1
socklist[self.uid] = [self.sock, self.state]
# Notify ui to update
updateinfo.append([self.uid, u'online'])
result = 0
else:
result = 3
buf = struct.pack('!2sHHHIIBBHBB', STK_MAGIC, STK_VERSION, COMMANDS['LOGIN'], self.sid,
self.uid, self.token, self.reverse, STK_SERVER_FLAG, 1, result, STK_END)
self.sock.send(buf)
def stk_keepalive_ack(self, data):
pass
def stk_getuser_ack(self, data):
global user
uids = user.stk_get_uids()
length = 4 * (len(uids) - 1) + 2
buf = struct.pack('!2sHHHIIBBHH', STK_MAGIC, STK_VERSION, COMMANDS['GET_USER'], self.sid,
self.uid, self.token, self.reverse, STK_SERVER_FLAG, length, len(uids)-1)
for k in uids:
if k == self.uid:
pass
else:
buf += struct.pack('!I', k)
buf += struct.pack('!B', STK_END)
self.sock.send(buf)
def stk_getonlineuser_ack(self, data):
pass
def stk_getuserinfo_ack(self, data):
global user
uid, nickname, city, phone, gender = struct.unpack('!I32s16sIB', data)
uinfo = user.stk_get_userinfo(uid)
length = 4 + 32 + 16 + 4 + 1
buf = struct.pack('!2sHHHIIBBHI32s16sIBB', STK_MAGIC, STK_VERSION, COMMANDS['GET_USER_INFO'], self.sid,
self.uid, self.token, self.reverse, STK_SERVER_FLAG, length, uinfo['uid'],
uinfo['nickname'].encode(), uinfo['city'].encode(), uinfo['phone'], uinfo['gender'], STK_END)
self.sock.send(buf)
def stk_getgroup_ack(self, data):
global group
gids = group.stk_get_gids()
length = 4 * len(gids) + 2
buf = struct.pack('!2sHHHIIBBHH', STK_MAGIC, STK_VERSION, COMMANDS['GET_GROUP'], self.sid,
self.uid, self.token, self.reverse, STK_SERVER_FLAG, length, len(gids))
for k in gids:
buf += struct.pack('!I', k)
buf += struct.pack('!B', STK_END)
self.sock.send(buf)
def stk_getgroupinfo_ack(self, data):
global group
gid, gname, membernum = struct.unpack('!I32sH', data)
ginfo = group.stk_get_groupinfo(gid)
members = ginfo['members'].split('-')
length = 4 + 32 + 2 + 4 * len(members)
buf = struct.pack('!2sHHHIIBBHI32sH', STK_MAGIC, STK_VERSION, COMMANDS['GET_GROUP_INFO'], self.sid,
self.uid, self.token, self.reverse, STK_SERVER_FLAG, length, ginfo['gid'],
ginfo['gname'].encode(), len(members))
for k in members:
buf += struct.pack('!I', int(k))
buf += struct.pack('!B', STK_END)
self.sock.send(buf)
def stk_sendmsg_ack(self, data):
global user, socklist
tmp = data[:4]
msg = data[4:]
uid, = struct.unpack('!I', tmp)
length = 4 + len(msg)
buf = struct.pack('!2sHHHIIBBHI', STK_MAGIC, STK_VERSION, COMMANDS['SEND_MSG'], self.sid,
self.uid, self.token, self.reverse, STK_SERVER_FLAG, length, uid)
buf += msg + struct.pack('!B', STK_END)
psock = socklist[uid][0]
if (psock != -1):
psock.send(buf)
else:
print 'Msg From %s(%u) to %s(%u), \n --- but %s is not online.' \
%(user.stk_get_nickname(self.uid), self.uid, \
user.stk_get_nickname(uid), uid, user.stk_get_nickname(uid))
def stk_sendgmsg_ack(self, data):
global group, socklist
tmp = data[:4]
msg = data[4:]
gid, = struct.unpack('!I', tmp)
length = 4 + len(msg)
buf = struct.pack('!2sHHHIIBBHI', STK_MAGIC, STK_VERSION, COMMANDS['SEND_GMSG'], self.sid,
self.uid, self.token, self.reverse, STK_SERVER_FLAG, length, gid)
buf += msg + struct.pack('!B', STK_END)
ginfo = group.stk_get_groupinfo(gid)
members = ginfo['members'].split('-')
for k in members:
if self.uid == int(k):
continue
psock = socklist[int(k)][0]
if (psock != -1):
psock.send(buf)
else:
                print 'Msg from %s(%u) by %s(%u), \n --- but %s(%u) is not online.' \
% (group.stk_get_groupname(gid), gid, \
user.stk_get_nickname(self.uid), self.uid, \
user.stk_get_nickname(int(k)), int(k))
def stk_socket_thread(t):
c = t[0]
client = StkClient(c)
while 1:
try:
buf = c.recv(STK_MAX_PACKET_SIZE)
# socket.timeout or socket.error or ...
        except socket.timeout:
            global running
            if not running:
                break
        except socket.error:
            # Whatever error happened, just exit
            break
        else:
            size = len(buf)
            if size == -1:
                print 'Recv Socket Error.'
                break
            elif size == 0:
                print 'Peer Socket Shutdown.'
                break
            elif size > STK_MAX_PACKET_SIZE:
                print 'Drop Packet(Too Large).'
                break
else:
pass
h = StkPacket(buf)
#h.show_stk_head()
if (h.check_head_valid() != True):
print 'Bad STK Packet.'
continue
cmd = h.get_stk_cmd()
uid = h.get_stk_uid()
sid = h.get_stk_sid()
data = h.get_stk_data()
length = h.get_stk_len()
del h
if cmd == COMMANDS['REQ_LOGIN']:
client.stk_set_client(uid, sid)
client.stk_reqlog_ack(data)
elif cmd == COMMANDS['LOGIN']:
client.stk_login_ack(data)
elif cmd == COMMANDS['KEEPALIVE']:
client.stk_keepalive_ack(data)
elif cmd == COMMANDS['LOGOUT']:
pass
elif cmd == COMMANDS['GET_USER']:
client.stk_getuser_ack(data)
elif cmd == COMMANDS['GET_ONLINE_USER']:
client.stk_getonlineuser_ack(data)
elif cmd == COMMANDS['GET_USER_INFO']:
client.stk_getuserinfo_ack(data)
elif cmd == COMMANDS['GET_GROUP']:
client.stk_getgroup_ack(data)
elif cmd == COMMANDS['GET_GROUP_INFO']:
client.stk_getgroupinfo_ack(data)
elif cmd == COMMANDS['SEND_MSG']:
client.stk_sendmsg_ack(data)
elif cmd == COMMANDS['REPLY_MSG']:
pass
elif cmd == COMMANDS['SEND_GMSG']:
client.stk_sendgmsg_ack(data)
elif cmd == COMMANDS['REPLY_GMSG']:
pass
else:
                print 'Unknown Command, Drop.'
pass
    c.close()
# Notify ui to update
global socklist, updateinfo
socklist[uid] = [None, 0]
updateinfo.append([uid, u'offline'])
print 'Client socket thread exiting...'
|
sharmer/sixtalk
|
stkserver/python/stksocket.py
|
Python
|
gpl-2.0
| 8,920
| 0.042377
|
# -*- coding: utf-8 -*-
#
# Name: Yubico Python Client
# Description: Python class for verifying Yubico One Time Passwords (OTPs).
#
# Author: Tomaž Muraus (http://www.tomaz-muraus.info)
# License: BSD
#
# Requirements:
# - Python >= 2.5
import re
import os
import sys
import time
import socket
import urllib
import urllib2
import hmac
import base64
import hashlib
import threading
import logging
import traceback
from otp import OTP
from yubico_exceptions import *
try:
import httplib_ssl
except ImportError:
httplib_ssl = None
logger = logging.getLogger('face')
FORMAT = '%(asctime)-15s [%(levelname)s] %(message)s'
logging.basicConfig(format=FORMAT)
API_URLS = ('api.yubico.com/wsapi/2.0/verify',
'api2.yubico.com/wsapi/2.0/verify',
'api3.yubico.com/wsapi/2.0/verify',
'api4.yubico.com/wsapi/2.0/verify',
'api5.yubico.com/wsapi/2.0/verify')
DEFAULT_TIMEOUT = 10 # How long to wait before the time out occurs
DEFAULT_MAX_TIME_WINDOW = 40  # Maximum time between generating the first
                              # and last OTP for the set to still be
                              # considered valid (only used in the multi
                              # mode); measured in 1/8 s ticks, so the
                              # default is 5 seconds (40 * 0.125 = 5)
BAD_STATUS_CODES = ['BAD_OTP', 'REPLAYED_OTP', 'BAD_SIGNATURE',
'MISSING_PARAMETER', 'OPERATION_NOT_ALLOWED',
'BACKEND_ERROR', 'NOT_ENOUGH_ANSWERS',
'REPLAYED_REQUEST']
class Yubico():
def __init__(self, client_id, key=None, use_https=True, verify_cert=False,
translate_otp=True):
if use_https and not httplib_ssl:
raise Exception('SSL support not available')
if use_https and httplib_ssl and httplib_ssl.CA_CERTS == '':
raise Exception('If you want to validate server certificate,'
' you need to set CA_CERTS '
'variable in the httplib_ssl.py file pointing '
'to a file which contains a list of trusted CA '
'certificates')
self.client_id = client_id
self.key = base64.b64decode(key) if key is not None else None
self.use_https = use_https
self.verify_cert = verify_cert
self.translate_otp = translate_otp
def verify(self, otp, timestamp=False, sl=None, timeout=None,
return_response=False):
"""
        Returns True if the provided OTP is valid,
        False if the REPLAYED_OTP status value is returned or the response
        message signature verification failed, and None for the rest of the
        status values.
"""
otp = OTP(otp, self.translate_otp)
nonce = base64.b64encode(os.urandom(30), 'xz')[:25]
query_string = self.generate_query_string(otp.otp, nonce, timestamp,
sl, timeout)
request_urls = self.generate_request_urls()
threads = []
timeout = timeout or DEFAULT_TIMEOUT
for url in request_urls:
thread = URLThread('%s?%s' % (url, query_string), timeout,
self.verify_cert)
thread.start()
threads.append(thread)
# Wait for a first positive or negative response
start_time = time.time()
while threads and (start_time + timeout) > time.time():
for thread in threads:
if not thread.is_alive() and thread.response:
status = self.verify_response(thread.response,
return_response)
if status:
if return_response:
return status
else:
return True
threads.remove(thread)
# Timeout or no valid response received
raise Exception('NO_VALID_ANSWERS')
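    # Typical call (client id and OTP below are illustrative): verification
    # fans out to all API_URLS in parallel threads and the first definitive
    # answer wins.
    #   client = Yubico('12345', key=None, use_https=False)
    #   client.verify('ccccccbtbhnhbvfdlnvnnbugrehdkhuufbblhjkurtv')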
def verify_multi(self, otp_list=None, max_time_window=None, sl=None,
timeout=None):
# Create the OTP objects
otps = []
for otp in otp_list:
otps.append(OTP(otp, self.translate_otp))
device_ids = set()
for otp in otps:
device_ids.add(otp.device_id)
# Check that all the OTPs contain same device id
if len(device_ids) != 1:
raise Exception('OTPs contain different device ids')
        # Now we verify the OTPs and save the server response for each OTP.
        # We need the server response to retrieve the timestamp. It's
        # possible to compute this value locally, without querying the
        # server, but the user would then need to provide the AES key.
for otp in otps:
response = self.verify(otp.otp, True, sl, timeout,
return_response=True)
if not response:
return False
otp.timestamp = int(response['timestamp'])
count = len(otps)
delta = otps[count - 1].timestamp - otps[0].timestamp
if max_time_window:
max_time_window = (max_time_window / 0.125)
else:
max_time_window = DEFAULT_MAX_TIME_WINDOW
if delta > max_time_window:
            raise Exception('More than %s seconds have passed between '
                            'generating the first and the last OTP.' %
                            (max_time_window * 0.125))
return True
def verify_response(self, response, return_response=False):
"""
Returns True if the OTP is valid (status=OK) and return_response=False,
otherwise (return_response = True) it returns the server response as a
dictionary.
Throws an exception if the OTP is replayed, the server response message
verification failed or the client id is invalid, returns False
otherwise.
"""
try:
status = re.search(r'status=([a-zA-Z0-9_]+)', response) \
.groups()[0]
except (AttributeError, IndexError):
return False
# Secret key is specified, so we verify the response message
# signature
if self.key:
signature, parameters = \
self.parse_parameters_from_response(response)
generated_signature = \
self.generate_message_signature(parameters)
# Signature located in the response does not match the one we
# have generated
if signature != generated_signature:
raise SignatureVerificationError(generated_signature,
signature)
if status == 'OK':
if return_response:
query_string = self.parse_parameters_from_response(response)[1]
response = self.get_parameters_as_dictionary(query_string)
return response
else:
return True
elif status == 'NO_SUCH_CLIENT':
raise InvalidClientIdError(self.client_id)
elif status == 'REPLAYED_OTP':
raise StatusCodeError(status)
return False
def generate_query_string(self, otp, nonce, timestamp=False, sl=None,
timeout=None):
"""
Returns a query string which is sent to the validation servers.
"""
data = [('id', self.client_id),
('otp', otp),
('nonce', nonce)]
if timestamp:
data.append(('timestamp', '1'))
if sl:
if sl not in range(0, 101) and sl not in ['fast', 'secure']:
raise Exception('sl parameter value must be between 0 and '
'100 or string "fast" or "secure"')
data.append(('sl', sl))
if timeout:
data.append(('timeout', timeout))
query_string = urllib.urlencode(data)
if self.key:
hmac_signature = self.generate_message_signature(query_string)
query_string += '&h=%s' % (hmac_signature.replace('+', '%2B'))
return query_string
def generate_message_signature(self, query_string):
"""
Returns a HMAC-SHA-1 signature for the given query string.
http://goo.gl/R4O0E
"""
pairs = query_string.split('&')
pairs = [pair.split('=') for pair in pairs]
pairs_sorted = sorted(pairs)
pairs_string = '&' . join(['=' . join(pair) for pair in pairs_sorted])
digest = hmac.new(self.key, pairs_string, hashlib.sha1).digest()
signature = base64.b64encode(digest)
return signature
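    # Sketch of the signing rule (hypothetical key and query string): for
    # 'otp=x&id=1&nonce=abc' the pairs are sorted to 'id=1&nonce=abc&otp=x'
    # before the HMAC-SHA-1, so parameter order on the wire never affects
    # the signature value.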
def parse_parameters_from_response(self, response):
"""
Returns a response signature and query string generated from the
server response.
"""
splitted = [pair.strip() for pair in response.split('\n')
if pair.strip() != '']
signature = splitted[0].replace('h=', '')
query_string = '&' . join(splitted[1:])
return (signature, query_string)
def get_parameters_as_dictionary(self, query_string):
""" Returns query string parameters as a dictionary. """
dictionary = dict([parameter.split('=') for parameter \
in query_string.split('&')])
return dictionary
def generate_request_urls(self):
"""
Returns a list of the API URLs.
"""
urls = []
for url in API_URLS:
if self.use_https:
url = 'https://%s' % (url)
else:
url = 'http://%s' % (url)
urls.append(url)
return urls
class URLThread(threading.Thread):
def __init__(self, url, timeout, verify_cert):
super(URLThread, self).__init__()
self.url = url
self.timeout = timeout
self.verify_cert = verify_cert
self.request = None
self.response = None
if int(sys.version[0]) == 2 and int(sys.version[2]) <= 5:
self.is_alive = self.isAlive
def run(self):
logger.debug('Sending HTTP request to %s (thread=%s)' % (self.url,
self.name))
socket.setdefaulttimeout(self.timeout)
if self.url.startswith('https') and self.verify_cert:
handler = httplib_ssl.VerifiedHTTPSHandler()
opener = urllib2.build_opener(handler)
urllib2.install_opener(opener)
try:
self.request = urllib2.urlopen(self.url)
self.response = self.request.read()
except Exception:
self.response = None
logger.debug('Received response from %s (thread=%s): %s' % (self.url,
self.name,
self.response))
|
meddius/yubisaslauthd
|
yubico/yubico.py
|
Python
|
bsd-3-clause
| 11,064
| 0.001085
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import psycopg2
import json
import urllib
import urllib2
import sys
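# Usage (assumes one SQL statement per line in the input file; the
# connection parameters are hardcoded below):
#   python executeSql.py statements.sql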
con = None
con = psycopg2.connect("host='54.227.245.32' port='5432' dbname='pmt' user='postgres' password='postgres'")
cnt = 0
sqlFile = sys.argv[1]
print 'SQL file:', sys.argv[1]
try:
cur = con.cursor()
for line in file(sqlFile, 'r'):
cnt = cnt + 1
cur.execute(line.strip())
print cnt
con.commit()
except Exception, e:
print 'Error %s' % e
finally:
if con:
con.close()
|
spatialdev/PMT
|
Database/Installation/executeSql.py
|
Python
|
apache-2.0
| 539
| 0.040816
|
'''
Adapted from https://github.com/tornadomeet/ResNet/blob/master/symbol_resnet.py
(Original author Wei Wu) by Antti-Pekka Hynninen
Implementing the original resnet ILSVRC 2015 winning network from:
Kaiming He, Xiangyu Zhang, Shaoqing Ren, Jian Sun. "Deep Residual Learning for Image Recognition"
'''
import mxnet as mx
def residual_unit(data, num_filter, stride, dim_match, name, bottle_neck=True, bn_mom=0.9, workspace=256, memonger=False):
"""Return ResNet Unit symbol for building ResNet
Parameters
----------
data : str
Input data
num_filter : int
Number of output channels
    bottle_neck : Boolean
        Whether to use the bottleneck (1x1-3x3-1x1) variant of the unit
stride : tuple
Stride used in convolution
dim_match : Boolean
True means channel number between input and output is the same, otherwise means differ
name : str
Base name of the operators
workspace : int
Workspace used in convolution operator
"""
if bottle_neck:
conv1 = mx.sym.Convolution(data=data, num_filter=int(num_filter*0.25), kernel=(1,1), stride=stride, pad=(0,0),
no_bias=True, workspace=workspace, name=name + '_conv1')
bn1 = mx.sym.BatchNorm(data=conv1, fix_gamma=False, eps=2e-5, momentum=bn_mom, name=name + '_bn1')
act1 = mx.sym.Activation(data=bn1, act_type='relu', name=name + '_relu1')
conv2 = mx.sym.Convolution(data=act1, num_filter=int(num_filter*0.25), kernel=(3,3), stride=(1,1), pad=(1,1),
no_bias=True, workspace=workspace, name=name + '_conv2')
bn2 = mx.sym.BatchNorm(data=conv2, fix_gamma=False, eps=2e-5, momentum=bn_mom, name=name + '_bn2')
act2 = mx.sym.Activation(data=bn2, act_type='relu', name=name + '_relu2')
conv3 = mx.sym.Convolution(data=act2, num_filter=num_filter, kernel=(1,1), stride=(1,1), pad=(0,0), no_bias=True,
workspace=workspace, name=name + '_conv3')
bn3 = mx.sym.BatchNorm(data=conv3, fix_gamma=False, eps=2e-5, momentum=bn_mom, name=name + '_bn3')
if dim_match:
shortcut = data
else:
conv1sc = mx.sym.Convolution(data=data, num_filter=num_filter, kernel=(1,1), stride=stride, no_bias=True,
workspace=workspace, name=name+'_conv1sc')
shortcut = mx.sym.BatchNorm(data=conv1sc, fix_gamma=False, eps=2e-5, momentum=bn_mom, name=name + '_sc')
if memonger:
shortcut._set_attr(mirror_stage='True')
return mx.sym.Activation(data=bn3 + shortcut, act_type='relu', name=name + '_relu3')
else:
conv1 = mx.sym.Convolution(data=data, num_filter=num_filter, kernel=(3,3), stride=stride, pad=(1,1),
no_bias=True, workspace=workspace, name=name + '_conv1')
bn1 = mx.sym.BatchNorm(data=conv1, fix_gamma=False, momentum=bn_mom, eps=2e-5, name=name + '_bn1')
act1 = mx.sym.Activation(data=bn1, act_type='relu', name=name + '_relu1')
conv2 = mx.sym.Convolution(data=act1, num_filter=num_filter, kernel=(3,3), stride=(1,1), pad=(1,1),
no_bias=True, workspace=workspace, name=name + '_conv2')
bn2 = mx.sym.BatchNorm(data=conv2, fix_gamma=False, momentum=bn_mom, eps=2e-5, name=name + '_bn2')
if dim_match:
shortcut = data
else:
conv1sc = mx.sym.Convolution(data=data, num_filter=num_filter, kernel=(1,1), stride=stride, no_bias=True,
workspace=workspace, name=name+'_conv1sc')
shortcut = mx.sym.BatchNorm(data=conv1sc, fix_gamma=False, momentum=bn_mom, eps=2e-5, name=name + '_sc')
if memonger:
shortcut._set_attr(mirror_stage='True')
return mx.sym.Activation(data=bn2 + shortcut, act_type='relu', name=name + '_relu3')
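# Shape sketch for one bottleneck unit with num_filter=256 and stride=(2, 2):
# the strided 1x1 conv reduces to 64 channels and halves H/W, the 3x3 conv
# keeps 64 channels, the final 1x1 expands back to 256, and when dim_match is
# False a strided 1x1 projection brings the shortcut to the same shape before
# the elementwise add.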
def resnet(units, num_stages, filter_list, num_classes, image_shape, bottle_neck=True, bn_mom=0.9, workspace=256, memonger=False):
"""Return ResNet symbol of
Parameters
----------
units : list
Number of units in each stage
num_stages : int
Number of stage
filter_list : list
Channel size of each stage
num_classes : int
        Output size of symbol
    image_shape : tuple
        Input image shape as (channels, height, width)
workspace : int
Workspace used in convolution operator
"""
num_unit = len(units)
assert(num_unit == num_stages)
data = mx.sym.Variable(name='data')
data = mx.sym.identity(data=data, name='id')
(nchannel, height, width) = image_shape
if height <= 32: # such as cifar10
body = mx.sym.Convolution(data=data, num_filter=filter_list[0], kernel=(3, 3), stride=(1,1), pad=(1, 1),
no_bias=True, name="conv0", workspace=workspace)
# Is this BatchNorm supposed to be here?
body = mx.sym.BatchNorm(data=body, fix_gamma=False, eps=2e-5, momentum=bn_mom, name='bn0')
else: # often expected to be 224 such as imagenet
body = mx.sym.Convolution(data=data, num_filter=filter_list[0], kernel=(7, 7), stride=(2,2), pad=(3, 3),
no_bias=True, name="conv0", workspace=workspace)
body = mx.sym.BatchNorm(data=body, fix_gamma=False, eps=2e-5, momentum=bn_mom, name='bn0')
body = mx.sym.Activation(data=body, act_type='relu', name='relu0')
body = mx.symbol.Pooling(data=body, kernel=(3, 3), stride=(2,2), pad=(1,1), pool_type='max')
for i in range(num_stages):
body = residual_unit(body, filter_list[i+1], (1 if i==0 else 2, 1 if i==0 else 2), False,
name='stage%d_unit%d' % (i + 1, 1), bottle_neck=bottle_neck, workspace=workspace,
memonger=memonger)
for j in range(units[i]-1):
body = residual_unit(body, filter_list[i+1], (1,1), True, name='stage%d_unit%d' % (i + 1, j + 2),
bottle_neck=bottle_neck, workspace=workspace, memonger=memonger)
# bn1 = mx.sym.BatchNorm(data=body, fix_gamma=False, eps=2e-5, momentum=bn_mom, name='bn1')
# relu1 = mx.sym.Activation(data=bn1, act_type='relu', name='relu1')
    # Although the kernel is not used when global_pool=True, we still need to specify one
pool1 = mx.symbol.Pooling(data=body, global_pool=True, kernel=(7, 7), pool_type='avg', name='pool1')
flat = mx.symbol.Flatten(data=pool1)
fc1 = mx.symbol.FullyConnected(data=flat, num_hidden=num_classes, name='fc1')
return mx.symbol.SoftmaxOutput(data=fc1, name='softmax')
def get_symbol(num_classes, num_layers, image_shape, conv_workspace=256, **kwargs):
"""
Adapted from https://github.com/tornadomeet/ResNet/blob/master/symbol_resnet.py
(Original author Wei Wu) by Antti-Pekka Hynninen
Implementing the original resnet ILSVRC 2015 winning network from:
Kaiming He, Xiangyu Zhang, Shaoqing Ren, Jian Sun. "Deep Residual Learning for Image Recognition"
"""
image_shape = [int(l) for l in image_shape.split(',')]
(nchannel, height, width) = image_shape
if height <= 28:
num_stages = 3
if (num_layers-2) % 9 == 0 and num_layers >= 164:
per_unit = [(num_layers-2)//9]
filter_list = [16, 64, 128, 256]
bottle_neck = True
elif (num_layers-2) % 6 == 0 and num_layers < 164:
per_unit = [(num_layers-2)//6]
filter_list = [16, 16, 32, 64]
bottle_neck = False
else:
raise ValueError("no experiments done on num_layers {}, you can do it youself".format(num_layers))
units = per_unit * num_stages
else:
if num_layers >= 50:
filter_list = [64, 256, 512, 1024, 2048]
bottle_neck = True
else:
filter_list = [64, 64, 128, 256, 512]
bottle_neck = False
num_stages = 4
if num_layers == 18:
units = [2, 2, 2, 2]
elif num_layers == 34:
units = [3, 4, 6, 3]
elif num_layers == 50:
units = [3, 4, 6, 3]
elif num_layers == 101:
units = [3, 4, 23, 3]
elif num_layers == 152:
units = [3, 8, 36, 3]
elif num_layers == 200:
units = [3, 24, 36, 3]
elif num_layers == 269:
units = [3, 30, 48, 8]
else:
raise ValueError("no experiments done on num_layers {}, you can do it youself".format(num_layers))
return resnet(units = units,
num_stages = num_stages,
filter_list = filter_list,
num_classes = num_classes,
image_shape = image_shape,
bottle_neck = bottle_neck,
workspace = conv_workspace)
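if __name__ == '__main__':
    # Minimal usage sketch, not part of the original symbol definition
    # (assumes mxnet is installed; the class count and input shape below
    # are illustrative only): build a ResNet-50 symbol for 1000 classes.
    sym = get_symbol(num_classes=1000, num_layers=50, image_shape='3,224,224')
    print(sym.list_arguments()[:8])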
|
coder-james/mxnet
|
example/image-classification/symbols/resnet-v1.py
|
Python
|
apache-2.0
| 8,907
| 0.010666
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.exceptions import ValidationError
from django.forms.util import flatatt, ErrorDict, ErrorList
from django.test import TestCase
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy
class FormsUtilTestCase(TestCase):
# Tests for forms/util.py module.
def test_flatatt(self):
###########
# flatatt #
###########
self.assertEqual(flatatt({'id': "header"}), ' id="header"')
self.assertEqual(flatatt({'class': "news", 'title': "Read this"}), ' class="news" title="Read this"')
self.assertEqual(flatatt({}), '')
def test_validation_error(self):
###################
# ValidationError #
###################
# Can take a string.
self.assertHTMLEqual(str(ErrorList(ValidationError("There was an error.").messages)),
'<ul class="errorlist"><li>There was an error.</li></ul>')
# Can take a unicode string.
self.assertHTMLEqual(unicode(ErrorList(ValidationError("Not \u03C0.").messages)),
'<ul class="errorlist"><li>Not π.</li></ul>')
# Can take a lazy string.
self.assertHTMLEqual(str(ErrorList(ValidationError(ugettext_lazy("Error.")).messages)),
'<ul class="errorlist"><li>Error.</li></ul>')
# Can take a list.
self.assertHTMLEqual(str(ErrorList(ValidationError(["Error one.", "Error two."]).messages)),
'<ul class="errorlist"><li>Error one.</li><li>Error two.</li></ul>')
# Can take a mixture in a list.
self.assertHTMLEqual(str(ErrorList(ValidationError(["First error.", "Not \u03C0.", ugettext_lazy("Error.")]).messages)),
'<ul class="errorlist"><li>First error.</li><li>Not π.</li><li>Error.</li></ul>')
class VeryBadError:
def __unicode__(self): return "A very bad error."
# Can take a non-string.
self.assertHTMLEqual(str(ErrorList(ValidationError(VeryBadError()).messages)),
'<ul class="errorlist"><li>A very bad error.</li></ul>')
# Escapes non-safe input but not input marked safe.
example = 'Example of link: <a href="http://www.example.com/">example</a>'
self.assertHTMLEqual(str(ErrorList([example])),
'<ul class="errorlist"><li>Example of link: <a href="http://www.example.com/">example</a></li></ul>')
self.assertHTMLEqual(str(ErrorList([mark_safe(example)])),
'<ul class="errorlist"><li>Example of link: <a href="http://www.example.com/">example</a></li></ul>')
self.assertHTMLEqual(str(ErrorDict({'name': example})),
'<ul class="errorlist"><li>nameExample of link: <a href="http://www.example.com/">example</a></li></ul>')
self.assertHTMLEqual(str(ErrorDict({'name': mark_safe(example)})),
'<ul class="errorlist"><li>nameExample of link: <a href="http://www.example.com/">example</a></li></ul>')
|
rebost/django
|
tests/regressiontests/forms/tests/util.py
|
Python
|
bsd-3-clause
| 3,188
| 0.008475
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Grappler LayoutOptimizer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.core.protobuf import config_pb2
from tensorflow.core.protobuf import rewriter_config_pb2
from tensorflow.python.client import session
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import random_seed
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import random_ops
from tensorflow.python.platform import test
def weight(shape):
"""weights generates a weight of a given shape."""
return random_ops.truncated_normal(shape, seed=0, stddev=0.1)
def bias(shape):
"""bias generates a bias of a given shape."""
return constant_op.constant(0.1, shape=shape)
def conv2d(x, w):
"""conv2d returns a 2d convolution layer with full stride."""
return nn.conv2d(x, w, strides=[1, 1, 1, 1], padding='SAME')
def max_pool_2x2(x):
"""max_pool_2x2 downsamples a feature map by 2X."""
return nn.max_pool(
x, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')
# Taken from tensorflow/examples/tutorials/mnist/mnist_deep.py
def two_layer_model():
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
x_image = array_ops.reshape(x, [-1, 28, 28, 1])
w_conv1 = weight([5, 5, 1, 32])
b_conv1 = bias([32])
h_conv1 = nn.relu(conv2d(x_image, w_conv1) + b_conv1)
h_pool1 = max_pool_2x2(h_conv1)
w_conv2 = weight([5, 5, 32, 64])
b_conv2 = bias([64])
h_conv2 = nn.relu(conv2d(h_pool1, w_conv2) + b_conv2)
h_pool2 = max_pool_2x2(h_conv2)
return h_pool2
class LayoutOptimizerTest(test.TestCase):
"""Tests the Grappler layout optimizer."""
def testTwoConvLayers(self):
if test.is_gpu_available(cuda_only=True):
output = two_layer_model()
with session.Session() as sess:
output_val_ref = sess.run(output)
rewrite_options = rewriter_config_pb2.RewriterConfig(
optimize_tensor_layout=True)
graph_options = config_pb2.GraphOptions(
rewrite_options=rewrite_options,
build_cost_model=1)
config = config_pb2.ConfigProto(graph_options=graph_options)
with session.Session(config=config) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if node.name.startswith('LayoutOptimizerTranspose'):
num_transposes += 1
nodes.append(node.name)
# Four transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self.assertIn('LayoutOptimizerTransposeNHWCToNCHW-Conv2D-Reshape', nodes)
self.assertIn('LayoutOptimizerTransposeNCHWToNHWC-Relu_1-MaxPool_1',
nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
if __name__ == '__main__':
test.main()
|
npuichigo/ttsflow
|
third_party/tensorflow/tensorflow/python/grappler/layout_optimizer_test.py
|
Python
|
apache-2.0
| 3,838
| 0.010422
|
"""United States Macroeconomic data"""
__docformat__ = 'restructuredtext'
COPYRIGHT = """This is public domain."""
TITLE = __doc__
SOURCE = """
Compiled by Skipper Seabold. All data are from the Federal Reserve Bank of St.
Louis [1] except the unemployment rate which was taken from the National
Bureau of Labor Statistics [2]. ::
[1] Data Source: FRED, Federal Reserve Economic Data, Federal Reserve Bank of
St. Louis; http://research.stlouisfed.org/fred2/; accessed December 15,
2009.
[2] Data Source: Bureau of Labor Statistics, U.S. Department of Labor;
http://www.bls.gov/data/; accessed December 15, 2009.
"""
DESCRSHORT = """US Macroeconomic Data for 1959Q1 - 2009Q3"""
DESCRLONG = DESCRSHORT
NOTE = """
Number of Observations - 203
Number of Variables - 14
Variable name definitions::
year - 1959q1 - 2009q3
quarter - 1-4
realgdp - Real gross domestic product (Bil. of chained 2005 US$,
seasonally adjusted annual rate)
    realcons - Real personal consumption expenditures (Bil. of chained
               2005 US$, seasonally adjusted annual rate)
realinv - Real gross private domestic investment (Bil. of chained 2005
US$, seasonally adjusted annual rate)
realgovt - Real federal consumption expenditures & gross investment
(Bil. of chained 2005 US$, seasonally adjusted annual rate)
    realdpi - Real disposable personal income (Bil. of chained 2005
              US$, seasonally adjusted annual rate)
cpi - End of the quarter consumer price index for all urban
consumers: all items (1982-84 = 100, seasonally adjusted).
m1 - End of the quarter M1 nominal money stock (Seasonally adjusted)
    tbilrate - Quarterly average of the monthly 3-month treasury bill:
secondary market rate
unemp - Seasonally adjusted unemployment rate (%)
pop - End of the quarter total population: all ages incl. armed
               forces overseas
infl - Inflation rate (ln(cpi_{t}/cpi_{t-1}) * 400)
realint - Real interest rate (tbilrate - infl)
"""
from numpy import recfromtxt, column_stack, array
from pandas import DataFrame
from scikits.statsmodels.tools import Dataset
from os.path import dirname, abspath
def load():
"""
Load the US macro data and return a Dataset class.
Returns
-------
Dataset instance:
See DATASET_PROPOSAL.txt for more information.
Notes
-----
The macrodata Dataset instance does not contain endog and exog attributes.
"""
data = _get_data()
names = data.dtype.names
dataset = Dataset(data=data, names=names)
return dataset
def load_pandas():
dataset = load()
dataset.data = DataFrame(dataset.data)
return dataset
def _get_data():
filepath = dirname(abspath(__file__))
data = recfromtxt(open(filepath + '/macrodata.csv', 'rb'), delimiter=",",
names=True, dtype=float)
return data
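if __name__ == "__main__":
    # Minimal usage sketch (assumes pandas is installed and macrodata.csv
    # sits next to this module, as _get_data() expects).
    dataset = load_pandas()
    print(dataset.data.head())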
|
wesm/statsmodels
|
scikits/statsmodels/datasets/macrodata/data.py
|
Python
|
bsd-3-clause
| 3,085
| 0.004538
|
import logging
import os
from autotest.client.shared import error
from virttest import data_dir
@error.context_aware
def run_kexec(test, params, env):
"""
Reboot to new kernel through kexec command:
    1) Boot guest with x2apic cpu flag.
    2) Check x2apic enabled in guest if needed.
    3) Install a new kernel if only one kernel is installed.
    4) Reboot to new kernel through kexec command.
    5) Check x2apic enabled in guest again if needed.
:param test: QEMU test object
:param params: Dictionary with the test parameters
:param env: Dictionary with test environment.
"""
def check_x2apic_flag():
x2apic_enabled = False
error.context("Check x2apic enabled in guest", logging.info)
x2apic_output = session.cmd_output(check_x2apic_cmd).strip()
x2apic_check_string = params.get("x2apic_check_string").split(",")
for check_string in x2apic_check_string:
if check_string.strip() in x2apic_output:
x2apic_enabled = True
if not x2apic_enabled:
raise error.TestFail("x2apic is not enabled in guest.")
def install_new_kernel():
error.context("Install a new kernel in guest", logging.info)
try:
# pylint: disable=E0611
from qemu.tests import rh_kernel_update
rh_kernel_update.run_rh_kernel_update(test, params, env)
except Exception, detail:
raise error.TestError("Failed to install a new kernel in "
"guest: %s" % detail)
vm = env.get_vm(params["main_vm"])
vm.verify_alive()
login_timeout = int(params.get("login_timeout", 360))
session = vm.wait_for_login(timeout=login_timeout)
cmd_timeout = int(params.get("cmd_timeout", 360))
check_x2apic = params.get("check_x2apic", "yes")
check_x2apic_cmd = params.get("check_x2apic_cmd")
if "yes" in check_x2apic:
check_x2apic_flag()
cmd = params.get("kernel_count_cmd")
count = session.cmd_output(cmd, timeout=cmd_timeout)
kernel_num = int(count)
if kernel_num <= 1:
# need install a new kernel
install_new_kernel()
session = vm.wait_for_login(timeout=login_timeout)
count = session.cmd_output(cmd, timeout=cmd_timeout)
if int(count) <= 1:
raise error.TestError("Could not find a new kernel "
"after rh_kernel_update.")
check_cur_kernel_cmd = params.get("check_cur_kernel_cmd")
cur_kernel_version = session.cmd_output(check_cur_kernel_cmd).strip()
logging.info("Current kernel is: %s" % cur_kernel_version)
cmd = params.get("check_installed_kernel")
output = session.cmd_output(cmd, timeout=cmd_timeout)
kernels = output.split()
new_kernel = None
for kernel in kernels:
kernel = kernel.strip()
if cur_kernel_version not in kernel:
new_kernel = kernel[7:]
if not new_kernel:
raise error.TestError("Could not find new kernel, "
"command line output: %s" % output)
msg = "Reboot to kernel %s through kexec" % new_kernel
error.context(msg, logging.info)
cmd = params.get("get_kernel_image") % new_kernel
kernel_file = session.cmd_output(cmd).strip().splitlines()[0]
cmd = params.get("get_kernel_ramdisk") % new_kernel
init_file = session.cmd_output(cmd).strip().splitlines()[0]
cmd = params.get("load_kernel_cmd") % (kernel_file, init_file)
session.cmd_output(cmd, timeout=cmd_timeout)
cmd = params.get("kexec_reboot_cmd")
session.sendline(cmd)
session = vm.wait_for_login(timeout=login_timeout)
kernel = session.cmd_output(check_cur_kernel_cmd).strip()
logging.info("Current kernel is: %s" % kernel)
if kernel.strip() != new_kernel.strip():
raise error.TestFail("Fail to boot to kernel %s, current kernel is %s"
% (new_kernel, kernel))
if "yes" in check_x2apic:
check_x2apic_flag()
session.close()
|
spiceqa/virt-test
|
qemu/tests/kexec.py
|
Python
|
gpl-2.0
| 4,019
| 0
|
# Copyright 2020 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# Ignore indention messages, since legacy scripts use 2 spaces instead of 4.
# pylint: disable=bad-indentation,docstring-section-indent
# pylint: disable=docstring-trailing-quotes
"""Helper class to facilitate communication to servo ec console."""
from ecusb import pty_driver
from ecusb import stm32uart
class TinyServod(object):
"""Helper class to wrap a pty_driver with interface."""
def __init__(self, vid, pid, interface, serialname=None, debug=False):
"""Build the driver and interface.
Args:
vid: servo device vid
pid: servo device pid
interface: which usb interface the servo console is on
      serialname: the servo device serial (if available)
      debug: whether to enable debug logging on the console uart
    """
self._vid = vid
self._pid = pid
self._interface = interface
self._serial = serialname
self._debug = debug
self._init()
def _init(self):
self.suart = stm32uart.Suart(vendor=self._vid,
product=self._pid,
interface=self._interface,
serialname=self._serial,
debuglog=self._debug)
self.suart.run()
self.pty = pty_driver.ptyDriver(self.suart, [])
def reinitialize(self):
"""Reinitialize the connect after a reset/disconnect/etc."""
self.close()
self._init()
def close(self):
"""Close out the connection and release resources.
Note: if another TinyServod process or servod itself needs the same device
it's necessary to call this to ensure the usb device is available.
"""
self.suart.close()
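# Minimal usage sketch (hypothetical vid/pid/interface values; a real servo
# device must be attached, so this is left as a comment):
#   tiny = TinyServod(0x18d1, 0x501b, 1)
#   ...  # interact with the console via tiny.pty / tiny.suart
#   tiny.close()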
|
coreboot/chrome-ec
|
extra/tigertool/ecusb/tiny_servod.py
|
Python
|
bsd-3-clause
| 1,767
| 0.00283
|
# Copyright 2019 Oihane Crucelaegui - AvanzOSC
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
from datetime import datetime, timedelta
from pytz import timezone, utc
from odoo import api, fields, models
class ResourceCalendar(models.Model):
_inherit = 'resource.calendar'
hour_gap = fields.Float(
string='Weekly Hour Gap', compute='_compute_hour_gap')
@api.depends('attendance_ids', 'attendance_ids.hour_from',
'attendance_ids.hour_to', 'attendance_ids.dayofweek',
'attendance_ids.date_from', 'attendance_ids.date_to')
def _compute_hour_gap(self):
today = fields.Date.context_today(self)
year, week_num, day_of_week = today.isocalendar()
start_dt = datetime.strptime(
'{}-W{}-1'.format(year, week_num-1), "%Y-W%W-%w").replace(
tzinfo=utc)
end_dt = start_dt + timedelta(days=7, seconds=-1)
for record in self:
# Set timezone in UTC if no timezone is explicitly given
if record.tz:
tz = timezone((record or self).tz)
start_dt = start_dt.replace(tzinfo=tz)
end_dt = end_dt.replace(tzinfo=tz)
record.hour_gap = record.get_work_hours_count(
start_dt, end_dt, compute_leaves=False)
class ResourceCalendarAttendance(models.Model):
_inherit = 'resource.calendar.attendance'
hour_gap = fields.Float(
string='Hour Gap', compute='_compute_hour_gap', store=True)
delay = fields.Integer(
string='Allowed hours extend entry', default=0)
delay_to = fields.Integer(
string='Allowed hours extend departure', default=0)
delay_hour_from = fields.Float(
string='Work from', compute='_compute_delay_hour_from_to', store=True)
delay_hour_to = fields.Float(
string='Work to', compute='_compute_delay_hour_from_to', store=True)
night_shift = fields.Boolean(string='Night shift', default=False)
@api.depends('hour_from', 'hour_to')
def _compute_hour_gap(self):
for record in self:
record.hour_gap = record.hour_to - record.hour_from
@api.depends('hour_from', 'hour_to', 'delay', 'delay_to')
def _compute_delay_hour_from_to(self):
for record in self:
delay = record.delay if record.delay > 0 else record.delay * -1
delay_to = (record.delay_to if record.delay_to > 0 else
record.delay_to * -1)
record.delay_hour_from = record.hour_from - delay
record.delay_hour_to = record.hour_to + delay_to
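    # Illustrative example (hypothetical values): an attendance with
    # hour_from=9.0, hour_to=13.0, delay=1 and delay_to=2 yields hour_gap=4.0,
    # delay_hour_from=8.0 and delay_hour_to=15.0; negative delays are taken
    # by absolute value.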
|
oihane/odoo-addons
|
resource_time/models/resource_calendar.py
|
Python
|
agpl-3.0
| 2,606
| 0
|
# coding: utf-8
from django import forms
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
from registration import forms as registration_forms
from kobo.static_lists import SECTORS, COUNTRIES
USERNAME_REGEX = r'^[a-z][a-z0-9_]+$'
USERNAME_MAX_LENGTH = 30
USERNAME_INVALID_MESSAGE = _(
'Usernames must be between 2 and 30 characters in length, '
'and may only consist of lowercase letters, numbers, '
'and underscores, where the first character must be a letter.'
)
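# Examples (illustrative only): 'jane_doe' and 'j2' satisfy USERNAME_REGEX,
# while '2jane' (leading digit), 'Jane' (uppercase) and 'j' (single character)
# do not.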
class RegistrationForm(registration_forms.RegistrationForm):
username = forms.RegexField(
regex=USERNAME_REGEX,
max_length=USERNAME_MAX_LENGTH,
label=_("Username"),
error_messages={'invalid': USERNAME_INVALID_MESSAGE}
)
name = forms.CharField(
label=_('Name'),
required=False,
)
organization = forms.CharField(
label=_('Organization name'),
required=False,
)
gender = forms.ChoiceField(
label=_('Gender'),
required=False,
widget=forms.RadioSelect,
choices=(
('male', _('Male')),
('female', _('Female')),
('other', _('Other')),
)
)
sector = forms.ChoiceField(
label=_('Sector'),
required=False,
        choices=(('', ''),) + SECTORS,
)
country = forms.ChoiceField(
label=_('Country'),
required=False,
choices=(('', ''),) + COUNTRIES,
)
class Meta:
model = User
fields = [
'name',
'organization',
'username',
'email',
'sector',
'country',
'gender',
# The 'password' field appears without adding it here; adding it
# anyway results in a duplicate
]
|
onaio/kpi
|
kpi/forms.py
|
Python
|
agpl-3.0
| 1,879
| 0.000532
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-04-23 05:02
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('wallet', '0002_auto_20160418_0334'),
]
operations = [
migrations.RenameField(
model_name='walletmasterkeys',
old_name='encrypted_seed',
new_name='encrypted_mnemonic',
),
]
|
priestc/MultiExplorer
|
multiexplorer/wallet/migrations/0003_auto_20160423_0502.py
|
Python
|
mit
| 456
| 0
|
#from satpy import Scene
from satpy.utils import debug_on
debug_on()
#from glob import glob
#base_dir="/data/COALITION2/database/meteosat/radiance_HRIT/case-studies/2015/07/07/"
#import os
#os.chdir(base_dir)
#filenames = glob("*201507071200*__")
#print base_dir
#print filenames
##global_scene = Scene(reader="hrit_msg", filenames=filenames, base_dir=base_dir, ppp_config_dir="/opt/users/hau/PyTroll//cfg_offline/")
#global_scene = Scene(reader="hrit_msg", filenames=filenames, base_dir=base_dir, ppp_config_dir="/opt/users/hau/PyTroll/packages/satpy/satpy/etc")
#from satpy import available_readers
#available_readers()
# new version of satpy after 0.8
#################################
from satpy import find_files_and_readers, Scene
from datetime import datetime
import numpy as np
show_details=False
save_overview=True
files_sat = find_files_and_readers(sensor='seviri',
start_time=datetime(2015, 7, 7, 12, 0),
end_time=datetime(2015, 7, 7, 12, 0),
base_dir="/data/COALITION2/database/meteosat/radiance_HRIT/case-studies/2015/07/07/",
reader="seviri_l1b_hrit")
#print files_sat
#files = dict(files_sat.items() + files_nwc.items())
files = dict(files_sat.items())
global_scene = Scene(filenames=files) # not allowed any more: reader="hrit_msg",
print dir(global_scene)
#global_scene.load([0.6, 0.8, 10.8])
#global_scene.load(['IR_120', 'IR_134'])
if save_overview:
global_scene.load(['overview',0.6, 0.8])
else:
global_scene.load([0.6,0.8])
#print(global_scene[0.6]) # works only if you also load the 0.6 channel itself, not just an RGB that contains it
#!!# print(global_scene['overview']) ### this one does only work in the develop version
global_scene.available_dataset_names()
global_scene["ndvi"] = (global_scene[0.8] - global_scene[0.6]) / (global_scene[0.8] + global_scene[0.6])
# !!! BUG: will not be resampled in global_scene.resample(area)
#from satpy import DatasetID
#my_channel_id = DatasetID(name='IR_016', calibration='radiance')
#global_scene.load([my_channel_id])
#print(scn['IR_016'])
#area="eurol"
#area="EuropeCanaryS95"
area="ccs4"
local_scene = global_scene.resample(area)
if show_details:
help(local_scene)
print global_scene.available_composite_ids()
print global_scene.available_composite_names()
print global_scene.available_dataset_names()
print global_scene.available_writers()
if save_overview:
#local_scene.show('overview')
local_scene.save_dataset('overview', './overview_'+area+'.png', overlay={'coast_dir': '/data/OWARNA/hau/maps_pytroll/', 'color': (255, 255, 255), 'resolution': 'i'})
print 'display ./overview_'+area+'.png &'
local_scene["ndvi"] = (local_scene[0.8] - local_scene[0.6]) / (local_scene[0.8] + local_scene[0.6])
#local_scene["ndvi"].area = local_scene[0.8].area
print "local_scene[\"ndvi\"].min()", local_scene["ndvi"].compute().min()
print "local_scene[\"ndvi\"].max()", local_scene["ndvi"].compute().max()
lsmask_file="/data/COALITION2/database/LandSeaMask/SEVIRI/LandSeaMask_"+area+".nc"
from netCDF4 import Dataset
ncfile = Dataset(lsmask_file,'r')
# Read variable corresponding to channel name
lsmask = ncfile.variables['lsmask'][:,:] # attention [:,:] or [:] is really necessary
import dask.array as da
#print 'type(local_scene["ndvi"].data)', type(local_scene["ndvi"].data), local_scene["ndvi"].data.compute().shape
#print "type(lsmask)", type(lsmask), lsmask.shape, lsmask[:,:,0].shape,
#local_scene["ndvi"].data.compute()[lsmask[:,:,0]==0]=np.nan
ndvi_numpyarray=local_scene["ndvi"].data.compute()
if area=="EuropeCanaryS95":
ndvi_numpyarray[lsmask[::-1,:,0]==0]=np.nan
else:
ndvi_numpyarray[lsmask[:,:,0]==0]=np.nan
local_scene["ndvi"].data = da.from_array(ndvi_numpyarray, chunks='auto')
#local_scene["ndvi"].data = local_scene["ndvi"].data.where(lsmask!=0)
colorized=True
if not colorized:
#local_scene.save_dataset('ndvi', './ndvi_'+area+'.png')
local_scene.save_dataset('ndvi', './ndvi_'+area+'.png', overlay={'coast_dir': '/data/OWARNA/hau/maps_pytroll/', 'color': (255, 255, 255), 'resolution': 'i'})
#print dir(local_scene.save_dataset)
else:
# https://github.com/pytroll/satpy/issues/459
# from satpy.enhancements import colorize
# colorize(img, **kwargs)
# 'ylgn'
# https://satpy.readthedocs.io/en/latest/writers.html
# nice NDVI colourbar here:
# https://www.researchgate.net/figure/NDVI-maps-Vegetation-maps-created-by-measuring-the-Normalized-Vegetation-Difference_fig7_323885082
from satpy.composites import BWCompositor
from satpy.enhancements import colorize
from satpy.writers import to_image
compositor = BWCompositor("test", standard_name="ndvi")
composite = compositor((local_scene["ndvi"], ))
img = to_image(composite)
#from trollimage import colormap
#dir(colormap)
# 'accent', 'blues', 'brbg', 'bugn', 'bupu', 'colorbar', 'colorize', 'dark2', 'diverging_colormaps', 'gnbu', 'greens',
# 'greys', 'hcl2rgb', 'np', 'oranges', 'orrd', 'paired', 'palettebar', 'palettize', 'pastel1', 'pastel2', 'piyg', 'prgn',
# 'pubu', 'pubugn', 'puor', 'purd', 'purples', 'qualitative_colormaps', 'rainbow', 'rdbu', 'rdgy', 'rdpu', 'rdylbu', 'rdylgn',
# 'reds', 'rgb2hcl', 'sequential_colormaps', 'set1', 'set2', 'set3', 'spectral', 'ylgn', 'ylgnbu', 'ylorbr', 'ylorrd'
# kwargs = {"palettes": [{"colors": 'ylgn',
# "min_value": -0.1, "max_value": 0.9}]}
#arr = np.array([[230, 227, 227], [191, 184, 162], [118, 148, 61], [67, 105, 66], [5, 55, 8]])
arr = np.array([ [ 95, 75, 49], [210, 175, 131], [118, 148, 61], [67, 105, 66], [28, 29, 4]])
np.save("/tmp/binary_colormap.npy", arr)
kwargs = {"palettes": [{"filename": "/tmp/binary_colormap.npy",
"min_value": -0.1, "max_value": 0.8}]}
colorize(img, **kwargs)
from satpy.writers import add_decorate, add_overlay
decorate = {
'decorate': [
{'logo': {'logo_path': '/opt/users/common/logos/meteoSwiss.png', 'height': 60, 'bg': 'white','bg_opacity': 255, 'align': {'top_bottom': 'top', 'left_right': 'right'}}},
{'text': {'txt': ' MSG, '+local_scene.start_time.strftime('%Y-%m-%d %H:%MUTC')+', '+ area+', NDVI',
'align': {'top_bottom': 'top', 'left_right': 'left'},
'font': "/usr/openv/java/jre/lib/fonts/LucidaTypewriterBold.ttf",
'font_size': 19,
'height': 25,
'bg': 'white',
'bg_opacity': 0,
'line': 'white'}}
]
}
img = add_decorate(img, **decorate) #, fill_value='black'
img = add_overlay(img, area, '/data/OWARNA/hau/maps_pytroll/', color='red', width=0.5, resolution='i', level_coast=1, level_borders=1, fill_value=None)
#from satpy.writers import compute_writer_results
#res1 = scn.save_datasets(filename="/tmp/{name}.png",
# writer='simple_image',
# compute=False)
#res2 = scn.save_datasets(filename="/tmp/{name}.tif",
# writer='geotiff',
# compute=False)
#results = [res1, res2]
#compute_writer_results(results)
#img.show()
img.save('./ndvi_'+area+'.png')
print 'display ./ndvi_'+area+'.png &'
|
meteoswiss-mdr/monti-pytroll
|
scripts/demo_satpy_ndvi_decorate.py
|
Python
|
lgpl-3.0
| 7,498
| 0.015071
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import six
import stevedore
from keystoneauth1 import exceptions
PLUGIN_NAMESPACE = 'keystoneauth1.plugin'
__all__ = ['get_available_plugin_names',
'get_available_plugin_loaders',
'get_plugin_loader',
'get_plugin_options',
'BaseLoader',
'PLUGIN_NAMESPACE']
def get_available_plugin_names():
"""Get the names of all the plugins that are available on the system.
    This is particularly useful for help and error text, for example to
    prompt a user about which plugins they may specify.
    :returns: The set of available plugin names.
:rtype: frozenset
"""
mgr = stevedore.ExtensionManager(namespace=PLUGIN_NAMESPACE)
return frozenset(mgr.names())
def get_available_plugin_loaders():
"""Retrieve all the plugin classes available on the system.
:returns: A dict with plugin entrypoint name as the key and the plugin
loader as the value.
:rtype: dict
"""
mgr = stevedore.ExtensionManager(namespace=PLUGIN_NAMESPACE,
invoke_on_load=True,
propagate_map_exceptions=True)
return dict(mgr.map(lambda ext: (ext.entry_point.name, ext.obj)))
def get_plugin_loader(name):
"""Retrieve a plugin class by its entrypoint name.
:param str name: The name of the object to get.
:returns: An auth plugin class.
:rtype: :py:class:`keystoneauth1.loading.BaseLoader`
    :raises keystoneauth1.exceptions.NoMatchingPlugin: if a plugin cannot be
created.
"""
try:
mgr = stevedore.DriverManager(namespace=PLUGIN_NAMESPACE,
invoke_on_load=True,
name=name)
except RuntimeError:
raise exceptions.NoMatchingPlugin(name)
return mgr.driver
def get_plugin_options(name):
"""Get the options for a specific plugin.
This will be the list of options that is registered and loaded by the
specified plugin.
:returns: A list of :py:class:`keystoneauth1.loading.Opt` options.
    :raises keystoneauth1.exceptions.NoMatchingPlugin: if a plugin cannot be
created.
"""
return get_plugin_loader(name).get_options()
@six.add_metaclass(abc.ABCMeta)
class BaseLoader(object):
@abc.abstractproperty
def plugin_class(self):
        raise NotImplementedError()
@abc.abstractmethod
def get_options(self):
"""Return the list of parameters associated with the auth plugin.
This list may be used to generate CLI or config arguments.
:returns: A list of Param objects describing available plugin
parameters.
:rtype: list
"""
return []
def load_from_options(self, **kwargs):
"""Create a plugin from the arguments retrieved from get_options.
A client can override this function to do argument validation or to
handle differences between the registered options and what is required
to create the plugin.
"""
missing_required = [o for o in self.get_options()
if o.required and kwargs.get(o.dest) is None]
if missing_required:
raise exceptions.MissingRequiredOptions(missing_required)
return self.plugin_class(**kwargs)
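if __name__ == '__main__':
    # Minimal usage sketch: list the auth plugins visible on this system
    # (which names appear depends on the installed plugin packages).
    print(sorted(get_available_plugin_names()))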
|
sjsucohort6/openstack
|
python/venv/lib/python2.7/site-packages/keystoneauth1/loading/base.py
|
Python
|
mit
| 3,953
| 0
|
# Copyright (c) 2015-2016 Western Digital Corporation or its affiliates.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
# Author: Chaitanya Kulkarni <chaitanya.kulkarni@hgst.com>
#
"""
NVMe Compare Command Testcase:-
1. Create a data file 1 with pattern 1515 to write.
2. Create a data file 2 with pattern 2525 to compare with.
3. Write a block of data pattern using data file1.
4. Compare written block to data file 2's pattern; shall fail.
5. Compare written block to data file1's pattern; shall pass.
"""
from nose.tools import assert_equal, assert_not_equal
from nvme_test_io import TestNVMeIO
class TestNVMeCompareCmd(TestNVMeIO):
"""
Represents Compare Testcase. Inherits TestNVMeIO class.
- Attributes:
- data_size : data size to perform IO.
          - start_block : starting block at which to perform IO.
- compare_file : data file to use in nvme compare command.
- test_log_dir : directory for logs, temp files.
"""
def __init__(self):
""" Pre Section for TestNVMeCompareCmd """
TestNVMeIO.__init__(self)
self.data_size = 1024
self.start_block = 1023
self.setup_log_dir(self.__class__.__name__)
self.compare_file = self.test_log_dir + "/" + "compare_file.txt"
self.write_file = self.test_log_dir + "/" + self.write_file
self.create_data_file(self.write_file, self.data_size, "15")
self.create_data_file(self.compare_file, self.data_size, "25")
def __del__(self):
""" Post Section for TestNVMeCompareCmd """
TestNVMeIO.__del__(self)
def nvme_compare(self, cmp_file):
""" Wrapper for nvme compare command.
- Args:
- cmp_file : data file used in nvme compare command.
- Returns:
- return code of the nvme compare command.
"""
compare_cmd = "nvme compare " + self.ns1 + " --start-block=" + \
str(self.start_block) + " --block-count=" + \
str(self.block_count) + " --data-size=" + \
str(self.data_size) + " --data=" + cmp_file
return self.exec_cmd(compare_cmd)
def test_nvme_compare(self):
""" Testcase main """
assert_equal(self.nvme_write(), 0)
assert_not_equal(self.nvme_compare(self.compare_file), 0)
assert_equal(self.nvme_compare(self.write_file), 0)
|
samiWaheed/nvme-cli
|
tests/nvme_compare_test.py
|
Python
|
gpl-2.0
| 3,106
| 0
|
#!/usr/bin/env python
import os
from django.core import management
os.environ['DJANGO_SETTINGS_MODULE'] = 'kgadmin.conf.settings'
if __name__ == "__main__":
management.execute_from_command_line()
|
Karaage-Cluster/karaage-admin
|
manage.py
|
Python
|
gpl-3.0
| 201
| 0
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from wtforms import validators
from jinja2 import Markup
from studio.core.engines import db
from riitc.models import NaviModel, ChannelModel
from .base import BaseView
from .forms import CKTextAreaField
class Navi(BaseView):
column_labels = {'name': '名称', 'channels': '频道列表'}
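    # (Chinese UI labels above: '名称' = "Name", '频道列表' = "Channel list")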
column_list = ['id', 'name', 'channels', 'date_created']
def _list_channels(self, context, model, name):
lis = ''
for channel in model.channels:
lis += '<li>%s</li>' % (channel)
return Markup('<ol>' + lis + '</ol>')
column_formatters = {
'channels': _list_channels,
}
def __init__(self, **kwargs):
super(Navi, self).__init__(NaviModel, db.session, **kwargs)
def create_form(self, obj=None):
form = super(Navi, self).create_form()
delattr(form, 'date_created')
return form
def edit_form(self, obj=None):
form = super(Navi, self).edit_form(obj=obj)
delattr(form, 'date_created')
return form
class Channel(BaseView):
create_template = 'panel/channel_edit.html'
edit_template = 'panel/channel_edit.html'
column_labels = {'name': '名称',
'parent': '主分类(本身为主分类,不填写)',
'summary': '简介',
'date_created': '创建时间'}
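    # (Chinese UI labels above: '名称' = "Name", '主分类(本身为主分类,不填写)' =
    # "Parent category (leave blank if this is itself a parent)",
    # '简介' = "Summary", '创建时间' = "Creation time")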
column_searchable_list = ['name', ]
column_default_sort = ('date_created', True)
form_extra_fields = {
'summary': CKTextAreaField('简介',
validators=[validators.Required()]),
}
def __init__(self, **kwargs):
super(Channel, self).__init__(ChannelModel, db.session, **kwargs)
def create_form(self, obj=None):
form = super(Channel, self).create_form()
delattr(form, 'articles')
delattr(form, 'channels')
delattr(form, 'all_articles')
delattr(form, 'date_created')
return form
def edit_form(self, obj=None):
form = super(Channel, self).edit_form(obj=obj)
delattr(form, 'articles')
delattr(form, 'channels')
delattr(form, 'all_articles')
delattr(form, 'date_created')
return form
|
qisanstudio/qsapp-riitc
|
src/riitc/panel/channel.py
|
Python
|
mit
| 2,269
| 0
|
from __future__ import unicode_literals
from django.apps import AppConfig
class IngestConfig(AppConfig):
name = 'ingest'
|
IQSS/miniverse
|
dv_apps/ingest/apps.py
|
Python
|
mit
| 128
| 0
|
# coding: utf-8
"""
Onshape REST API
The Onshape REST API consumed by all clients. # noqa: E501
The version of the OpenAPI document: 1.113
Contact: api-support@onshape.zendesk.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
import sys # noqa: F401
import six # noqa: F401
import nulltype # noqa: F401
from onshape_client.oas.model_utils import ( # noqa: F401
ModelComposed,
ModelNormal,
ModelSimple,
date,
datetime,
file_type,
int,
none_type,
str,
validate_get_composed_info,
)
try:
from onshape_client.oas.models import btm_parameter_boolean144_all_of
except ImportError:
btm_parameter_boolean144_all_of = sys.modules[
"onshape_client.oas.models.btm_parameter_boolean144_all_of"
]
try:
from onshape_client.oas.models import btp_literal253
except ImportError:
btp_literal253 = sys.modules["onshape_client.oas.models.btp_literal253"]
try:
from onshape_client.oas.models import btp_space10
except ImportError:
btp_space10 = sys.modules["onshape_client.oas.models.btp_space10"]
class BTPLiteralBoolean255(ModelComposed):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
("documentation_type",): {
"FUNCTION": "FUNCTION",
"PREDICATE": "PREDICATE",
"CONSTANT": "CONSTANT",
"ENUM": "ENUM",
"USER_TYPE": "USER_TYPE",
"FEATURE_DEFINITION": "FEATURE_DEFINITION",
"FILE_HEADER": "FILE_HEADER",
"UNDOCUMENTABLE": "UNDOCUMENTABLE",
"UNKNOWN": "UNKNOWN",
},
}
validations = {}
additional_properties_type = None
@staticmethod
def openapi_types():
"""
This must be a class method so a model may have properties that are
of type self, this ensures that we don't create a cyclic import
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
"bt_type": (str,), # noqa: E501
"value": (bool,), # noqa: E501
"atomic": (bool,), # noqa: E501
"documentation_type": (str,), # noqa: E501
"end_source_location": (int,), # noqa: E501
"node_id": (str,), # noqa: E501
"short_descriptor": (str,), # noqa: E501
"space_after": (btp_space10.BTPSpace10,), # noqa: E501
"space_before": (btp_space10.BTPSpace10,), # noqa: E501
"space_default": (bool,), # noqa: E501
"start_source_location": (int,), # noqa: E501
}
@staticmethod
def discriminator():
return None
attribute_map = {
"bt_type": "btType", # noqa: E501
"value": "value", # noqa: E501
"atomic": "atomic", # noqa: E501
"documentation_type": "documentationType", # noqa: E501
"end_source_location": "endSourceLocation", # noqa: E501
"node_id": "nodeId", # noqa: E501
"short_descriptor": "shortDescriptor", # noqa: E501
"space_after": "spaceAfter", # noqa: E501
"space_before": "spaceBefore", # noqa: E501
"space_default": "spaceDefault", # noqa: E501
"start_source_location": "startSourceLocation", # noqa: E501
}
required_properties = set(
[
"_data_store",
"_check_type",
"_from_server",
"_path_to_item",
"_configuration",
"_composed_instances",
"_var_name_to_model_instances",
"_additional_properties_model_instances",
]
)
def __init__(
self,
_check_type=True,
_from_server=False,
_path_to_item=(),
_configuration=None,
**kwargs
): # noqa: E501
"""btp_literal_boolean255.BTPLiteralBoolean255 - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_from_server (bool): True if the data is from the server
False if the data is from the client (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
bt_type (str): [optional] # noqa: E501
value (bool): [optional] # noqa: E501
atomic (bool): [optional] # noqa: E501
documentation_type (str): [optional] # noqa: E501
end_source_location (int): [optional] # noqa: E501
node_id (str): [optional] # noqa: E501
short_descriptor (str): [optional] # noqa: E501
space_after (btp_space10.BTPSpace10): [optional] # noqa: E501
space_before (btp_space10.BTPSpace10): [optional] # noqa: E501
space_default (bool): [optional] # noqa: E501
start_source_location (int): [optional] # noqa: E501
"""
self._data_store = {}
self._check_type = _check_type
self._from_server = _from_server
self._path_to_item = _path_to_item
self._configuration = _configuration
constant_args = {
"_check_type": _check_type,
"_path_to_item": _path_to_item,
"_from_server": _from_server,
"_configuration": _configuration,
}
required_args = {}
# remove args whose value is Null because they are unset
required_arg_names = list(required_args.keys())
for required_arg_name in required_arg_names:
if required_args[required_arg_name] is nulltype.Null:
del required_args[required_arg_name]
model_args = {}
model_args.update(required_args)
model_args.update(kwargs)
composed_info = validate_get_composed_info(constant_args, model_args, self)
self._composed_instances = composed_info[0]
self._var_name_to_model_instances = composed_info[1]
self._additional_properties_model_instances = composed_info[2]
unused_args = composed_info[3]
for var_name, var_value in required_args.items():
setattr(self, var_name, var_value)
for var_name, var_value in six.iteritems(kwargs):
if (
var_name in unused_args
and self._configuration is not None
and self._configuration.discard_unknown_keys
and not self._additional_properties_model_instances
):
# discard variable.
continue
setattr(self, var_name, var_value)
@staticmethod
def _composed_schemas():
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
        # level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return {
"anyOf": [],
"allOf": [
btm_parameter_boolean144_all_of.BTMParameterBoolean144AllOf,
btp_literal253.BTPLiteral253,
],
"oneOf": [],
}
|
onshape-public/onshape-clients
|
python/onshape_client/oas/models/btp_literal_boolean255.py
|
Python
|
mit
| 9,062
| 0.00011
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2014 Glencoe Software, Inc. All Rights Reserved.
# Use is subject to license terms supplied in LICENSE.txt
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Automatic configuration of memory settings for Java servers.
"""
from types import StringType
from shlex import split
import logging
LOGGER = logging.getLogger("omero.install.jvmcfg")
def strip_dict(map, prefix=("omero", "jvmcfg"), suffix=(), limit=1):
"""
For the given dictionary, return a copy of the
dictionary where all entries not matching the
prefix, suffix, and limit have been removed and
where all remaining keys have had the prefix and
suffix stripped. The limit describes the number
of elements that are allowed in the new key after
stripping prefix and suffix.
"""
if isinstance(prefix, StringType):
prefix = tuple(prefix.split("."))
if isinstance(suffix, StringType):
suffix = tuple(suffix.split("."))
rv = dict()
if not map:
return dict()
def __strip_dict(k, v, prefix, suffix, rv):
key = tuple(k.split("."))
ksz = len(key)
psz = len(prefix)
ssz = len(suffix)
if ksz <= (psz + ssz):
return # No way to strip if smaller
if key[0:psz] == prefix and key[ksz-ssz:] == suffix:
newkey = key[psz:ksz-ssz]
if len(newkey) == limit:
newkey = ".".join(newkey)
rv[newkey] = v
for k, v in map.items():
__strip_dict(k, v, prefix, suffix, rv)
return rv
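# Illustrative example of strip_dict (hypothetical config key and value):
#   >>> strip_dict({"omero.jvmcfg.heap_size.blitz": "8g"}, suffix="blitz")
#   {'heap_size': '8g'}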
class StrategyRegistry(dict):
def __init__(self, *args, **kwargs):
super(dict, self).__init__(*args, **kwargs)
STRATEGY_REGISTRY = StrategyRegistry()
class Settings(object):
"""
Container for the config options found in etc/grid/config.xml
"""
def __init__(self, server_values=None, global_values=None):
if server_values is None:
self.__server = dict()
else:
self.__server = server_values
if global_values is None:
self.__global = dict()
else:
self.__global = global_values
self.__static = {
"strategy": PercentStrategy,
"append": "",
"perm_gen": "128m",
"heap_dump": "off",
"heap_size": "512m",
"system_memory": None,
"max_system_memory": "48000",
"min_system_memory": "3414",
}
self.__manual = dict()
def __getattr__(self, key):
return self.lookup(key)
def lookup(self, key, default=None):
if key in self.__manual:
return self.__manual[key]
elif key in self.__server:
return self.__server[key]
elif key in self.__global:
return self.__global[key]
elif key in self.__static:
return self.__static[key]
else:
return default
def overwrite(self, key, value, always=False):
if self.was_set(key) and not always:
# Then we leave it as the user requested
return
else:
self.__manual[key] = value
def was_set(self, key):
return key in self.__server or key in self.__global
def get_strategy(self):
return STRATEGY_REGISTRY.get(self.strategy, self.strategy)
def __str__(self):
rv = dict()
rv.update(self.__server)
rv.update(self.__global)
if not rv:
rv = ""
return 'Settings(%s)' % rv
class Strategy(object):
"""
Strategy for calculating memory settings. Primary
class of the memory module.
"""
def __init__(self, name, settings=None):
"""
'name' argument should likely be one of:
('blitz', 'indexer', 'pixeldata', 'repository')
"""
if settings is None:
settings = Settings()
self.name = name
self.settings = settings
if type(self) == Strategy:
raise Exception("Must subclass!")
# Memory helpers
def system_memory_mb(self):
"""
Returns a tuple, in MB, of available, active, and total memory.
"total" memory is found by calling to first a Python library
(if installed) and otherwise a Java class. If
"system_memory" is set, it will short-circuit both methods.
"active" memory is set to "total" but limited by "min_system_memory"
and "max_system_memory".
"available" may not be accurate, and in some cases will be
set to total.
"""
available, total = None, None
if self.settings.system_memory is not None:
total = int(self.settings.system_memory)
available = total
else:
pymem = self._system_memory_mb_psutil()
if pymem is not None:
available, total = pymem
else:
available, total = self._system_memory_mb_java()
max_system_memory = int(self.settings.max_system_memory)
min_system_memory = int(self.settings.min_system_memory)
active = max(min(total, max_system_memory), min_system_memory)
return available, active, total
def _system_memory_mb_psutil(self):
try:
import psutil
pymem = psutil.virtual_memory()
return (pymem.free/1000000, pymem.total/1000000)
except ImportError:
LOGGER.debug("No psutil installed")
return None
def _system_memory_mb_java(self):
import omero.cli
import omero.java
# Copied from db.py. Needs better dir detection
cwd = omero.cli.CLI().dir
server_jar = cwd / "lib" / "server" / "server.jar"
cmd = ["ome.services.util.JvmSettingsCheck", "--psutil"]
p = omero.java.popen(["-cp", str(server_jar)] + cmd)
o, e = p.communicate()
if p.poll() != 0:
LOGGER.warn("Failed to invoke java:\nout:%s\nerr:%s",
o, e)
rv = dict()
for line in o.split("\n"):
line = line.strip()
if not line:
continue
parts = line.split(":")
if len(parts) == 1:
parts.append("")
rv[parts[0]] = parts[1]
try:
free = long(rv["Free"]) / 1000000
except:
LOGGER.warn("Failed to parse Free from %s", rv)
free = 2000
try:
total = long(rv["Total"]) / 1000000
except:
LOGGER.warn("Failed to parse Total from %s", rv)
total = 4000
return (free, total)
# API Getters
def get_heap_size(self, sz=None):
if sz is None or self.settings.was_set("heap_size"):
sz = self.settings.heap_size
if str(sz).startswith("-X"):
return sz
else:
rv = "-Xmx%s" % sz
if rv[-1].lower() not in ("b", "k", "m", "g"):
rv = "%sm" % rv
return rv
def get_heap_dump(self):
hd = self.settings.heap_dump
if hd == "off":
return ""
elif hd in ("on", "cwd", "tmp"):
return "-XX:+HeapDumpOnOutOfMemoryError"
def get_perm_gen(self):
pg = self.settings.perm_gen
if str(pg).startswith("-XX"):
return pg
else:
return "-XX:MaxPermSize=%s" % pg
def get_append(self):
values = []
if self.settings.heap_dump == "tmp":
import tempfile
tmp = tempfile.gettempdir()
values.append("-XX:HeapDumpPath=%s" % tmp)
return values + split(self.settings.append)
def get_memory_settings(self):
values = [
self.get_heap_size(),
self.get_heap_dump(),
self.get_perm_gen(),
]
if any([x.startswith("-XX:MaxPermSize") for x in values]):
values.append("-XX:+IgnoreUnrecognizedVMOptions")
values += self.get_append()
return [x for x in values if x]
class ManualStrategy(Strategy):
"""
Simplest strategy which assumes all values have
been set and simply uses them or their defaults.
"""
class PercentStrategy(Strategy):
"""
Strategy based on a percent of available memory.
"""
PERCENT_DEFAULTS = (
("blitz", 15),
("pixeldata", 15),
("indexer", 10),
("repository", 10),
("other", 1),
)
def __init__(self, name, settings=None):
super(PercentStrategy, self).__init__(name, settings)
self.defaults = dict(self.PERCENT_DEFAULTS)
self.use_active = True
def get_heap_size(self):
"""
        Uses the result of calculate_heap_size() with its default
        arguments as the input to get_heap_size(), in other words some
        percent of the active memory.
"""
sz = self.calculate_heap_size()
return super(PercentStrategy, self).get_heap_size(sz)
def get_percent(self):
other = self.defaults.get("other", "1")
default = self.defaults.get(self.name, other)
percent = int(self.settings.lookup("percent", default))
return percent
def get_perm_gen(self):
available, active, total = self.system_memory_mb()
choice = self.use_active and active or total
if choice <= 4000:
if choice >= 2000:
self.settings.overwrite("perm_gen", "256m")
elif choice <= 8000:
self.settings.overwrite("perm_gen", "512m")
else:
self.settings.overwrite("perm_gen", "1g")
return super(PercentStrategy, self).get_perm_gen()
def calculate_heap_size(self, method=None):
"""
Re-calculates the appropriate heap size based on the
value of get_percent(). The "active" memory returned
by method() will be used by default, but can be modified
to use "total" via the "use_active" flag.
"""
if method is None:
method = self.system_memory_mb
available, active, total = method()
choice = self.use_active and active or total
percent = self.get_percent()
calculated = choice * int(percent) / 100
return calculated
def usage_table(self, min=10, max=20):
total_mb = [2**x for x in range(min, max)]
for total in total_mb:
method = lambda: (total, total, total)
yield total, self.calculate_heap_size(method)
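    # Illustrative example (hypothetical numbers): with 16000 MB of active
    # memory and the default 15% for "blitz", calculate_heap_size() returns
    # 2400, so get_heap_size() yields "-Xmx2400m".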
STRATEGY_REGISTRY["manual"] = ManualStrategy
STRATEGY_REGISTRY["percent"] = PercentStrategy
def adjust_settings(config, template_xml,
blitz=None, indexer=None,
pixeldata=None, repository=None):
"""
Takes an omero.config.ConfigXml object and adjusts
the memory settings. Primary entry point to the
memory module.
"""
from xml.etree.ElementTree import Element
from collections import defaultdict
replacements = dict()
options = dict()
for template in template_xml.findall("server-template"):
for server in template.findall("server"):
for option in server.findall("option"):
o = option.text
if o.startswith("MEMORY:"):
options[o[7:]] = (server, option)
for props in server.findall("properties"):
for prop in props.findall("property"):
name = prop.attrib.get("name", "")
if name.startswith("REPLACEMENT:"):
replacements[name[12:]] = (server, prop)
rv = defaultdict(list)
m = config.as_map()
loop = (("blitz", blitz), ("indexer", indexer),
("pixeldata", pixeldata), ("repository", repository))
for name, StrategyType in loop:
if name not in options:
raise Exception(
"Cannot find %s option. Make sure templates.xml was "
"not copied from an older server" % name)
for name, StrategyType in loop:
specific = strip_dict(m, suffix=name)
defaults = strip_dict(m)
settings = Settings(specific, defaults)
rv[name].append(settings)
if StrategyType is None:
StrategyType = settings.get_strategy()
if not callable(StrategyType):
raise Exception("Bad strategy: %s" % StrategyType)
strategy = StrategyType(name, settings)
settings = strategy.get_memory_settings()
server, option = options[name]
idx = 0
for v in settings:
rv[name].append(v)
if idx == 0:
option.text = v
else:
elem = Element("option")
elem.text = v
server.insert(idx, elem)
idx += 1
# Now we check for any other properties and
# put them where the replacement should go.
for k, v in m.items():
r = []
suffix = ".%s" % name
size = len(suffix)
if k.endswith(suffix):
k = k[:-size]
r.append((k, v))
server, replacement = replacements[name]
idx = 0
for k, v in r:
if idx == 0:
replacement.attrib["name"] = k
replacement.attrib["value"] = v
else:
elem = Element("property", name=k, value=v)
server.append(elem)
return rv
def usage_charts(path,
min=0, max=20,
Strategy=PercentStrategy, name="blitz"):
# See http://matplotlib.org/examples/pylab_examples/anscombe.html
from pylab import array
from pylab import axis
from pylab import gca
from pylab import subplot
from pylab import plot
from pylab import setp
from pylab import savefig
from pylab import text
points = 200
x = array([2 ** (x / points) / 1000
for x in range(min*points, max*points)])
y_configs = (
(Settings({}), 'A'),
(Settings({"percent": "20"}), 'B'),
(Settings({}), 'C'),
(Settings({"max_system_memory": "10000"}), 'D'),
)
def f(cfg):
s = Strategy(name, settings=cfg[0])
y = []
for total in x:
method = lambda: (total, total, total)
y.append(s.calculate_heap_size(method))
return y
y1 = f(y_configs[0])
y2 = f(y_configs[1])
y3 = f(y_configs[2])
y4 = f(y_configs[3])
axis_values = [0, 20, 0, 6]
def ticks_f():
setp(gca(), xticks=(8, 16), yticks=(2, 4))
def text_f(which):
cfg = y_configs[which]
# s = cfg[0]
txt = "%s" % (cfg[1],)
text(2, 2, txt, fontsize=20)
subplot(221)
plot(x, y1)
axis(axis_values)
text_f(0)
ticks_f()
subplot(222)
plot(x, y2)
axis(axis_values)
text_f(1)
ticks_f()
subplot(223)
plot(x, y3)
axis(axis_values)
text_f(2)
ticks_f()
subplot(224)
plot(x, y4)
axis(axis_values)
text_f(3)
ticks_f()
savefig(path)
|
tp81/openmicroscopy
|
components/tools/OmeroPy/src/omero/install/jvmcfg.py
|
Python
|
gpl-2.0
| 15,795
| 0.000253
|
#!/usr/bin/env python
"""
Input: fasta, int
Output: tabular
Return titles with lengths of the corresponding sequences
"""
import sys
assert sys.version_info[:2] >= ( 2, 4 )
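# Example (illustrative): a FASTA record ">seq1 some description" whose
# sequence spans 60 bases yields the line "seq1 some description\t60" when
# keep_first_char is 0; with keep_first_word=True the title reduces to "seq1".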
def compute_fasta_length( fasta_file, out_file, keep_first_char, keep_first_word=False ):
infile = fasta_file
out = open( out_file, 'w')
keep_first_char = int( keep_first_char )
fasta_title = ''
seq_len = 0
# number of char to keep in the title
if keep_first_char == 0:
keep_first_char = None
else:
keep_first_char += 1
first_entry = True
for line in open( infile ):
line = line.strip()
if not line or line.startswith( '#' ):
continue
if line[0] == '>':
if first_entry is False:
if keep_first_word:
fasta_title = fasta_title.split()[0]
out.write( "%s\t%d\n" % ( fasta_title[ 1:keep_first_char ], seq_len ) )
else:
first_entry = False
fasta_title = line
seq_len = 0
else:
seq_len += len(line)
# last fasta-entry
if keep_first_word:
fasta_title = fasta_title.split()[0]
out.write( "%s\t%d\n" % ( fasta_title[ 1:keep_first_char ], seq_len ) )
out.close()
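# A minimal, hedged usage sketch (not part of the original script): writes a
# tiny FASTA file and runs compute_fasta_length on it. File names below are
# placeholders chosen for illustration.
def _example_usage():
    with open('example.fasta', 'w') as handle:
        handle.write('>seq1 first sequence\nACGT\nACGT\n>seq2\nACG\n')
    # keep_first_char=0 means "keep the whole title" (see above).
    compute_fasta_length('example.fasta', 'example.len', 0, keep_first_word=True)
    # Expected output: "seq1\t8" and "seq2\t3"
    print(open('example.len').read())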
if __name__ == "__main__" :
compute_fasta_length( sys.argv[1], sys.argv[2], sys.argv[3], True )
|
icaoberg/cellorganizer-galaxy-tools
|
datatypes/converters/fasta_to_len.py
|
Python
|
gpl-3.0
| 1,377
| 0.02106
|
class User(object):
_instance = None
def __new__(cls, *args):
if not cls._instance:
            # object.__new__ takes no extra arguments (TypeError on Python 3).
            cls._instance = super(User, cls).__new__(cls)
return cls._instance
@property
def is_authenticated(self):
return True
@property
def is_active(self):
return True
@property
def is_anonymous(self):
return False
@staticmethod
def get_id():
return '1'
@staticmethod
def get_by_id(*args):
return User()
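# Hedged sketch (not part of the original module): demonstrating the
# singleton guarantee that repeated construction yields one shared instance.
if __name__ == '__main__':
    first, second = User(), User()
    assert first is second
    print(first.get_id(), first.is_authenticated)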
|
sanchopanca/rcblog
|
rcblog/user.py
|
Python
|
mit
| 509
| 0
|
import tkinter as tk
from .convertwidget import ConvertWidget
class SpeedWidget(ConvertWidget):
"""Widget used to convert weight and mass units
Attributes:
root The Frame parent of the widget.
"""
def __init__(self, root):
super(SpeedWidget, self).__init__(root)
self.root = root
self._init_frames()
self._init_binds()
def _init_frames(self):
# Creation of the main frame
f_main = tk.Frame(self.root)
f_main.pack(fill="both", expand="yes", side=tk.TOP)
def _init_binds(self):
pass
if __name__ == '__main__':
pass
|
NicolasBi/super_converter
|
sconv/lib/gui/convertwidget/speed.py
|
Python
|
gpl-3.0
| 631
| 0
|
# Copyright 2014 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module defines an interface for finding named resources.
Due to license restrictions, not all software dependences can be shipped with
PerfKitBenchmarker.
Those that can be included in perfkitbenchmarker/data, or
perfkitbenchmarker/scripts and are loaded via a PackageResourceLoader.
Users can specify additional paths to search for required data files using the
`--data_search_paths` flag.
"""
import abc
import logging
import os
import shutil
import pkg_resources
from perfkitbenchmarker import flags
from perfkitbenchmarker import temp_dir
FLAGS = flags.FLAGS
flags.DEFINE_multistring('data_search_paths', ['.'],
'Additional paths to search for data files. '
'These paths will be searched prior to using files '
'bundled with PerfKitBenchmarker.')
_RESOURCES = 'resources'
class ResourceNotFound(ValueError):
"""Error raised when a resource could not be found on the search path."""
pass
class ResourceLoader(object):
"""An interface for loading named resources."""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def ResourceExists(self, name):
"""Checks for existence of the resource 'name'.
Args:
name: string. Name of the resource. Typically a file name.
Returns:
A boolean indicating whether the resource 'name' can be loaded by this
object.
"""
pass
@abc.abstractmethod
def ResourcePath(self, name):
"""Gets the path to the resource 'name'.
Args:
name: string. Name of the resource. Typically a file name.
Returns:
A full path to 'name' on the filesystem.
Raises:
ResourceNotFound: If 'name' was not found.
"""
pass
class FileResourceLoader(ResourceLoader):
"""Loads resources from a directory in the filesystem.
Attributes:
path: string. Root path to load resources from.
"""
def __init__(self, path):
self.path = path
if not os.path.isdir(path):
            logging.warning('File resource loader root %s is not a directory.', path)
def __repr__(self):
return '<{0} path="{1}">'.format(type(self).__name__, self.path)
def _Join(self, *args):
return os.path.join(self.path, *args)
def ResourceExists(self, name):
return os.path.exists(self._Join(name))
def ResourcePath(self, name):
if not self.ResourceExists(name):
raise ResourceNotFound(name)
return self._Join(name)
class PackageResourceLoader(ResourceLoader):
"""Loads resources from a Python package.
Attributes:
package: string. Name of the package containing resources.
"""
def __init__(self, package):
self.package = package
def __repr__(self):
return '<{0} package="{1}">'.format(type(self).__name__, self.package)
def ResourceExists(self, name):
return pkg_resources.resource_exists(self.package, name)
def ResourcePath(self, name):
if not self.ResourceExists(name):
raise ResourceNotFound(name)
try:
path = pkg_resources.resource_filename(self.package, name)
except NotImplementedError:
# This can happen if PerfKit Benchmarker is executed from a zip file.
# Extract the resource to the version-specific temporary directory.
path = os.path.join(temp_dir.GetVersionDirPath(), _RESOURCES, name)
if not os.path.exists(path):
dir_path = os.path.dirname(path)
try:
os.makedirs(dir_path)
except OSError:
if not os.path.isdir(dir_path):
raise
with open(path, 'wb') as extracted_file:
shutil.copyfileobj(pkg_resources.resource_stream(self.package, name),
extracted_file)
return path
DATA_PACKAGE_NAME = 'perfkitbenchmarker.data'
SCRIPT_PACKAGE_NAME = 'perfkitbenchmarker.scripts'
CONFIG_PACKAGE_NAME = 'perfkitbenchmarker.configs'
DEFAULT_RESOURCE_LOADERS = [PackageResourceLoader(DATA_PACKAGE_NAME),
PackageResourceLoader(SCRIPT_PACKAGE_NAME),
PackageResourceLoader(CONFIG_PACKAGE_NAME)]
def _GetResourceLoaders():
"""Gets a list of registered ResourceLoaders.
Returns:
List of ResourceLoader instances. FileResourceLoaders for paths in
FLAGS.data_search_paths will be listed first, followed by
DEFAULT_RESOURCE_LOADERS.
"""
loaders = []
# Add all paths to list if they are specified on the command line (will warn
# if any are invalid).
# Otherwise add members of the default list iff they exist.
if FLAGS['data_search_paths'].present:
for path in FLAGS.data_search_paths:
loaders.append(FileResourceLoader(path))
else:
for path in FLAGS.data_search_paths:
if os.path.isdir(path):
loaders.append(FileResourceLoader(path))
loaders.extend(DEFAULT_RESOURCE_LOADERS)
return loaders
def ResourcePath(resource_name, search_user_paths=True):
"""Gets the filename of a resource.
Loaders are searched in order until the resource is found.
If no loader provides 'resource_name', an exception is thrown.
If 'search_user_paths' is true, the directories specified by
"--data_search_paths" are consulted before the default paths.
Args:
resource_name: string. Name of a resource.
search_user_paths: boolean. Whether paths from "--data_search_paths" should
be searched before the default paths.
Returns:
A path to the resource on the filesystem.
Raises:
ResourceNotFound: When resource was not found.
"""
if search_user_paths:
loaders = _GetResourceLoaders()
else:
loaders = DEFAULT_RESOURCE_LOADERS
for loader in loaders:
if loader.ResourceExists(resource_name):
return loader.ResourcePath(resource_name)
raise ResourceNotFound(
'{0} (Searched: {1})'.format(resource_name, loaders))
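# Hedged usage sketch (not part of the original module): resolving a file
# through a FileResourceLoader rooted at a temporary directory. The file name
# 'example.cfg' is a placeholder.
def _example_file_loader():
    import tempfile
    root = tempfile.mkdtemp()
    with open(os.path.join(root, 'example.cfg'), 'w') as f:
        f.write('key: value\n')
    loader = FileResourceLoader(root)
    assert loader.ResourceExists('example.cfg')
    return loader.ResourcePath('example.cfg')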
|
xiaolihope/PerfKitBenchmarker-1.7.0
|
perfkitbenchmarker/data/__init__.py
|
Python
|
apache-2.0
| 6,390
| 0.007355
|
#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import os
import string
import sys
import TestSCons
_python_ = TestSCons._python_
_exe = TestSCons._exe
test = TestSCons.TestSCons()
test.write('myrpcgen.py', """
import getopt
import string
import sys
cmd_opts, args = getopt.getopt(sys.argv[1:], 'chlmo:', [])
for opt, arg in cmd_opts:
if opt == '-o': output = open(arg, 'wb')
output.write(string.join(sys.argv) + "\\n")
for a in args:
contents = open(a, 'rb').read()
output.write(string.replace(contents, 'RPCGEN', 'myrpcgen.py'))
output.close()
sys.exit(0)
""")
test.write('SConstruct', """\
env = Environment(RPCGEN = r'%(_python_)s myrpcgen.py',
tools=['default', 'rpcgen'])
env.RPCGenHeader('rpcif')
env.RPCGenClient('rpcif')
env.RPCGenService('rpcif')
env.RPCGenXDR('rpcif')
""" % locals())
test.write('rpcif.x', """\
RPCGEN
""")
test.run()
output = "myrpcgen.py %s -o %s rpcif.x\nmyrpcgen.py\n"
expect_clnt = output % ('-l', test.workpath('rpcif_clnt.c'))
expect_h = output % ('-h', test.workpath('rpcif.h'))
expect_svc = output % ('-m', test.workpath('rpcif_svc.c'))
expect_xdr = output % ('-c', test.workpath('rpcif_xdr.c'))
test.must_match('rpcif_clnt.c', expect_clnt)
test.must_match('rpcif.h', expect_h)
test.must_match('rpcif_svc.c', expect_svc)
test.must_match('rpcif_xdr.c', expect_xdr)
rpcgen = test.where_is('rpcgen')
if rpcgen:
test.subdir('do_rpcgen')
test.write('SConstruct', """\
import os
env = Environment(ENV=os.environ)
env.Program('rpcclnt', ['rpcclnt.c', 'do_rpcgen/rpcif_clnt.c'])
env.RPCGenHeader('do_rpcgen/rpcif')
env.RPCGenClient('do_rpcgen/rpcif')
env.RPCGenService('do_rpcgen/rpcif')
env.RPCGenXDR('do_rpcgen/rpcif')
""")
test.write(['do_rpcgen', 'rpcif.x'], """\
program RPCTEST_IF
{
version RPCTEST_IF_VERSION
{
int START(unsigned long) = 1;
int STOP(unsigned long) = 2;
int STATUS(unsigned long) = 3;
} = 1; /* version */
} = 0xfeedf00d; /* portmap program ID */
""")
# Following test tries to make sure it can compile and link, but when
# it's run it doesn't actually invoke any rpc operations because that
# would have significant dependencies on network configuration,
# portmapper, etc. that aren't necessarily appropriate for an scons
# test.
test.write('rpcclnt.c', """\
#include <rpc/rpc.h>
#include <rpc/pmap_clnt.h>
#include "do_rpcgen/rpcif.h"
int main(int argc, char **args) {
const char* const SERVER = "localhost";
CLIENT *cl;
int *rslt;
unsigned long arg = 0;
if (argc > 2) {
cl = clnt_create( SERVER, RPCTEST_IF, RPCTEST_IF_VERSION, "udp" );
if (cl == 0 ) { return 1; }
rslt = start_1(&arg, cl);
if (*rslt == 0) { clnt_perror( cl, SERVER ); return 1; }
clnt_destroy(cl);
} else
printf("Hello!\\n");
return 0;
}
""")
test.run()
test.run(program=test.workpath('rpcclnt'+_exe))
test.fail_test(not test.stdout() in ["Hello!\n", "Hello!\r\n"])
test.pass_test()
|
datalogics/scons
|
test/Rpcgen/RPCGEN.py
|
Python
|
mit
| 4,064
| 0.000984
|
# -*- coding: utf-8 -*-
# Copyright 2012 Jaap Karssenberg <jaap.karssenberg@gmail.com>
import gtk
from zim.fs import TrashNotSupportedError
from zim.config import XDG_DATA_HOME, data_file
from zim.templates import list_template_categories, list_templates
from zim.gui.widgets import Dialog, BrowserTreeView, Button, ScrolledWindow
class TemplateEditorDialog(Dialog):
'''Dialog with a tree of available templates for export and new pages.
Allows edit, delete, and create new templates. Uses external editor.
'''
def __init__(self, ui):
Dialog.__init__(self, ui,
_('Templates'), help='Help:Templates', buttons=gtk.BUTTONS_CLOSE,
defaultwindowsize=(400, 450))
# T: Dialog title
label = gtk.Label()
label.set_markup('<b>'+_('Templates')+'</b>')
# T: Section in dialog
label.set_alignment(0.0, 0.5)
self.vbox.pack_start(label, False)
hbox = gtk.HBox()
self.vbox.add(hbox)
self.view = TemplateListView()
self.view.connect('row-activated', self.on_selection_changed)
hbox.add(ScrolledWindow(self.view))
vbbox = gtk.VButtonBox()
vbbox.set_layout(gtk.BUTTONBOX_START)
hbox.pack_start(vbbox, False)
view_button = Button(stock='gtk-file', label=_('_View')) # T: button label
view_button.connect('clicked', self.on_view)
copy_button = Button(stock='gtk-copy')
copy_button.connect('clicked', self.on_copy)
edit_button = Button(stock='gtk-edit')
edit_button.connect('clicked', self.on_edit)
delete_button = gtk.Button(stock='gtk-remove')
delete_button.connect('clicked', self.on_delete)
for b in (view_button, copy_button, edit_button, delete_button):
b.set_alignment(0.0, 0.5)
vbbox.add(b)
browse_button = Button(_('Browse')) # T: button label
browse_button.connect('clicked', self.on_browse)
self.add_extra_button(browse_button)
self._buttonbox = vbbox
self._delete_button = delete_button
self.on_selection_changed()
## Same button appears in export dialog
if gtk.gtk_version >= (2, 10) \
and gtk.pygtk_version >= (2, 10):
url_button = gtk.LinkButton(
'https://github.com/jaap-karssenberg/zim-wiki/wiki/Templates',
_('Get more templates online') # T: label for button with URL
)
self.vbox.pack_start(url_button, False)
def on_selection_changed(self, *a):
# Set sensitivity of the buttons
# All insensitive if category (folder) is selected
# Delete insensitive if only a default
custom, default = self.view.get_selected()
for button in self._buttonbox.get_children():
button.set_sensitive(custom is not None)
if custom is None:
return
if not custom.exists():
self._delete_button.set_sensitive(False)
def on_view(self, *a):
        # Open the file, without waiting for the editor to return
custom, default = self.view.get_selected()
if custom is None:
return # Should not have been sensitive
if custom.exists():
self.ui.open_file(custom)
else:
assert default and default.exists()
self.ui.open_file(default)
def on_copy(self, *a):
# Create a new template in this category
custom, default = self.view.get_selected()
if custom is None:
return # Should not have been sensitive
if custom.exists():
source = custom
else:
assert default and default.exists()
source = default
        name = PromptNameDialog(self).run()
        if not name:
            return  # dialog was cancelled
        _, ext = custom.basename.rsplit('.', 1)
basename = name + '.' + ext
newfile = custom.dir.file(basename)
source.copyto(newfile)
self.view.refresh()
def on_edit(self, *a):
custom, default = self.view.get_selected()
if custom is None:
return # Should not have been sensitive
if not custom.exists():
# Copy default
default.copyto(custom)
self.ui.edit_file(custom, istextfile=True, dialog=self)
self.view.refresh()
def on_delete(self, *a):
# Only delete custom, may result in reset to default
custom, default = self.view.get_selected()
if custom is None or not custom.exists():
return # Should not have been sensitive
try:
custom.trash()
except TrashNotSupportedError:
# TODO warnings
custom.remove()
self.view.refresh()
def on_browse(self, *a):
dir = XDG_DATA_HOME.subdir(('zim', 'templates'))
self.ui.open_dir(dir)
class PromptNameDialog(Dialog):
def __init__(self, ui):
Dialog.__init__(self, ui, _('Copy Template')) # T: Dialog title
self.add_form([
('name', 'string', _('Name')),
# T: Input label for the new name when copying a template
])
def do_response_ok(self):
self.result = self.form['name']
if self.result:
return True
class TemplateListView(BrowserTreeView):
BASENAME_COL = 0
FILE_COL = 1
DEFAULT_COL = 2
def __init__(self):
BrowserTreeView.__init__(self)
model = gtk.TreeStore(str, object, object)
# BASENAME_COL, FILE_COL, DEFAULT_COL
self.set_model(model)
self.set_headers_visible(False)
cell_renderer = gtk.CellRendererText()
column = gtk.TreeViewColumn('_template_', cell_renderer, text=self.BASENAME_COL)
self.append_column(column)
self.refresh()
def get_selected(self):
# Returns (base, default file) or (None, None)
model, iter = self.get_selection().get_selected()
if model is None or iter is None:
return None, None
else:
return model[iter][self.FILE_COL], model[iter][self.DEFAULT_COL]
def refresh(self):
model = self.get_model()
model.clear()
for category in list_template_categories():
parent = model.append(None, (category, None, None))
for name, basename in list_templates(category):
base = XDG_DATA_HOME.file(('zim', 'templates', category, basename))
default = data_file(('templates', category, basename)) # None if not existing
#~ print '>>>', name, base, default
model.append(parent, (name, base, default))
self.expand_all()
|
fabricehong/zim-desktop
|
zim/gui/templateeditordialog.py
|
Python
|
gpl-2.0
| 5,701
| 0.029118
|
import unittest
from db.migrations import migrations_util
class TestMigrationUtil(unittest.TestCase):
"""Test the CLI API."""
@classmethod
def setUpClass(cls):
cls.db_path = '/some/random/path/file.db'
def setUp(self):
self.parser = migrations_util.make_argument_parser(self.db_path)
def test_cli_parser_default(self):
options = self.parser.parse_args(['upgrade'])
self.assertEqual(options.path, self.db_path)
self.assertEqual(options.action, 'upgrade')
def test_cli_parser_user(self):
other_db_path = '/some/other/path/file.db'
options = self.parser.parse_args([
'downgrade',
'--path',
other_db_path
])
self.assertEqual(options.path, other_db_path)
self.assertEqual(options.action, 'downgrade')
def test_cli_parser_bad_action(self):
self.assertRaises(
SystemExit,
self.parser.parse_args,
['retrograde']
)
if __name__ == '__main__':
unittest.main()
|
im0rtel/OpenBazaar
|
tests/test_migrations_util.py
|
Python
|
mit
| 1,060
| 0.000943
|
# -*- coding: utf-8 -*-
# Copyright (c) 2016-present, CloudZero, Inc. All rights reserved.
# Licensed under the BSD-style license. See LICENSE file in the project root for full license information.
"""
Plugin used to extract all contextual information we can from EC2 network-interface (ENI) Cloudtrail Events.
"""
import lambda_tools
import reactor.common.cznef as cznef
# TODO: This set of events needs more scrutiny. It's very likely these lists are not quite right.
logger = lambda_tools.setup_logging('reactor')
event_categories = cznef.CategorizedEventNames()
event_categories.modification_events = {
'AssignIpv6Addresses',
'AssignPrivateIpAddresses',
'AssociateAddress',
'DisassociateAddress',
'ReleaseAddress',
'UnassignIpv6Addresses',
'UnassignPrivateIpAddresses',
'ResetNetworkInterfaceAttribute',
'CreateNetworkInterfacePermission',
'DeleteNetworkInterfacePermission',
}
event_categories.deletion_events = {
'DeleteNetworkInterface',
}
event_categories.read_only_events = {
'DescribeAddresses',
'DescribeNetworkInterfaceAttribute',
'DescribeNetworkInterfacePermissions',
'DescribeNetworkInterfaces',
}
event_categories.creation_events = {
'CreateNetworkInterface',
}
def matching_events():
"""
Defines the set of cloudtrail event names that match this service/resource type.
Returns:
set(str) - the set of event names
"""
return event_categories.all_events
def normalize_event(first_pass_normalized_event):
"""
Given a partially-normalized CZ-NEF event, extract all useful resources and transform it into full CZ-NEF
Args:
first_pass_normalized_event (dict): A partially-processed CZ-NEF event. This version will have some of
the general-purpose fields normalized, but the specifics will be missing.
Returns:
dict - the fully normalized CZ-NEF event
"""
event_name = first_pass_normalized_event['event_name']
logger.debug(f'Running plugin for {event_name}')
normalized_event = {
**first_pass_normalized_event,
'service_type': 'ec2',
'resource_type': 'network-interface',
'event_flags': cznef.EventFlags.compute(event_name, event_categories),
}
logger.debug(f'Finished running plugin for {event_name}')
return normalized_event
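# Hedged usage sketch (not part of the original file). The input dict is a
# hypothetical, heavily trimmed first-pass CZ-NEF event; real events carry
# many more normalized fields.
if __name__ == '__main__':
    sample_event = {'event_name': 'CreateNetworkInterface'}
    print(normalize_event(sample_event))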
|
Cloudzero/cloudzero-reactor-aws
|
reactor/features/cloudtrail_event_source/ingest_plugins/ec2/network_interface.py
|
Python
|
bsd-3-clause
| 2,385
| 0.002935
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Minimal web interface to cve-search to display the last entries
# and view a specific CVE.
#
# Software is free software released under the "Modified BSD license"
#
# Copyright (c) 2017 Pieter-Jan Moreels - pieterjan.moreels@gmail.com
# imports
import json
import os
import subprocess
import sys
_runPath = os.path.dirname(os.path.realpath(__file__))
sys.path.append(os.path.join(_runPath, ".."))
import lib.DatabaseLayer as db
import sbin.db_blacklist as bl
import sbin.db_whitelist as wl
from bson import json_util
from flask import Response, request, render_template
from functools import wraps
from io import StringIO
from lib.Authentication import AuthenticationHandler
from web.api import API, APIError
class Advanced_API(API):
def __init__(self):
super().__init__()
routes = [{'r': '/api/admin/whitelist', 'm': ['GET'], 'f': self.api_admin_whitelist},
{'r': '/api/admin/blacklist', 'm': ['GET'], 'f': self.api_admin_blacklist},
{'r': '/api/admin/whitelist/export', 'm': ['GET'], 'f': self.api_admin_whitelist},
{'r': '/api/admin/blacklist/export', 'm': ['GET'], 'f': self.api_admin_blacklist},
{'r': '/api/admin/whitelist/import', 'm': ['PUT'], 'f': self.api_admin_import_whitelist},
{'r': '/api/admin/blacklist/import', 'm': ['PUT'], 'f': self.api_admin_import_blacklist},
{'r': '/api/admin/whitelist/drop', 'm': ['POST'], 'f': self.api_admin_drop_whitelist},
{'r': '/api/admin/blacklist/drop', 'm': ['POST'], 'f': self.api_admin_drop_blacklist},
{'r': '/api/admin/whitelist/add', 'm': ['PUT'], 'f': self.api_admin_add_whitelist},
{'r': '/api/admin/blacklist/add', 'm': ['PUT'], 'f': self.api_admin_add_blacklist},
{'r': '/api/admin/whitelist/remove', 'm': ['PUT'], 'f': self.api_admin_remove_whitelist},
{'r': '/api/admin/blacklist/remove', 'm': ['PUT'], 'f': self.api_admin_remove_blacklist},
{'r': '/api/admin/get_token', 'm': ['GET'], 'f': self.api_admin_get_token},
{'r': '/api/admin/new_token', 'm': ['GET'], 'f': self.api_admin_generate_token},
{'r': '/api/admin/get_session', 'm': ['GET'], 'f': self.api_admin_get_session},
{'r': '/api/admin/start_session', 'm': ['GET'], 'f': self.api_admin_start_session},
{'r': '/api/admin/updatedb', 'm': ['GET'], 'f': self.api_update_db}]
for route in routes: self.addRoute(route)
#############
# Decorator #
#############
def getAuth():
method, auth = (request.headers.get('Authorization')+" ").split(" ", 1) # Adding and removing space to ensure decent split
name, key = (':'+auth.strip()).rsplit(":", 1)
name = name[1:] # Adding and removing colon to ensure decent split
return method, name, key
def authErrors():
# Check auth
if not request.headers.get('Authorization'):
return ({'status': 'error', 'reason': 'Authentication needed'}, 401)
method, name, token = Advanced_API.getAuth()
data = None
if method.lower() not in ['basic', 'token', 'session']:
data = ({'status': 'error', 'reason': 'Authorization method not allowed'}, 400)
else:
try:
authenticated = False
if method.lower() == 'basic':
authenticator = AuthenticationHandler()
if authenticator.validateUser(name, token): authenticated = True
elif method.lower() == 'token':
if db.getToken(name) == token: authenticated = True
elif method.lower() == 'session':
authenticator = AuthenticationHandler()
if authenticator.api_sessions.get(name) == token: authenticated = True
if not authenticated: data = ({'status': 'error', 'reason': 'Authentication failed'}, 401)
except Exception as e:
print(e)
data = ({'status': 'error', 'reason': 'Malformed Authentication String'}, 400)
if data:
return data
else: return None
def token_required(funct):
@wraps(funct)
def api_token(*args, **kwargs):
data = Advanced_API.authErrors()
if data:
return Response(json.dumps(data[0], indent=2, sort_keys=True, default=json_util.default), mimetype='application/json'), data[1]
else: return API.api(funct)(*args, **kwargs)
return api_token
##########
# ROUTES #
##########
# Overriding api_dbInfo to allow for logged-in users to get more info
def api_dbInfo(self):
errors = Advanced_API.authErrors()
        # Grant the extended stats only when authentication actually succeeded.
        admin = errors is None
return API.api(db.getDBStats)(admin)
# Overriding api_documentation to show the documentation for these functions
def api_documentation(self):
return render_template('api.html', advanced = True)
@token_required
def api_admin_whitelist(self):
return db.getWhitelist()
@token_required
def api_admin_blacklist(self):
return db.getBlacklist()
@token_required
def api_admin_import_whitelist(self):
return wl.importWhitelist(StringIO(request.data.decode("utf-8")))
@token_required
def api_admin_import_blacklist(self):
return bl.importBlacklist(StringIO(request.data.decode("utf-8")))
@token_required
def api_admin_drop_whitelist(self):
return wl.dropWhitelist()
@token_required
def api_admin_drop_blacklist(self):
return bl.dropBlacklist()
@token_required
def api_admin_add_whitelist(self):
return wl.insertWhitelist(request.form['cpe'], request.form['type'])
@token_required
def api_admin_add_blacklist(self):
return bl.insertBlacklist(request.form['cpe'], request.form['type'])
@token_required
def api_admin_remove_whitelist(self):
return wl.removeWhitelist(request.form['cpe'])
@token_required
def api_admin_remove_blacklist(self):
return bl.removeBlacklist(request.form['cpe'])
@token_required # Of course only the login credentials would work
def api_admin_get_token(self):
method, name, key = Advanced_API.getAuth()
return db.getToken(name)
@token_required
def api_admin_generate_token(self):
method, name, key = Advanced_API.getAuth()
return db.generateToken(name)
@token_required
def api_admin_get_session(self):
method, name, key = Advanced_API.getAuth()
_session = AuthenticationHandler().get_api_session(name)
if not _session: raise(APIError("Session not started", 412))
return _session
@token_required
def api_admin_start_session(self):
method, name, key = Advanced_API.getAuth()
return AuthenticationHandler().new_api_session(name)
@token_required
def api_update_db(self):
process = subprocess.Popen([sys.executable, os.path.join(_runPath, "../sbin/db_updater.py"), "-civ"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = process.communicate()
return "%s\n\nErrors:\n%s"%(str(out,'utf-8'),str(err,'utf-8')) if err else str(out,'utf-8')
if __name__ == '__main__':
server = Advanced_API()
server.start()
|
deontp/misc
|
zenoic_api/cve-search-master/web/advanced_api.py
|
Python
|
gpl-3.0
| 7,190
| 0.017524
|
from django.db import models
from django.contrib.auth.models import User
from django.contrib import admin
from api.models import Hackathon
from hackfsu_com.admin import hackfsu_admin
class WifiCred(models.Model):
hackathon = models.ForeignKey(to=Hackathon, on_delete=models.CASCADE)
username = models.CharField(max_length=100)
password = models.CharField(max_length=100)
assigned_user = models.OneToOneField(User, on_delete=models.SET_NULL, default=None, null=True, blank=True)
def __str__(self):
return '[WifiCred {}]'.format(self.username)
@admin.register(WifiCred, site=hackfsu_admin)
class WifiCredAdmin(admin.ModelAdmin):
list_filter = ('hackathon',)
list_display = ('username', 'assigned_user',)
list_editable = ()
list_display_links = ('username',)
    search_fields = ('assigned_user__username',)  # a plain FK name is not a valid search lookup
ordering = ('assigned_user',)
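# Hedged helper sketch (not part of the original module): claiming the first
# unassigned credential for a user. Illustrative only; the real project may
# handle assignment elsewhere.
def assign_free_cred(hackathon, user):
    cred = WifiCred.objects.filter(hackathon=hackathon,
                                   assigned_user__isnull=True).first()
    if cred is not None:
        cred.assigned_user = user
        cred.save(update_fields=['assigned_user'])
    return cred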
|
andrewsosa/hackfsu_com
|
api/api/models/wifi_cred.py
|
Python
|
apache-2.0
| 879
| 0.002275
|
# Copyright 2021, Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Federated CIFAR-10 classification library using TFF."""
import functools
from typing import Callable, Optional
from absl import logging
import tensorflow as tf
import tensorflow_federated as tff
from fedopt_guide import training_loop
from utils.datasets import cifar10_dataset
from utils.models import resnet_models
CIFAR_SHAPE = (32, 32, 3)
NUM_CLASSES = 10  # CIFAR-10 has ten classes.
def run_federated(
iterative_process_builder: Callable[..., tff.templates.IterativeProcess],
client_epochs_per_round: int,
client_batch_size: int,
clients_per_round: int,
client_datasets_random_seed: Optional[int] = None,
crop_size: Optional[int] = 24,
total_rounds: Optional[int] = 1500,
experiment_name: Optional[str] = 'federated_cifar10',
root_output_dir: Optional[str] = '/tmp/fed_opt',
uniform_weighting: Optional[bool] = False,
**kwargs):
"""Runs an iterative process on the CIFAR-10 classification task.
This method will load and pre-process dataset and construct a model used for
the task. It then uses `iterative_process_builder` to create an iterative
process that it applies to the task, using
`federated_research.utils.training_loop`.
We assume that the iterative process has the following functional type
signatures:
* `initialize`: `( -> S@SERVER)` where `S` represents the server state.
* `next`: `<S@SERVER, {B*}@CLIENTS> -> <S@SERVER, T@SERVER>` where `S`
represents the server state, `{B*}` represents the client datasets,
and `T` represents a python `Mapping` object.
The iterative process must also have a callable attribute `get_model_weights`
that takes as input the state of the iterative process, and returns a
`tff.learning.ModelWeights` object.
Args:
iterative_process_builder: A function that accepts a no-arg `model_fn`, and
returns a `tff.templates.IterativeProcess`. The `model_fn` must return a
`tff.learning.Model`.
client_epochs_per_round: An integer representing the number of epochs of
training performed per client in each training round.
client_batch_size: An integer representing the batch size used on clients.
clients_per_round: An integer representing the number of clients
participating in each round.
client_datasets_random_seed: An optional int used to seed which clients are
sampled at each round. If `None`, no seed is used.
crop_size: An optional integer representing the resulting size of input
images after preprocessing.
total_rounds: The number of federated training rounds.
experiment_name: The name of the experiment being run. This will be appended
to the `root_output_dir` for purposes of writing outputs.
root_output_dir: The name of the root output directory for writing
experiment outputs.
uniform_weighting: Whether to weigh clients uniformly. If false, clients are
weighted by the number of samples.
**kwargs: Additional arguments configuring the training loop. For details on
supported arguments, see `federated_research/utils/training_utils.py`.
"""
crop_shape = (crop_size, crop_size, 3)
cifar_train, _ = cifar10_dataset.get_federated_datasets(
train_client_epochs_per_round=client_epochs_per_round,
train_client_batch_size=client_batch_size,
crop_shape=crop_shape)
_, cifar_test = cifar10_dataset.get_centralized_datasets(
crop_shape=crop_shape)
input_spec = cifar_train.create_tf_dataset_for_client(
cifar_train.client_ids[0]).element_spec
model_builder = functools.partial(
resnet_models.create_resnet18,
input_shape=crop_shape,
num_classes=NUM_CLASSES)
loss_builder = tf.keras.losses.SparseCategoricalCrossentropy
metrics_builder = lambda: [tf.keras.metrics.SparseCategoricalAccuracy()]
def tff_model_fn() -> tff.learning.Model:
return tff.learning.from_keras_model(
keras_model=model_builder(),
input_spec=input_spec,
loss=loss_builder(),
metrics=metrics_builder())
if uniform_weighting:
client_weight_fn = tff.learning.ClientWeighting.UNIFORM
else:
client_weight_fn = tff.learning.ClientWeighting.NUM_EXAMPLES
training_process = iterative_process_builder(tff_model_fn, client_weight_fn)
client_datasets_fn = functools.partial(
tff.simulation.build_uniform_sampling_fn(
dataset=cifar_train.client_ids,
random_seed=client_datasets_random_seed), # pytype: disable=wrong-keyword-args # gen-stub-imports
size=clients_per_round)
evaluate_fn = tff.learning.build_federated_evaluation(
tff_model_fn, use_experimental_simulation_loop=True)
def validation_fn(model_weights, round_num):
del round_num
return evaluate_fn(model_weights, [cifar_test])
def test_fn(model_weights):
return evaluate_fn(model_weights, [cifar_test])
logging.info('Training model:')
logging.info(model_builder().summary())
training_loop.run(
iterative_process=training_process,
train_client_datasets_fn=client_datasets_fn,
evaluation_fn=validation_fn,
test_fn=test_fn,
total_rounds=total_rounds,
experiment_name=experiment_name,
root_output_dir=root_output_dir,
**kwargs)
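# Hedged usage sketch (not part of the original library): a hypothetical
# `iterative_process_builder` based on FedAvg. The exact
# build_federated_averaging_process signature (notably `client_weighting`)
# varies across TFF releases, so treat this as an assumption rather than the
# project's actual builder.
def example_process_builder(model_fn, client_weighting):
    return tff.learning.build_federated_averaging_process(
        model_fn,
        client_optimizer_fn=lambda: tf.keras.optimizers.SGD(learning_rate=0.1),
        server_optimizer_fn=lambda: tf.keras.optimizers.SGD(learning_rate=1.0),
        client_weighting=client_weighting)
# Example invocation (hypothetical hyperparameters):
# run_federated(example_process_builder, client_epochs_per_round=1,
#               client_batch_size=20, clients_per_round=10)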
|
google-research/federated
|
fedopt_guide/cifar10_resnet/federated_cifar10.py
|
Python
|
apache-2.0
| 5,796
| 0.004313
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from libcloud.common.base import ConnectionUserAndKey, BaseDriver
from libcloud.backup.types import BackupTargetType
__all__ = [
"BackupTarget",
"BackupDriver",
"BackupTargetJob",
"BackupTargetRecoveryPoint",
]
class BackupTarget(object):
"""
A backup target
"""
def __init__(self, id, name, address, type, driver, extra=None):
"""
:param id: Target id
:type id: ``str``
:param name: Name of the target
:type name: ``str``
:param address: Hostname, FQDN, IP, file path etc.
:type address: ``str``
:param type: Backup target type (Physical, Virtual, ...).
:type type: :class:`.BackupTargetType`
:param driver: BackupDriver instance.
:type driver: :class:`.BackupDriver`
:param extra: (optional) Extra attributes (driver specific).
:type extra: ``dict``
"""
self.id = str(id) if id else None
self.name = name
self.address = address
self.type = type
self.driver = driver
self.extra = extra or {}
def update(self, name=None, address=None, extra=None):
return self.driver.update_target(
target=self, name=name, address=address, extra=extra
)
def delete(self):
return self.driver.delete_target(target=self)
def _get_numeric_id(self):
target_id = self.id
if target_id.isdigit():
target_id = int(target_id)
return target_id
def __repr__(self):
return "<Target: id=%s, name=%s, address=%s" "type=%s, provider=%s ...>" % (
self.id,
self.name,
self.address,
self.type,
self.driver.name,
)
class BackupTargetJob(object):
"""
A backup target job
"""
def __init__(self, id, status, progress, target, driver, extra=None):
"""
:param id: Job id
:type id: ``str``
:param status: Status of the job
:type status: :class:`BackupTargetJobStatusType`
:param progress: Progress of the job, as a percentage
:type progress: ``int``
:param target: BackupTarget instance.
:type target: :class:`.BackupTarget`
:param driver: BackupDriver instance.
:type driver: :class:`.BackupDriver`
:param extra: (optional) Extra attributes (driver specific).
:type extra: ``dict``
"""
self.id = str(id) if id else None
self.status = status
self.progress = progress
self.target = target
self.driver = driver
self.extra = extra or {}
def cancel(self):
return self.driver.cancel_target_job(job=self)
def suspend(self):
return self.driver.suspend_target_job(job=self)
def resume(self):
return self.driver.resume_target_job(job=self)
def __repr__(self):
return "<Job: id=%s, status=%s, progress=%s" "target=%s, provider=%s ...>" % (
self.id,
self.status,
self.progress,
self.target.id,
self.driver.name,
)
class BackupTargetRecoveryPoint(object):
"""
A backup target recovery point
"""
def __init__(self, id, date, target, driver, extra=None):
"""
:param id: Job id
:type id: ``str``
:param date: The date taken
:type date: :class:`datetime.datetime`
:param target: BackupTarget instance.
:type target: :class:`.BackupTarget`
:param driver: BackupDriver instance.
:type driver: :class:`.BackupDriver`
:param extra: (optional) Extra attributes (driver specific).
:type extra: ``dict``
"""
self.id = str(id) if id else None
self.date = date
self.target = target
self.driver = driver
self.extra = extra or {}
def recover(self, path=None):
"""
Recover this recovery point
:param path: The part of the recovery point to recover (optional)
:type path: ``str``
:rtype: Instance of :class:`.BackupTargetJob`
"""
return self.driver.recover_target(
target=self.target, recovery_point=self, path=path
)
def recover_to(self, recovery_target, path=None):
"""
Recover this recovery point out of place
:param recovery_target: Backup target with to recover the data to
:type recovery_target: Instance of :class:`.BackupTarget`
:param path: The part of the recovery point to recover (optional)
:type path: ``str``
:rtype: Instance of :class:`.BackupTargetJob`
"""
return self.driver.recover_target_out_of_place(
target=self.target,
recovery_point=self,
recovery_target=recovery_target,
path=path,
)
def __repr__(self):
return "<RecoveryPoint: id=%s, date=%s, " "target=%s, provider=%s ...>" % (
self.id,
self.date,
self.target.id,
self.driver.name,
)
class BackupDriver(BaseDriver):
"""
A base BackupDriver class to derive from
This class is always subclassed by a specific driver.
"""
connectionCls = ConnectionUserAndKey
name = None
website = None
def __init__(self, key, secret=None, secure=True, host=None, port=None, **kwargs):
"""
:param key: API key or username to used (required)
:type key: ``str``
:param secret: Secret password to be used (required)
:type secret: ``str``
:param secure: Whether to use HTTPS or HTTP. Note: Some providers
only support HTTPS, and it is on by default.
:type secure: ``bool``
:param host: Override hostname used for connections.
:type host: ``str``
:param port: Override port used for connections.
:type port: ``int``
:return: ``None``
"""
super(BackupDriver, self).__init__(
key=key, secret=secret, secure=secure, host=host, port=port, **kwargs
)
def get_supported_target_types(self):
"""
Get a list of backup target types this driver supports
:return: ``list`` of :class:``BackupTargetType``
"""
raise NotImplementedError(
"get_supported_target_types not implemented for this driver"
)
def list_targets(self):
"""
List all backuptargets
:rtype: ``list`` of :class:`.BackupTarget`
"""
raise NotImplementedError("list_targets not implemented for this driver")
def create_target(self, name, address, type=BackupTargetType.VIRTUAL, extra=None):
"""
Creates a new backup target
:param name: Name of the target
:type name: ``str``
:param address: Hostname, FQDN, IP, file path etc.
:type address: ``str``
:param type: Backup target type (Physical, Virtual, ...).
:type type: :class:`BackupTargetType`
:param extra: (optional) Extra attributes (driver specific).
:type extra: ``dict``
:rtype: Instance of :class:`.BackupTarget`
"""
raise NotImplementedError("create_target not implemented for this driver")
def create_target_from_node(self, node, type=BackupTargetType.VIRTUAL, extra=None):
"""
Creates a new backup target from an existing node.
By default, this will use the first public IP of the node
        :param node: The Node to back up
:type node: ``Node``
:param type: Backup target type (Physical, Virtual, ...).
:type type: :class:`BackupTargetType`
:param extra: (optional) Extra attributes (driver specific).
:type extra: ``dict``
:rtype: Instance of :class:`.BackupTarget`
"""
return self.create_target(
            name=node.name, address=node.public_ips[0], type=type, extra=extra
)
def create_target_from_storage_container(
self, container, type=BackupTargetType.OBJECT, extra=None
):
"""
Creates a new backup target from an existing storage container
        :param container: The Container to back up
        :type container: ``Container``
:param type: Backup target type (Physical, Virtual, ...).
:type type: :class:`BackupTargetType`
:param extra: (optional) Extra attributes (driver specific).
:type extra: ``dict``
:rtype: Instance of :class:`.BackupTarget`
"""
return self.create_target(
            name=container.name, address=container.get_cdn_url(), type=type, extra=extra
)
def update_target(self, target, name, address, extra):
"""
Update the properties of a backup target
:param target: Backup target to update
:type target: Instance of :class:`.BackupTarget`
:param name: Name of the target
:type name: ``str``
:param address: Hostname, FQDN, IP, file path etc.
:type address: ``str``
:param extra: (optional) Extra attributes (driver specific).
:type extra: ``dict``
:rtype: Instance of :class:`.BackupTarget`
"""
raise NotImplementedError("update_target not implemented for this driver")
def delete_target(self, target):
"""
Delete a backup target
:param target: Backup target to delete
:type target: Instance of :class:`.BackupTarget`
"""
raise NotImplementedError("delete_target not implemented for this driver")
def list_recovery_points(self, target, start_date=None, end_date=None):
"""
List the recovery points available for a target
        :param target: Backup target to list recovery points for
:type target: Instance of :class:`.BackupTarget`
:param start_date: The start date to show jobs between (optional)
:type start_date: :class:`datetime.datetime`
:param end_date: The end date to show jobs between (optional)
        :type end_date: :class:`datetime.datetime`
:rtype: ``list`` of :class:`.BackupTargetRecoveryPoint`
"""
raise NotImplementedError(
"list_recovery_points not implemented for this driver"
)
def recover_target(self, target, recovery_point, path=None):
"""
Recover a backup target to a recovery point
        :param target: Backup target to recover
:type target: Instance of :class:`.BackupTarget`
:param recovery_point: Backup target with the backup data
:type recovery_point: Instance of :class:`.BackupTarget`
:param path: The part of the recovery point to recover (optional)
:type path: ``str``
:rtype: Instance of :class:`.BackupTargetJob`
"""
raise NotImplementedError("recover_target not implemented for this driver")
def recover_target_out_of_place(
self, target, recovery_point, recovery_target, path=None
):
"""
Recover a backup target to a recovery point out-of-place
:param target: Backup target with the backup data
:type target: Instance of :class:`.BackupTarget`
:param recovery_point: Backup target with the backup data
:type recovery_point: Instance of :class:`.BackupTarget`
:param recovery_target: Backup target with to recover the data to
:type recovery_target: Instance of :class:`.BackupTarget`
:param path: The part of the recovery point to recover (optional)
:type path: ``str``
:rtype: Instance of :class:`BackupTargetJob`
"""
raise NotImplementedError(
"recover_target_out_of_place not implemented for this driver"
)
def get_target_job(self, target, id):
"""
Get a specific backup job by ID
:param target: Backup target with the backup data
:type target: Instance of :class:`.BackupTarget`
        :param id: ID of the backup job to fetch
        :type id: ``str``
:rtype: :class:`BackupTargetJob`
"""
jobs = self.list_target_jobs(target)
return list(filter(lambda x: x.id == id, jobs))[0]
def list_target_jobs(self, target):
"""
List the backup jobs on a target
:param target: Backup target with the backup data
:type target: Instance of :class:`.BackupTarget`
:rtype: ``list`` of :class:`.BackupTargetJob`
"""
raise NotImplementedError("list_target_jobs not implemented for this driver")
def create_target_job(self, target, extra=None):
"""
Create a new backup job on a target
:param target: Backup target with the backup data
:type target: Instance of :class:`.BackupTarget`
:param extra: (optional) Extra attributes (driver specific).
:type extra: ``dict``
:rtype: Instance of :class:`BackupTargetJob`
"""
raise NotImplementedError("create_target_job not implemented for this driver")
def resume_target_job(self, job):
"""
Resume a suspended backup job on a target
:param job: Backup target job to resume
:type job: Instance of :class:`.BackupTargetJob`
:rtype: ``bool``
"""
raise NotImplementedError("resume_target_job not implemented for this driver")
def suspend_target_job(self, job):
"""
Suspend a running backup job on a target
:param job: Backup target job to suspend
:type job: Instance of :class:`.BackupTargetJob`
:rtype: ``bool``
"""
raise NotImplementedError("suspend_target_job not implemented for this driver")
def cancel_target_job(self, job):
"""
Cancel a backup job on a target
:param job: Backup target job to cancel
:type job: Instance of :class:`.BackupTargetJob`
:rtype: ``bool``
"""
raise NotImplementedError("cancel_target_job not implemented for this driver")
|
apache/libcloud
|
libcloud/backup/base.py
|
Python
|
apache-2.0
| 15,294
| 0.001177
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from GestureAgentsTUIO.Tuio import TuioAgentGenerator
import GestureAgentsPygame.Screen as Screen
from pygame.locals import *
class MouseAsTuioAgentGenerator(object):
def __init__(self):
self.pressed = False
self.myagent = None
self.sid = -1
self.screensize = Screen.size
def event(self, e):
if e.type == MOUSEBUTTONDOWN:
self.pressed = True
self.myagent = TuioAgentGenerator.makeCursorAgent()
self._updateAgent(self.myagent, e)
self.myagent.newAgent(self.myagent)
self.myagent.newCursor(self.myagent)
elif e.type == MOUSEBUTTONUP:
self.pressed = False
self._updateAgent(self.myagent, e)
self.myagent.removeCursor(self.myagent)
self.myagent.finish()
self.myagent = None
elif e.type == MOUSEMOTION:
if self.pressed:
self._updateAgent(self.myagent, e)
self.myagent.updateCursor(self.myagent)
def _updateAgent(self, a, e):
a.pos = e.pos
a.posx = e.pos[0]
a.posy = e.pos[1]
a.sessionid = self.sid
a.xmot = 0
a.ymot = 0
a.mot_accel = 0
|
chaosct/GestureAgents
|
GestureAgentsPygame/Mouse.py
|
Python
|
mit
| 1,272
| 0
|
from django import forms
from django.core.files import File
from django.conf import settings
from .widgets import FPFileWidget
import urllib2
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
class FPFieldMixin():
widget = FPFileWidget
default_mimetypes = "*/*"
def initialize(self, apikey=None, mimetypes=None, services=None, additional_params=None):
"""
Initializes the Filepicker field.
Valid arguments:
* apikey. This string is required if it isn't set as settings.FILEPICKER_API_KEY
* mimetypes. Optional, the allowed mimetypes for files. Defaults to "*/*" (all files)
* services. Optional, the allowed services to pull from.
* additional_params. Optional, additional parameters to be applied.
"""
self.apikey = apikey or getattr(settings, 'FILEPICKER_API_KEY', None)
if not self.apikey:
raise Exception("Cannot find filepicker.io api key." +
" Be sure to either pass as the apikey argument when creating the FPFileField," +
" or set it as settings.FILEPICKER_API_KEY. To get a key, go to https://filepicker.io")
self.mimetypes = mimetypes or self.default_mimetypes
if not isinstance(self.mimetypes, basestring):
#If mimetypes is an array, form a csv string
try:
self.mimetypes = ",".join(iter(self.mimetypes))
except TypeError:
self.mimetypes = str(self.mimetypes)
self.services = services or getattr(settings, 'FILEPICKER_SERVICES', None)
self.additional_params = additional_params or getattr(settings, 'FILEPICKER_ADDITIONAL_PARAMS', None)
def widget_attrs(self, widget):
attrs = {
'data-fp-apikey': self.apikey,
'data-fp-mimetypes': self.mimetypes,
}
if self.services:
attrs['data-fp-option-services'] = self.services
if self.additional_params:
attrs = dict(attrs.items() + self.additional_params.items())
return attrs
class FPUrlField(FPFieldMixin, forms.URLField):
widget = FPFileWidget
default_mimetypes = "*/*"
def __init__(self, *args, **kwargs):
"""
Initializes the Filepicker url field.
Valid arguments:
* apikey. This string is required if it isn't set as settings.FILEPICKER_API_KEY
* mimetypes. Optional, the allowed mimetypes for files. Defaults to "*/*" (all files)
* services. Optional, the allowed services to pull from.
* additional_params. Optional, additional parameters to be applied.
"""
self.initialize(
apikey=kwargs.pop('apikey', None),
mimetypes=kwargs.pop('mimetypes', None),
services=kwargs.pop('services', None),
additional_params=kwargs.pop('additional_params', None),
)
super(FPUrlField, self).__init__(*args, **kwargs)
class FPFileField(FPFieldMixin, forms.FileField):
def __init__(self, *args, **kwargs):
"""
        Initializes the Filepicker file field.
Valid arguments:
* apikey. This string is required if it isn't set as settings.FILEPICKER_API_KEY
* mimetypes. Optional, the allowed mimetypes for files. Defaults to "*/*" (all files)
* services. Optional, the allowed services to pull from.
* additional_params. Optional, additional parameters to be applied.
"""
self.initialize(
apikey=kwargs.pop('apikey', None),
mimetypes=kwargs.pop('mimetypes', None),
services=kwargs.pop('services', None),
additional_params=kwargs.pop('additional_params', None),
)
super(FPFileField, self).__init__(*args, **kwargs)
def to_python(self, data):
"""Takes the url in data and creates a File object"""
if not data or not data.startswith('http'):
return None
url_fp = urllib2.urlopen(data)
name = "fp-file"
disposition = url_fp.info().getheader('Content-Disposition')
if disposition:
name = disposition.rpartition("filename=")[2].strip('" ')
filename = url_fp.info().getheader('X-File-Name')
if filename:
name = filename
size = long(url_fp.info().getheader('Content-Length', 0))
fp = File(StringIO(url_fp.read()), name=name)
fp.size = size
return fp
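# Hedged usage sketch (not part of the original module): declaring a Django
# form with the fields above. The API key and field names are placeholders;
# a real key comes from filepicker.io or settings.FILEPICKER_API_KEY.
class ExampleUploadForm(forms.Form):
    picture = FPFileField(apikey='YOUR_FILEPICKER_API_KEY',
                          mimetypes=('image/png', 'image/jpeg'))
    source_url = FPUrlField(apikey='YOUR_FILEPICKER_API_KEY')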
|
thethomaseffect/travers-media-tools
|
django_filepicker/forms.py
|
Python
|
mit
| 4,508
| 0.003327
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
"""
"Stone-taking game 1" (from p. 6 of 「C言語による最新アルゴリズム事典」 [Encyclopedia of the Latest Algorithms in C])
"""
def get_num(message):
sys.stdout.write(message)
n = ''
while (not n.isdigit()):
n = raw_input()
return int(n)
n = get_num(u"石の数?")
m = get_num(u"1回に取れる最大の石の数?")
if (n < 1 or m < 1):
sys.exit(1)
my_turn = True
while(n != 0):
if (my_turn):
x = (n - 1) % (m + 1)
if (x == 0):
x = 1
print(u"私は%d個の石を取ります." % x)
else:
r = False
while (not r or x <= 0 or m < x or n < x):
x = get_num(u"何個取りますか?")
r = True
n -= x
print(u"石の残り: %d個" % n)
my_turn = not my_turn
if (my_turn):
print(u"あなたの負けです!")
else:
print(u"私の負けです!")
|
ryu22e/algorithm1000
|
ishi/ishi1.py
|
Python
|
mit
| 922
| 0.001337
|
import numpy as np
### 1: IH
### 2: EH
### 3: AE
### 4: AO
### 6: AH
### 7: UH
###
### 11: iyC beat
### 12: iyF be
### 21: eyC bait
### 22: eyF bay
### 41: ayV buy
### 47: ayO bite
### 61: oy boy
### 42: aw bough
### 62: owC boat
### 63: owF bow
### 72: uwC boot
### 73: uwF too
### 82: iw suit
### 43: ah father
### 53: oh bought
MEANS = {'1': [525.2877, 1941.229, 4.429892, 4.79822],
'2': [663.079, 1847.689, 4.542639, 4.867548],
'3': [728.7752, 1893.15, 4.615404, 4.989836],
'5': [810.9155, 1336.642, 4.839556, 4.933221],
'6': [714.8795, 1448.528, 4.671836, 4.831577],
'7': [543.7372, 1288.546, 4.424041, 4.69879],
'11': [411.3944, 2275.093, 4.39259, 4.796765],
'12': [440.3333, 2197.091, 4.476098, 4.667141],
'14': [441.1307, 2233.011, 4.403062, 4.860253],
'21': [599.2163, 1978.542, 4.40423, 4.894037],
'22': [592.5507, 1959.899, 4.087865, 4.802181],
'24': [569.8613, 1991.994, 4.516866, 4.941955],
'33': [620.1378, 2059.224, 4.347911, 5.027536],
'39': [766.087, 1829.261, 4.693657, 5.013284],
'41': [808.0645, 1449.711, 4.8443, 4.95776],
'42': [782.8331, 1672.451, 4.788051, 5.007045],
'43': [780.8631, 1326.295, 4.705, 4.908504],
'44': [718.2819, 1273.59, 4.702502, 4.840136],
'47': [777.8737, 1478.317, 4.718795, 4.919554],
'53': [740.2186, 1167.617, 4.744859, 4.794929],
'54': [557.1122, 975.9273, 4.660808, 4.762645],
'61': [539.7747, 982.9505, 4.592872, 4.76811],
'62': [628.9169, 1342.568, 4.514038, 4.772455],
'63': [620.0192, 1332.923, 4.057321, 4.700364],
'64': [528.9181, 953.4962, 4.608001, 4.762555],
'72': [452.4824, 1282.609, 4.364288, 4.775122],
'73': [445.9345, 1819.35, 4.312133, 4.828277],
'74': [467.2353, 1204.176, 4.356453, 4.634414],
'82': [416.0622, 1873.91, 4.364174, 4.858582],
'94': [553.9443, 1486.107, 4.564759, 4.965292]}
COVS = {'1': np.matrix([[8156.961,4075.974,13.05440,6.823964],
[4075.974,85957.07,23.81249,31.39354],
[13.05440,23.81249,0.425308,0.02637788],
[6.823964,31.39354,0.02637788,0.2685742]]),
'2': np.matrix([[12610.98, 4598.212, 15.72022, 10.93343],
[4598.212, 76695.1, 35.53953, 32.24821],
[15.72022, 35.53953, 0.3856857, 0.04138077],
[10.93343, 32.24821, 0.04138077, 0.2402458]]),
'3': np.matrix([[20287.03, -945.841, 23.69788, 19.12778],
[-945.841, 85500.7, 32.35261, 42.61164],
[23.69788, 32.35261, 0.408185, 0.05798509],
[19.12778, 42.61164, 0.05798509, 0.2402007]]),
'5': np.matrix([[14899.38, 14764.41, 31.29953, 25.64715],
[14764.41, 33089.55, 30.80144, 35.65717],
[31.29953, 30.80144, 0.3399745, 0.1391051],
[25.64715, 35.65717, 0.1391051, 0.2939521]]),
'6': np.matrix([[10963.32, 11881.45, 24.02174, 14.15601],
[11881.45, 50941.8, 30.80307, 29.26477],
[24.02174, 30.80307, 0.356582, 0.08377454],
[14.15601, 29.26477, 0.08377454, 0.2798376]]),
'7': np.matrix([[7374.7, 6907.065, 14.77475, 6.575189],
[6907.065, 103775.4, -6.194884, 33.8729],
[14.77475, -6.194884, 0.3619565, 0.08537324],
[6.575189, 33.8729, 0.08537324, 0.3309069]]),
'11': np.matrix([[7398.308, 111.3878, 14.47063, 5.261133],
[111.3878, 112484.4, 4.204222, 27.97763],
[14.47063, 4.204222, 0.439087, 0.01820014],
[5.261133, 27.97763, 0.01820014, 0.2864814]]),
'12': np.matrix([[8980.604, 16.4375, 12.43177, 6.508381],
[16.4375, 68185.02, -41.39826, 43.07926],
[12.43177, -41.39826, 0.4922286, 0.04943888],
[6.508381, 43.07926, 0.04943888, 0.2746969]]),
'14': np.matrix([[5766.88, 1678.53, 13.6561, 6.172833],
[1678.53, 97981.07, -18.30658, 5.520951],
[13.6561, -18.30658, 0.391424, 0.02907505],
[6.172833, 5.520951, 0.02907505, 0.2467823]]),
'21': np.matrix([[11345.63, 902.1107, 15.79774, 8.412416],
[902.1107, 94016.08, 21.16553, 52.47692],
[15.79774, 21.16553, 0.3749903, 0.04202547],
[8.412416, 52.47692, 0.04202547, 0.2549386]]),
'22': np.matrix([[7981.016, 7101.174, 15.52651, 7.784475],
[7101.174, 67936.53, 30.4288, 81.06186],
[15.52651, 30.4288, 0.4057237, 0.07124884],
[7.784475, 81.06186, 0.07124884, 0.4493804]]),
'24': np.matrix([[7187.811, 4778.768, 11.81843, 8.616023],
[4778.768, 97292.62, 24.02699, 46.71447],
[11.81843, 24.02699, 0.3862976, 0.05487306],
[8.616023, 46.71447, 0.05487306, 0.2361443]]),
'33': np.matrix([[13020.63, -808.1123, 24.56315, 8.443287],
[-808.1123, 86325.97, 40.21192, 34.7022],
[24.56315, 40.21192, 0.4743995, 0.04472998],
[8.443287, 34.7022, 0.04472998, 0.2473551]]),
'39': np.matrix([[9703.72, 5470.067, 24.62053, 27.96038],
[5470.067, 24951.84, 1.931964, 29.95240],
[24.62053, 1.931964, 0.2513445, 0.06440874],
[27.96038, 29.95240, 0.06440874, 0.1886862]]),
'41': np.matrix([[15762.87, 13486.23, 34.61164, 22.15451],
[13486.23, 36003.67, 33.8431, 30.52712],
[34.61164, 33.8431, 0.4143354, 0.1125765],
[22.15451, 30.52712, 0.1125765, 0.2592451]]),
'42': np.matrix([[17034.35, 8582.368, 28.08871, 21.32564],
[8582.368, 83324.55, 22.75919, 38.33975],
[28.08871, 22.75919, 0.3619946, 0.06974927],
[21.32564, 38.33975, 0.06974927, 0.2425371]]),
'43': np.matrix([[12651.21, 14322.93, 32.66122, 27.76152],
[14322.93, 31322.54, 35.98834, 42.55531],
[32.66122, 35.98834, 0.3651260, 0.1821268],
[27.76152, 42.55531, 0.1821268, 0.3104338]]),
'44': np.matrix([[11222.69, 12217.39, 25.91937, 20.97844],
[12217.39, 42712.38, 31.49909, 51.63623],
[25.91937, 31.49909, 0.3007976, 0.1284959],
[20.97844, 51.63623, 0.1284959, 0.3128419]]),
'47': np.matrix([[14093.57, 9982.23, 34.45142, 19.68046],
[9982.23, 45110.74, 35.51612, 32.38417],
[34.45142, 35.51612, 0.3875129, 0.1126590],
[19.68046, 32.38417, 0.1126590, 0.2684052]]),
'53': np.matrix([[13901.81, 14774.98, 29.65039, 23.37561],
[14774.98, 28293.08, 26.55524, 28.10525],
[29.65039, 26.55524, 0.3192664, 0.1368551],
[23.37561, 28.10525, 0.1368551, 0.3102375]]),
'54': np.matrix([[9024.312, 11004.40, 14.01676, 6.774474],
[11004.40, 31347.50, 0.5099728, 1.338353],
[14.01676, 0.5099728, 0.3226124, 0.1001887],
[6.774474, 1.338353, 0.1001887, 0.3517336]]),
'61': np.matrix([[8717.966, 8360.663, 9.581423, -3.629271],
[8360.663, 32997.70, -18.37126, -13.78926],
[9.581423, -18.37126, 0.31812, 0.09862598],
[-3.629271, -13.78926, 0.09862598, 0.3626406]]),
'62': np.matrix([[11036.78, 18957.63, 21.16886, 10.91295],
[18957.63, 86701.64, 15.58485, 35.06782],
[21.16886, 15.58485, 0.3620286, 0.08347947],
[10.91295, 35.06782, 0.08347947, 0.2859568]]),
'63': np.matrix([[11190.96, 16442.24, 34.42818, 9.032116],
[16442.24, 53108.15, 44.34654, 47.59889],
[34.42818, 44.34654, 0.2837371, -0.000626268],
[9.032116, 47.59889, -0.000626268, 0.4513407]]),
'64': np.matrix([[7020.379, 9304.635, 11.09179, 2.643800],
[9304.635, 34884.03, -2.304886, -0.4383724],
[11.09179, -2.304886, 0.3025123, 0.09179999],
[2.643800, -0.4383724, 0.09179999, 0.3638192]]),
'72': np.matrix([[5302.16, 8112.09, 11.229, -1.767770],
[8112.09, 142019.8, -1.869954, 25.76638],
[11.229, -1.869954, 0.4222974, 0.03546093],
[-1.767770, 25.76638, 0.03546093, 0.3773977]]),
'73': np.matrix([[5441.397, 6032.27, 6.348957, 0.7710968],
[6032.27, 89482.47, -10.52576, 19.44117],
[6.348957, -10.52576, 0.418909, 0.01018179],
[0.7710968, 19.44117, 0.01018179, 0.2577171]]),
'74': np.matrix([[3658.316, -3584.357, 6.224247, 9.464968],
[-3584.357, 51303.03, -78.23124, 26.34888],
[6.224247, -78.23124, 0.3590685, 0.04111837],
[9.464968, 26.34888, 0.04111837, 0.2657895]]),
'82': np.matrix([[5067.216, 3725.284, 8.112584, -2.087986],
[3725.284, 95441.09, 4.191305, 8.484181],
[8.112584, 4.191305, 0.4392269, 0.02049446],
[-2.087986, 8.484181, 0.02049446, 0.284428]]),
'94': np.matrix([[7035.538, 4075.101, 14.86012, 4.748889],
[4075.101, 41818.21, 26.42395, 26.1902],
[14.86012, 26.42395, 0.3585293, 0.03962729],
[4.748889, 26.1902, 0.03962729, 0.2598092]])}
|
mmcauliffe/linguistic-helper-functions
|
linghelper/phonetics/vowels/mahalanobis.py
|
Python
|
gpl-3.0
| 11,375
| 0.008176
|
"""
File: tonality_permutation.py
Purpose: Class defining a function on one tonality based on a permutation specification.
"""
from transformation.functions.tonalfunctions.tonal_permutation import TonalPermutation
class TonalityPermutation(TonalPermutation):
"""
Class implementation of a permutation on a set of tones.
    More restrictive than Permutation, in that the tones of a given tonality are the elements of the permutation.
"""
    def __init__(self, tonality, cycles=None):
        """
        Constructor
        :param tonality: tonality whose tones form the domain of the permutation.
        :param cycles: optional list of cycles (lists of tones) specifying the permutation.
        """
        # Avoid the mutable-default-argument pitfall; default to no cycles.
        cycles = cycles if cycles is not None else []
        self._tonality = tonality
        # The last annotation entry is excluded from the permutation domain.
        domain_tones = tonality.annotation[:len(tonality.annotation) - 1]
        TonalPermutation.__init__(self, cycles, domain_tones)
@property
def tonality(self):
return self._tonality
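# Illustrative usage (a sketch, not part of the original module; assumes a
# Tonality object whose `annotation` lists the scale tones with the root
# repeated at the end, as the constructor above implies):
#
#   tonality = Tonality(...)                 # hypothetical construction
#   perm = TonalityPermutation(tonality, cycles=[['C', 'E', 'G']])
#   perm.tonality is tonality                # -> True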
|
dpazel/music_rep
|
transformation/functions/tonalfunctions/tonality_permutation.py
|
Python
|
mit
| 830
| 0.003614
|
# -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
from frappe.model.document import Document
from frappe.contacts.address_and_contact import load_address_and_contact
STANDARD_USERS = ("Guest", "Administrator")
class Member(Document):
def onload(self):
"""Load address and contacts in `__onload`"""
load_address_and_contact(self)
def validate(self):
if self.name not in STANDARD_USERS:
self.validate_email_type(self.email)
self.validate_email_type(self.name)
def validate_email_type(self, email):
from frappe.utils import validate_email_add
validate_email_add(email.strip(), True)
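# Illustrative behaviour (a note, not part of the original file): for any
# member other than the STANDARD_USERS, validate() runs frappe's
# validate_email_add on both the email field and the document name, so a
# malformed address such as 'not-an-email' raises during save.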
|
indictranstech/erpnext
|
erpnext/non_profit/doctype/member/member.py
|
Python
|
agpl-3.0
| 738
| 0.01897
|
from behave import given, when, then
from genosdb.models import User
from genosdb.exceptions import UserNotFound
# Example MongoDB connection string: 'mongodb://localhost:27017/'
@given('a valid user with values {username}, {password}, {email}, {first_name}, {last_name}')
def step_impl(context, username, password, email, first_name, last_name):
context.base_user = User(username=username, email=email, password=password, first_name=first_name,
last_name=last_name)
@when('I add the user to the collection')
def step_impl(context):
context.user_service.save(context.base_user)
@then('I check {user_name} exists')
def step_impl(context, user_name):
user_exists = context.user_service.exists(user_name)
assert context.base_user.username == user_exists['username']
assert context.base_user.password == user_exists['password']
assert context.base_user.email == user_exists['email']
assert context.base_user.first_name == user_exists['first_name']
assert context.base_user.last_name == user_exists['last_name']
assert user_exists['_id'] is not None
@given('I update {username} {field} with {value}')
def step_impl(context, username, field, value):
user = context.user_service.exists(username)
if user is not None:
user[field] = value
context.user_service.update(user.to_json())
else:
raise UserNotFound(username, "User was not found")
@then('I check {username} {field} is {value}')
def step_impl(context, username, field, value):
user = context.user_service.exists(username)
if user is not None:
assert user[field] == value
else:
raise UserNotFound(username, "User was not found")
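# Illustrative feature lines matching the steps above (a sketch; the user
# values are hypothetical):
#
#   Given a valid user with values jdoe, s3cret, jdoe@example.com, John, Doe
#   When I add the user to the collection
#   Then I check jdoe exists
#   Given I update jdoe first_name with Johnny
#   Then I check jdoe first_name is Johnny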
|
jonrf93/genos
|
dbservices/tests/functional_tests/steps/user_service_steps.py
|
Python
|
mit
| 1,685
| 0.003561
|
import sys
from arrowhead.core import Step
from arrowhead.core import ErrorArrow
from arrowhead.core import NormalArrow
from arrowhead.core import ValueArrow
def print_flow_state(flow, active_step_name=None, file=sys.stdout):
"""
Display the state of a given flow.
:param flow:
A Flow, instance or class
:param active_step_name:
(optional) name of the active step
:param file:
(optional) file to print to (defaults to sys.stdout)
    This function print()s a multi-line, developer-friendly view of the state
    of the entire flow. The output contains all of the flow's internal state,
    so it may include sensitive values (such as passwords) if you stored any.
"""
# show flow name
print("[{}]".format(flow.Meta.name).center(40, "~"), file=file)
# show flow global state
needs_header = True
for f_k, f_v in flow.__dict__.items():
# private stuff is private
if f_k.startswith("_"):
continue
# steps are handled later
if (isinstance(f_v, Step) or
(isinstance(f_v, type) and issubclass(f_v, Step))):
continue
# skip Meta
if f_k == 'Meta':
continue
if needs_header:
print("STATE:", file=file)
needs_header = False
print("{indent}{key}: {value!r}".format(
indent=" " * 4, key=f_k, value=f_v
), file=file)
# show a list of all the steps, their state as well as a marker that
# shows where we actively are
print("STEPS:", file=file)
for name in flow.Meta.steps.keys():
step = getattr(flow, name)
flags = []
if step.Meta.accepting:
flags.append('A')
if step.Meta.initial == name:
flags.append('I')
if flags:
rendered_flags = " ({})".format(''.join(flags))
else:
rendered_flags = ""
if step.Meta.name == active_step_name:
indent = " => "
else:
indent = " "
print("{indent}{step}{flags:4}".format(
indent=indent, flags=rendered_flags, step=step.Meta.label
), file=file)
        # reset per step so that each step's state gets its own STATE: header
        needs_header = True
for s_k, s_v in step.__dict__.items():
if s_k.startswith("_"):
continue
# skip Meta
if s_k == 'Meta':
continue
if needs_header:
print("STATE:", file=file)
needs_header = False
print("{indent}{key}: {value!r}".format(
indent=" " * 8, key=s_k, value=s_v
), file=file)
print("." * 40, file=file)
def print_dot_graph(flow, active_step_name=None, file=sys.stdout):
"""
Print the dot(1) description of a given flow.
:param flow:
A Flow, instance or class
:param active_step_name:
(optional) name of the active step
:param file:
(optional) file to print to (defaults to sys.stdout)
"""
print('digraph {', file=file)
print('\tnode [shape=box, color=black];', file=file)
print('\tedge [arrowsize=0.5];', file=file)
print(file=file)
print('\tsubgraph {', file=file)
print('\t\tnode [shape=plaintext];', file=file)
# NOTE: levels + 2 because 0 and max are
# for _start and _end that are not
# represented anywhere in the flow. We
# just add them for graphviz
print('\t\t{};'.format(
' -> '.join(str(i) for i in range(flow.Meta.levels + 2))
), file=file)
print('\t}', file=file)
print(file=file)
# NOTE: levels + 2 as above
levels = {i: [] for i in range(flow.Meta.levels + 2)}
levels[0].append('_start')
# NOTE: levels + 1 is the last element
levels[flow.Meta.levels + 1].append('_end')
for step in flow.Meta.steps.values():
levels[step.Meta.level].append(step.Meta.name)
for level, steps in sorted(levels.items()):
print('\t{{ rank=same; {}; {}; }}'.format(
level, '; '.join(steps)
), file=file)
print(file=file)
if active_step_name == '_start':
print('\t_start [shape=circle, style=filled,'
' fillcolor=blue, label=""];', file=file)
else:
print('\t_start [shape=circle, style=filled,'
' fillcolor=black, label=""];', file=file)
for step in flow.Meta.steps.values():
if step.Meta.initial:
print('\t_start -> {};'.format(step.Meta.name), file=file)
print(file=file)
for step in flow.Meta.steps.values():
if active_step_name == step.Meta.name:
print('\t{} [shape={}, label="{}", style=filled, fillcolor=blue, fontcolor=white];'.format(
step.Meta.name, "box",
step.Meta.label.replace('"', '\\"')
), file=file)
else:
print('\t{} [shape={}, label="{}"];'.format(
step.Meta.name, "box",
step.Meta.label.replace('"', '\\"')
), file=file)
for arrow in step.Meta.arrows:
if isinstance(arrow, NormalArrow):
print('\t{} -> {};'.format(
step.Meta.name, arrow.target
), file=file)
elif isinstance(arrow, ValueArrow):
print('\t{} -> {} [label="{}", color=green];'.format(
step.Meta.name, arrow.target, arrow.value
), file=file)
elif isinstance(arrow, ErrorArrow):
print('\t{} -> {} [label="{}", color=red];'.format(
step.Meta.name, arrow.target, arrow.error.__name__
), file=file)
print(file=file)
if active_step_name == '_end':
print('\t_end [shape=doublecircle, style=filled, '
'fillcolor=blue, label=""];', file=file)
else:
print('\t_end [shape=doublecircle, style=filled, '
'fillcolor=black, label=""];', file=file)
for step in flow.Meta.steps.values():
if step.Meta.accepting:
print('\t{} -> _end;'.format(step.Meta.name), file=file)
print("}", file=file)
|
zyga/arrowhead
|
arrowhead/inspector.py
|
Python
|
bsd-3-clause
| 6,120
| 0.000163
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""This file contains everything needed to interface with JUnit"""
#####
# pyCheck
#
# Copyright 2012, erebos42 (https://github.com/erebos42/miscScripts)
#
# This is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 2.1 of
# the License, or (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this software; if not, write to the Free
# Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA, or see the FSF site: http://www.fsf.org.
#####
class IfJUnit(object):
"""Main class for the JUnit interface"""
def __init__(self):
pass
def __del__(self):
pass
    def run_junit_test(self, project_dir, checkstyle_config, exercise):
        """Run a JUnit test for the given exercise (stub; not implemented yet)."""
        pass
|
m-wichmann/pyCheck
|
src/ifJUnit.py
|
Python
|
lgpl-3.0
| 1,199
| 0.000834
|
self.description = "CleanMethod = KeepCurrent"
sp = pmpkg("dummy", "2.0-1")
self.addpkg2db("sync", sp)
sp = pmpkg("bar", "2.0-1")
self.addpkg2db("sync", sp)
sp = pmpkg("baz", "2.0-1")
self.addpkg2db("sync", sp)
lp = pmpkg("dummy", "1.0-1")
self.addpkg2db("local", lp)
lp = pmpkg("bar", "2.0-1")
self.addpkg2db("local", lp)
self.args = "-Sc"
self.option['CleanMethod'] = ['KeepCurrent']
self.createlocalpkgs = True
self.addrule("PACMAN_RETCODE=0")
self.addrule("CACHE_EXISTS=dummy|2.0-1")
self.addrule("!CACHE_EXISTS=dummy|1.0-1")
self.addrule("CACHE_EXISTS=bar|2.0-1")
self.addrule("CACHE_EXISTS=baz|2.0-1")
|
vodik/pacman
|
test/pacman/tests/clean002.py
|
Python
|
gpl-2.0
| 615
| 0
|
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import numpy as np
import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core
import paddle.nn.functional as F
from op_test import OpTest
paddle.enable_static()
np.random.seed(1)
def maxout_forward_naive(x, groups, channel_axis):
    s0, s1, s2, s3 = x.shape
    if channel_axis == 1:
        # channels-first: view the channel axis as (C // groups, groups) and
        # take the max over the group axis
        return np.ndarray([s0, s1 // groups, groups, s2, s3], \
            buffer = x, dtype=x.dtype).max(axis=2)
    # channels-last (axis == -1 or 3): fold the trailing dimension instead
    return np.ndarray([s0, s1, s2, s3 // groups, groups], \
        buffer = x, dtype=x.dtype).max(axis=4)
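# Worked example (illustrative, not part of the original tests): with
# groups=2 and channel_axis=1, an input of shape (1, 4, 1, 1) holding
# [0, 1, 2, 3] is viewed as (1, 2, 2, 1, 1) and max-reduced over the group
# axis:
#
#   _x = np.arange(4, dtype='float64').reshape([1, 4, 1, 1])
#   maxout_forward_naive(_x, 2, 1).reshape(-1)   # -> array([1., 3.])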
class TestMaxOutOp(OpTest):
def setUp(self):
self.op_type = "maxout"
self.dtype = 'float64'
self.shape = [3, 6, 2, 4]
self.groups = 2
self.axis = 1
self.set_attrs()
x = np.random.uniform(-1, 1, self.shape).astype(self.dtype)
out = maxout_forward_naive(x, self.groups, self.axis)
self.inputs = {'X': x}
self.attrs = {'groups': self.groups, 'axis': self.axis}
self.outputs = {'Out': out}
def set_attrs(self):
pass
def test_check_output(self):
self.check_output()
def test_check_grad(self):
self.check_grad(['X'], 'Out')
class TestMaxOutOpAxis0(TestMaxOutOp):
def set_attrs(self):
self.axis = -1
class TestMaxOutOpAxis1(TestMaxOutOp):
def set_attrs(self):
self.axis = 3
class TestMaxOutOpFP32(TestMaxOutOp):
def set_attrs(self):
self.dtype = 'float32'
class TestMaxOutOpGroups(TestMaxOutOp):
def set_attrs(self):
self.groups = 3
class TestMaxoutAPI(unittest.TestCase):
# test paddle.nn.Maxout, paddle.nn.functional.maxout
def setUp(self):
self.x_np = np.random.uniform(-1, 1, [2, 6, 5, 4]).astype(np.float64)
self.groups = 2
self.axis = 1
self.place=paddle.CUDAPlace(0) if core.is_compiled_with_cuda() \
else paddle.CPUPlace()
def test_static_api(self):
with paddle.static.program_guard(paddle.static.Program()):
x = paddle.fluid.data('X', self.x_np.shape, self.x_np.dtype)
out1 = F.maxout(x, self.groups, self.axis)
m = paddle.nn.Maxout(self.groups, self.axis)
out2 = m(x)
exe = paddle.static.Executor(self.place)
res = exe.run(feed={'X': self.x_np}, fetch_list=[out1, out2])
out_ref = maxout_forward_naive(self.x_np, self.groups, self.axis)
for r in res:
self.assertTrue(np.allclose(out_ref, r))
def test_dygraph_api(self):
paddle.disable_static(self.place)
x = paddle.to_tensor(self.x_np)
out1 = F.maxout(x, self.groups, self.axis)
m = paddle.nn.Maxout(self.groups, self.axis)
out2 = m(x)
out_ref = maxout_forward_naive(self.x_np, self.groups, self.axis)
for r in [out1, out2]:
self.assertTrue(np.allclose(out_ref, r.numpy()))
out3 = F.maxout(x, self.groups, -1)
out3_ref = maxout_forward_naive(self.x_np, self.groups, -1)
self.assertTrue(np.allclose(out3_ref, out3.numpy()))
paddle.enable_static()
def test_fluid_api(self):
with fluid.program_guard(fluid.Program()):
x = fluid.data('X', self.x_np.shape, self.x_np.dtype)
out = fluid.layers.maxout(x, groups=self.groups, axis=self.axis)
exe = fluid.Executor(self.place)
res = exe.run(feed={'X': self.x_np}, fetch_list=[out])
out_ref = maxout_forward_naive(self.x_np, self.groups, self.axis)
self.assertTrue(np.allclose(out_ref, res[0]))
paddle.disable_static(self.place)
x = paddle.to_tensor(self.x_np)
out = paddle.fluid.layers.maxout(x, groups=self.groups, axis=self.axis)
self.assertTrue(np.allclose(out_ref, out.numpy()))
paddle.enable_static()
def test_errors(self):
with paddle.static.program_guard(paddle.static.Program()):
# The input type must be Variable.
self.assertRaises(TypeError, F.maxout, 1)
# The input dtype must be float16, float32, float64.
x_int32 = paddle.fluid.data(
name='x_int32', shape=[2, 4, 6, 8], dtype='int32')
self.assertRaises(TypeError, F.maxout, x_int32)
x_float32 = paddle.fluid.data(name='x_float32', shape=[2, 4, 6, 8])
self.assertRaises(ValueError, F.maxout, x_float32, 2, 2)
if __name__ == '__main__':
unittest.main()
|
luotao1/Paddle
|
python/paddle/fluid/tests/unittests/test_maxout_op.py
|
Python
|
apache-2.0
| 5,104
| 0.001763
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
from __future__ import absolute_import
from __future__ import print_function
import io
import re
from glob import glob
from os.path import basename
from os.path import dirname
from os.path import join
from os.path import splitext
from setuptools import find_packages
from setuptools import setup
def read(*names, **kwargs):
return io.open(
join(dirname(__file__), *names),
encoding=kwargs.get('encoding', 'utf8')
).read()
setup(
name='mfs',
version='0.1.0',
license='MIT license',
description='mfs is a set of utilities to ease image download from some Russian modelling forums',
long_description='%s\n%s' % (
re.compile('^.. start-badges.*^.. end-badges', re.M | re.S).sub('', read('README.rst')),
re.sub(':[a-z]+:`~?(.*?)`', r'``\1``', read('CHANGELOG.rst'))
),
author='Alexandre Ovtchinnikov',
author_email='abc@miroag.com',
url='https://github.com/miroag/mfs',
packages=find_packages('src'),
package_dir={'': 'src'},
py_modules=[splitext(basename(path))[0] for path in glob('src/*.py')],
include_package_data=True,
zip_safe=False,
classifiers=[
# complete classifier list: http://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: Unix',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python',
# 'Programming Language :: Python :: 2.7',
# 'Programming Language :: Python :: 3',
# 'Programming Language :: Python :: 3.3',
# 'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
# uncomment if you test on these interpreters:
# 'Programming Language :: Python :: Implementation :: IronPython',
# 'Programming Language :: Python :: Implementation :: Jython',
# 'Programming Language :: Python :: Implementation :: Stackless',
'Topic :: Utilities',
],
keywords=[
# eg: 'keyword1', 'keyword2', 'keyword3',
],
install_requires=[
'beautifulsoup4', 'requests', 'aiohttp', 'tqdm', 'docopt'
],
setup_requires=[
'pytest-runner',
],
tests_require=[
'pytest',
],
extras_require={
# eg:
# 'rst': ['docutils>=0.11'],
# ':python_version=="2.6"': ['argparse'],
},
entry_points={
'console_scripts': [
'mfs = mfs.cli:main',
]
},
)
|
miroag/mfs
|
setup.py
|
Python
|
mit
| 2,863
| 0.001048
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015, Alcatel-Lucent Inc, 2017 Nokia
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from .fetchers import NUPermissionsFetcher
from .fetchers import NUMetadatasFetcher
from .fetchers import NUGlobalMetadatasFetcher
from bambou import NURESTObject
class NUVMResync(NURESTObject):
""" Represents a VMResync in the VSD
Notes:
Provide information about the state of a VM resync request.
"""
__rest_name__ = "resync"
__resource_name__ = "resync"
## Constants
CONST_STATUS_IN_PROGRESS = "IN_PROGRESS"
CONST_ENTITY_SCOPE_GLOBAL = "GLOBAL"
CONST_STATUS_SUCCESS = "SUCCESS"
CONST_ENTITY_SCOPE_ENTERPRISE = "ENTERPRISE"
def __init__(self, **kwargs):
""" Initializes a VMResync instance
Notes:
                You can specify all parameters while calling this method.
A special argument named `data` will enable you to load the
object from a Python dictionary
Examples:
>>> vmresync = NUVMResync(id=u'xxxx-xxx-xxx-xxx', name=u'VMResync')
>>> vmresync = NUVMResync(data=my_dict)
"""
super(NUVMResync, self).__init__()
# Read/Write Attributes
self._last_request_timestamp = None
self._last_time_resync_initiated = None
self._last_updated_by = None
self._last_updated_date = None
self._embedded_metadata = None
self._entity_scope = None
self._creation_date = None
self._status = None
self._owner = None
self._external_id = None
self.expose_attribute(local_name="last_request_timestamp", remote_name="lastRequestTimestamp", attribute_type=int, is_required=False, is_unique=False)
self.expose_attribute(local_name="last_time_resync_initiated", remote_name="lastTimeResyncInitiated", attribute_type=int, is_required=False, is_unique=False)
self.expose_attribute(local_name="last_updated_by", remote_name="lastUpdatedBy", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="last_updated_date", remote_name="lastUpdatedDate", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="embedded_metadata", remote_name="embeddedMetadata", attribute_type=list, is_required=False, is_unique=False)
self.expose_attribute(local_name="entity_scope", remote_name="entityScope", attribute_type=str, is_required=False, is_unique=False, choices=[u'ENTERPRISE', u'GLOBAL'])
self.expose_attribute(local_name="creation_date", remote_name="creationDate", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="status", remote_name="status", attribute_type=str, is_required=False, is_unique=False, choices=[u'IN_PROGRESS', u'SUCCESS'])
self.expose_attribute(local_name="owner", remote_name="owner", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="external_id", remote_name="externalID", attribute_type=str, is_required=False, is_unique=True)
# Fetchers
self.permissions = NUPermissionsFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.metadatas = NUMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.global_metadatas = NUGlobalMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
self._compute_args(**kwargs)
# Properties
@property
def last_request_timestamp(self):
""" Get last_request_timestamp value.
Notes:
Time of the last timestamp received
This attribute is named `lastRequestTimestamp` in VSD API.
"""
return self._last_request_timestamp
@last_request_timestamp.setter
def last_request_timestamp(self, value):
""" Set last_request_timestamp value.
Notes:
Time of the last timestamp received
This attribute is named `lastRequestTimestamp` in VSD API.
"""
self._last_request_timestamp = value
@property
def last_time_resync_initiated(self):
""" Get last_time_resync_initiated value.
Notes:
Time that the resync was initiated
This attribute is named `lastTimeResyncInitiated` in VSD API.
"""
return self._last_time_resync_initiated
@last_time_resync_initiated.setter
def last_time_resync_initiated(self, value):
""" Set last_time_resync_initiated value.
Notes:
Time that the resync was initiated
This attribute is named `lastTimeResyncInitiated` in VSD API.
"""
self._last_time_resync_initiated = value
@property
def last_updated_by(self):
""" Get last_updated_by value.
Notes:
ID of the user who last updated the object.
This attribute is named `lastUpdatedBy` in VSD API.
"""
return self._last_updated_by
@last_updated_by.setter
def last_updated_by(self, value):
""" Set last_updated_by value.
Notes:
ID of the user who last updated the object.
This attribute is named `lastUpdatedBy` in VSD API.
"""
self._last_updated_by = value
@property
def last_updated_date(self):
""" Get last_updated_date value.
Notes:
Time stamp when this object was last updated.
This attribute is named `lastUpdatedDate` in VSD API.
"""
return self._last_updated_date
@last_updated_date.setter
def last_updated_date(self, value):
""" Set last_updated_date value.
Notes:
Time stamp when this object was last updated.
This attribute is named `lastUpdatedDate` in VSD API.
"""
self._last_updated_date = value
@property
def embedded_metadata(self):
""" Get embedded_metadata value.
Notes:
                Metadata objects associated with this entity. This will contain a list of Metadata objects if the API request is made using the special flag to enable the embedded Metadata feature. Only a limited number of Metadata objects is returned, with the maximum set in the system configuration.
This attribute is named `embeddedMetadata` in VSD API.
"""
return self._embedded_metadata
@embedded_metadata.setter
def embedded_metadata(self, value):
""" Set embedded_metadata value.
Notes:
                Metadata objects associated with this entity. This will contain a list of Metadata objects if the API request is made using the special flag to enable the embedded Metadata feature. Only a limited number of Metadata objects is returned, with the maximum set in the system configuration.
This attribute is named `embeddedMetadata` in VSD API.
"""
self._embedded_metadata = value
@property
def entity_scope(self):
""" Get entity_scope value.
Notes:
Specify if scope of entity is Data center or Enterprise level
This attribute is named `entityScope` in VSD API.
"""
return self._entity_scope
@entity_scope.setter
def entity_scope(self, value):
""" Set entity_scope value.
Notes:
Specify if scope of entity is Data center or Enterprise level
This attribute is named `entityScope` in VSD API.
"""
self._entity_scope = value
@property
def creation_date(self):
""" Get creation_date value.
Notes:
Time stamp when this object was created.
This attribute is named `creationDate` in VSD API.
"""
return self._creation_date
@creation_date.setter
def creation_date(self, value):
""" Set creation_date value.
Notes:
Time stamp when this object was created.
This attribute is named `creationDate` in VSD API.
"""
self._creation_date = value
@property
def status(self):
""" Get status value.
Notes:
Status of the resync
"""
return self._status
@status.setter
def status(self, value):
""" Set status value.
Notes:
Status of the resync
"""
self._status = value
@property
def owner(self):
""" Get owner value.
Notes:
Identifies the user that has created this object.
"""
return self._owner
@owner.setter
def owner(self, value):
""" Set owner value.
Notes:
Identifies the user that has created this object.
"""
self._owner = value
@property
def external_id(self):
""" Get external_id value.
Notes:
External object ID. Used for integration with third party systems
This attribute is named `externalID` in VSD API.
"""
return self._external_id
@external_id.setter
def external_id(self, value):
""" Set external_id value.
Notes:
External object ID. Used for integration with third party systems
This attribute is named `externalID` in VSD API.
"""
self._external_id = value
|
nuagenetworks/vspk-python
|
vspk/v6/nuvmresync.py
|
Python
|
bsd-3-clause
| 11,928
| 0.00897
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from multiprocessing.managers import SyncManager, DictProxy
import multiprocessing
import os
import tempfile
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.executor.play_iterator import PlayIterator
from ansible.executor.process.worker import WorkerProcess
from ansible.executor.process.result import ResultProcess
from ansible.executor.stats import AggregateStats
from ansible.playbook.play_context import PlayContext
from ansible.plugins import callback_loader, strategy_loader, module_loader
from ansible.template import Templar
from ansible.vars.hostvars import HostVars
from ansible.plugins.callback import CallbackBase
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
__all__ = ['TaskQueueManager']
class TaskQueueManager:
'''
This class handles the multiprocessing requirements of Ansible by
creating a pool of worker forks, a result handler fork, and a
manager object with shared datastructures/queues for coordinating
work between all processes.
The queue manager is responsible for loading the play strategy plugin,
which dispatches the Play's tasks to hosts.
'''
def __init__(self, inventory, variable_manager, loader, options, passwords, stdout_callback=None, run_additional_callbacks=True, run_tree=False):
self._inventory = inventory
self._variable_manager = variable_manager
self._loader = loader
self._options = options
self._stats = AggregateStats()
self.passwords = passwords
self._stdout_callback = stdout_callback
self._run_additional_callbacks = run_additional_callbacks
self._run_tree = run_tree
self._callbacks_loaded = False
self._callback_plugins = []
self._start_at_done = False
self._result_prc = None
# make sure the module path (if specified) is parsed and
# added to the module_loader object
if options.module_path is not None:
for path in options.module_path.split(os.pathsep):
module_loader.add_directory(path)
# a special flag to help us exit cleanly
self._terminated = False
# this dictionary is used to keep track of notified handlers
self._notified_handlers = dict()
# dictionaries to keep track of failed/unreachable hosts
self._failed_hosts = dict()
self._unreachable_hosts = dict()
self._final_q = multiprocessing.Queue()
# A temporary file (opened pre-fork) used by connection
# plugins for inter-process locking.
self._connection_lockfile = tempfile.TemporaryFile()
def _initialize_processes(self, num):
self._workers = []
for i in range(num):
main_q = multiprocessing.Queue()
rslt_q = multiprocessing.Queue()
self._workers.append([None, main_q, rslt_q])
self._result_prc = ResultProcess(self._final_q, self._workers)
self._result_prc.start()
def _initialize_notified_handlers(self, handlers):
'''
Clears and initializes the shared notified handlers dict with entries
for each handler in the play, which is an empty array that will contain
inventory hostnames for those hosts triggering the handler.
'''
# Zero the dictionary first by removing any entries there.
# Proxied dicts don't support iteritems, so we have to use keys()
for key in self._notified_handlers.keys():
del self._notified_handlers[key]
# FIXME: there is a block compile helper for this...
handler_list = []
for handler_block in handlers:
for handler in handler_block.block:
handler_list.append(handler)
# then initialize it with the handler names from the handler list
for handler in handler_list:
self._notified_handlers[handler.get_name()] = []
def load_callbacks(self):
'''
Loads all available callbacks, with the exception of those which
utilize the CALLBACK_TYPE option. When CALLBACK_TYPE is set to 'stdout',
only one such callback plugin will be loaded.
'''
if self._callbacks_loaded:
return
stdout_callback_loaded = False
if self._stdout_callback is None:
self._stdout_callback = C.DEFAULT_STDOUT_CALLBACK
if isinstance(self._stdout_callback, CallbackBase):
self._callback_plugins.append(self._stdout_callback)
stdout_callback_loaded = True
elif isinstance(self._stdout_callback, basestring):
if self._stdout_callback not in callback_loader:
raise AnsibleError("Invalid callback for stdout specified: %s" % self._stdout_callback)
else:
raise AnsibleError("callback must be an instance of CallbackBase or the name of a callback plugin")
for callback_plugin in callback_loader.all(class_only=True):
if hasattr(callback_plugin, 'CALLBACK_VERSION') and callback_plugin.CALLBACK_VERSION >= 2.0:
# we only allow one callback of type 'stdout' to be loaded, so check
# the name of the current plugin and type to see if we need to skip
# loading this callback plugin
callback_type = getattr(callback_plugin, 'CALLBACK_TYPE', None)
callback_needs_whitelist = getattr(callback_plugin, 'CALLBACK_NEEDS_WHITELIST', False)
(callback_name, _) = os.path.splitext(os.path.basename(callback_plugin._original_path))
if callback_type == 'stdout':
if callback_name != self._stdout_callback or stdout_callback_loaded:
continue
stdout_callback_loaded = True
elif callback_name == 'tree' and self._run_tree:
pass
elif not self._run_additional_callbacks or (callback_needs_whitelist and (C.DEFAULT_CALLBACK_WHITELIST is None or callback_name not in C.DEFAULT_CALLBACK_WHITELIST)):
continue
self._callback_plugins.append(callback_plugin())
self._callbacks_loaded = True
def run(self, play):
'''
Iterates over the roles/tasks in a play, using the given (or default)
strategy for queueing tasks. The default is the linear strategy, which
operates like classic Ansible by keeping all hosts in lock-step with
a given task (meaning no hosts move on to the next task until all hosts
are done with the current task).
'''
if not self._callbacks_loaded:
self.load_callbacks()
all_vars = self._variable_manager.get_vars(loader=self._loader, play=play)
templar = Templar(loader=self._loader, variables=all_vars)
new_play = play.copy()
new_play.post_validate(templar)
self.hostvars = HostVars(
inventory=self._inventory,
variable_manager=self._variable_manager,
loader=self._loader,
)
# Fork # of forks, # of hosts or serial, whichever is lowest
contenders = [self._options.forks, play.serial, len(self._inventory.get_hosts(new_play.hosts))]
contenders = [ v for v in contenders if v is not None and v > 0 ]
self._initialize_processes(min(contenders))
play_context = PlayContext(new_play, self._options, self.passwords, self._connection_lockfile.fileno())
for callback_plugin in self._callback_plugins:
if hasattr(callback_plugin, 'set_play_context'):
callback_plugin.set_play_context(play_context)
self.send_callback('v2_playbook_on_play_start', new_play)
# initialize the shared dictionary containing the notified handlers
self._initialize_notified_handlers(new_play.handlers)
# load the specified strategy (or the default linear one)
strategy = strategy_loader.get(new_play.strategy, self)
if strategy is None:
raise AnsibleError("Invalid play strategy specified: %s" % new_play.strategy, obj=play._ds)
# build the iterator
iterator = PlayIterator(
inventory=self._inventory,
play=new_play,
play_context=play_context,
variable_manager=self._variable_manager,
all_vars=all_vars,
start_at_done = self._start_at_done,
)
# during initialization, the PlayContext will clear the start_at_task
# field to signal that a matching task was found, so check that here
# and remember it so we don't try to skip tasks on future plays
if getattr(self._options, 'start_at_task', None) is not None and play_context.start_at_task is None:
self._start_at_done = True
# and run the play using the strategy and cleanup on way out
play_return = strategy.run(iterator, play_context)
self._cleanup_processes()
return play_return
def cleanup(self):
display.debug("RUNNING CLEANUP")
self.terminate()
self._final_q.close()
self._cleanup_processes()
def _cleanup_processes(self):
if self._result_prc:
self._result_prc.terminate()
for (worker_prc, main_q, rslt_q) in self._workers:
rslt_q.close()
main_q.close()
if worker_prc and worker_prc.is_alive():
worker_prc.terminate()
def clear_failed_hosts(self):
self._failed_hosts = dict()
def get_inventory(self):
return self._inventory
def get_variable_manager(self):
return self._variable_manager
def get_loader(self):
return self._loader
def get_notified_handlers(self):
return self._notified_handlers
def get_workers(self):
return self._workers[:]
def terminate(self):
self._terminated = True
def send_callback(self, method_name, *args, **kwargs):
for callback_plugin in self._callback_plugins:
# a plugin that set self.disabled to True will not be called
# see osx_say.py example for such a plugin
if getattr(callback_plugin, 'disabled', False):
continue
methods = [
getattr(callback_plugin, method_name, None),
getattr(callback_plugin, 'v2_on_any', None)
]
for method in methods:
if method is not None:
try:
method(*args, **kwargs)
except Exception as e:
try:
                            # bound methods are not strings; look up the pre-v2
                            # callback name (e.g. 'v2_on_any' -> 'on_any') on the
                            # plugin itself
                            v1_method = getattr(callback_plugin, method_name.replace('v2_', ''))
                            v1_method(*args, **kwargs)
except Exception:
display.warning('Error when using %s: %s' % (method, str(e)))
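# Illustrative dispatch (a sketch, mirroring how the playbook executor drives
# this class): after all plays finish, statistics are emitted through the same
# callback machinery, e.g.
#
#   tqm.send_callback('v2_playbook_on_stats', tqm._stats)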
|
dermute/ansible
|
lib/ansible/executor/task_queue_manager.py
|
Python
|
gpl-3.0
| 11,908
| 0.002939
|
from __future__ import absolute_import, unicode_literals
import json
import os
from django.contrib.contenttypes.models import ContentType
from django.core.serializers.json import DjangoJSONEncoder
from django.db import models
from django.shortcuts import render
from django.utils.encoding import python_2_unicode_compatible
from django.utils.six import text_type
from django.utils.text import slugify
from django.utils.translation import ugettext_lazy as _
from unidecode import unidecode
from wagtail.wagtailadmin.edit_handlers import FieldPanel
from wagtail.wagtailadmin.utils import send_mail
from wagtail.wagtailcore import hooks
from wagtail.wagtailcore.models import Orderable, Page, UserPagePermissionsProxy, get_page_models
from .forms import FormBuilder, WagtailAdminFormPageForm
FORM_FIELD_CHOICES = (
('singleline', _('Single line text')),
('multiline', _('Multi-line text')),
('email', _('Email')),
('number', _('Number')),
('url', _('URL')),
('checkbox', _('Checkbox')),
('checkboxes', _('Checkboxes')),
('dropdown', _('Drop down')),
('radio', _('Radio buttons')),
('date', _('Date')),
('datetime', _('Date/time')),
)
@python_2_unicode_compatible
class AbstractFormSubmission(models.Model):
"""
Data for a form submission.
You can create custom submission model based on this abstract model.
For example, if you need to save additional data or a reference to a user.
"""
form_data = models.TextField()
page = models.ForeignKey(Page, on_delete=models.CASCADE)
submit_time = models.DateTimeField(verbose_name=_('submit time'), auto_now_add=True)
def get_data(self):
"""
Returns dict with form data.
You can override this method to add additional data.
"""
form_data = json.loads(self.form_data)
form_data.update({
'submit_time': self.submit_time,
})
return form_data
def __str__(self):
return self.form_data
class Meta:
abstract = True
verbose_name = _('form submission')
class FormSubmission(AbstractFormSubmission):
"""Data for a Form submission."""
class AbstractFormField(Orderable):
"""
Database Fields required for building a Django Form field.
"""
label = models.CharField(
verbose_name=_('label'),
max_length=255,
help_text=_('The label of the form field')
)
field_type = models.CharField(verbose_name=_('field type'), max_length=16, choices=FORM_FIELD_CHOICES)
required = models.BooleanField(verbose_name=_('required'), default=True)
choices = models.TextField(
verbose_name=_('choices'),
blank=True,
help_text=_('Comma separated list of choices. Only applicable in checkboxes, radio and dropdown.')
)
default_value = models.CharField(
verbose_name=_('default value'),
max_length=255,
blank=True,
help_text=_('Default value. Comma separated values supported for checkboxes.')
)
help_text = models.CharField(verbose_name=_('help text'), max_length=255, blank=True)
@property
def clean_name(self):
        # unidecode returns an ascii string while slugify wants a unicode
        # string; slugify, in turn, returns a safe-string, which str() then
        # converts to a plain str
return str(slugify(text_type(unidecode(self.label))))
panels = [
FieldPanel('label'),
FieldPanel('help_text'),
FieldPanel('required'),
FieldPanel('field_type', classname="formbuilder-type"),
FieldPanel('choices', classname="formbuilder-choices"),
FieldPanel('default_value', classname="formbuilder-default"),
]
class Meta:
abstract = True
ordering = ['sort_order']
_FORM_CONTENT_TYPES = None
def get_form_types():
global _FORM_CONTENT_TYPES
if _FORM_CONTENT_TYPES is None:
form_models = [
model for model in get_page_models()
if issubclass(model, AbstractForm)
]
_FORM_CONTENT_TYPES = list(
ContentType.objects.get_for_models(*form_models).values()
)
return _FORM_CONTENT_TYPES
def get_forms_for_user(user):
"""
Return a queryset of form pages that this user is allowed to access the submissions for
"""
editable_forms = UserPagePermissionsProxy(user).editable_pages()
editable_forms = editable_forms.filter(content_type__in=get_form_types())
# Apply hooks
for fn in hooks.get_hooks('filter_form_submissions_for_user'):
editable_forms = fn(user, editable_forms)
return editable_forms
class AbstractForm(Page):
"""
A Form Page. Pages implementing a form should inherit from it
"""
form_builder = FormBuilder
base_form_class = WagtailAdminFormPageForm
def __init__(self, *args, **kwargs):
super(AbstractForm, self).__init__(*args, **kwargs)
if not hasattr(self, 'landing_page_template'):
name, ext = os.path.splitext(self.template)
self.landing_page_template = name + '_landing' + ext
class Meta:
abstract = True
def get_form_fields(self):
"""
Form page expects `form_fields` to be declared.
If you want to change backwards relation name,
you need to override this method.
"""
return self.form_fields.all()
def get_data_fields(self):
"""
Returns a list of tuples with (field_name, field_label).
"""
data_fields = [
('submit_time', _('Submission date')),
]
data_fields += [
(field.clean_name, field.label)
for field in self.get_form_fields()
]
return data_fields
def get_form_class(self):
fb = self.form_builder(self.get_form_fields())
return fb.get_form_class()
def get_form_parameters(self):
return {}
def get_form(self, *args, **kwargs):
form_class = self.get_form_class()
form_params = self.get_form_parameters()
form_params.update(kwargs)
return form_class(*args, **form_params)
def get_landing_page_template(self, request, *args, **kwargs):
return self.landing_page_template
def get_submission_class(self):
"""
Returns submission class.
        You can override this method to provide a custom submission class.
        Your class must inherit from AbstractFormSubmission.
"""
return FormSubmission
def process_form_submission(self, form):
"""
        Accepts a form instance with submitted data, user and page.
        Creates a submission instance.
        You can override this method if you want custom creation logic,
        for example to save a reference to a user.
"""
self.get_submission_class().objects.create(
form_data=json.dumps(form.cleaned_data, cls=DjangoJSONEncoder),
page=self,
)
def serve(self, request, *args, **kwargs):
if request.method == 'POST':
form = self.get_form(request.POST, page=self, user=request.user)
if form.is_valid():
self.process_form_submission(form)
# render the landing_page
# TODO: It is much better to redirect to it
return render(
request,
self.get_landing_page_template(request),
self.get_context(request)
)
else:
form = self.get_form(page=self, user=request.user)
context = self.get_context(request)
context['form'] = form
return render(
request,
self.get_template(request),
context
)
preview_modes = [
('form', 'Form'),
('landing', 'Landing page'),
]
def serve_preview(self, request, mode):
if mode == 'landing':
return render(
request,
self.get_landing_page_template(request),
self.get_context(request)
)
else:
return super(AbstractForm, self).serve_preview(request, mode)
class AbstractEmailForm(AbstractForm):
"""
    A Form Page that sends email. Pages implementing a form to be sent by email should inherit from it
"""
to_address = models.CharField(
verbose_name=_('to address'), max_length=255, blank=True,
help_text=_("Optional - form submissions will be emailed to these addresses. Separate multiple addresses by comma.")
)
from_address = models.CharField(verbose_name=_('from address'), max_length=255, blank=True)
subject = models.CharField(verbose_name=_('subject'), max_length=255, blank=True)
def process_form_submission(self, form):
submission = super(AbstractEmailForm, self).process_form_submission(form)
if self.to_address:
self.send_mail(form)
return submission
def send_mail(self, form):
addresses = [x.strip() for x in self.to_address.split(',')]
content = []
for field in form:
value = field.value()
if isinstance(value, list):
value = ', '.join(value)
content.append('{}: {}'.format(field.label, value))
content = '\n'.join(content)
send_mail(self.subject, content, addresses, self.from_address,)
class Meta:
abstract = True
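# Illustrative concrete models (a sketch following the documented Wagtail
# pattern; `FormPage`, the ParentalKey import and the related_name are
# assumptions, not part of this module):
#
#   from modelcluster.fields import ParentalKey
#
#   class FormField(AbstractFormField):
#       page = ParentalKey('FormPage', related_name='form_fields')
#
#   class FormPage(AbstractEmailForm):
#       pass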
|
chrxr/wagtail
|
wagtail/wagtailforms/models.py
|
Python
|
bsd-3-clause
| 9,515
| 0.001261
|
#!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Extract UserMetrics "actions" strings from the Chrome source.
This program generates the list of known actions we expect to see in the
user behavior logs. It walks the Chrome source, looking for calls to
UserMetrics functions, extracting actions and warning on improper calls,
as well as generating the lists of possible actions in situations where
there are many possible actions.
See also:
chrome/browser/user_metrics.h
http://wiki.corp.google.com/twiki/bin/view/Main/ChromeUserExperienceMetrics
Run it from the chrome/browser directory like:
extract_actions.py > actions_list
"""
__author__ = 'evanm (Evan Martin)'
import os
import re
import sys
from google import path_utils
# Files that are known to use UserMetrics::RecordComputedAction(), which means
# they require special handling code in this script.
# To add a new file, add it to this list and add the appropriate logic to
# generate the known actions to AddComputedActions() below.
KNOWN_COMPUTED_USERS = [
'back_forward_menu_model.cc',
'options_page_view.cc',
'render_view_host.cc', # called using webkit identifiers
'user_metrics.cc', # method definition
'new_tab_ui.cc', # most visited clicks 1-9
]
def AddComputedActions(actions):
"""Add computed actions to the actions list.
Arguments:
actions: set of actions to add to.
"""
# Actions for back_forward_menu_model.cc.
for dir in ['BackMenu_', 'ForwardMenu_']:
actions.add(dir + 'ShowFullHistory')
actions.add(dir + 'Popup')
for i in range(1, 20):
actions.add(dir + 'HistoryClick' + str(i))
actions.add(dir + 'ChapterClick' + str(i))
# Actions for new_tab_ui.cc.
for i in range(1, 10):
actions.add('MostVisited%d' % i)
def AddWebKitEditorActions(actions):
"""Add editor actions from editor_client_impl.cc.
Arguments:
actions: set of actions to add to.
"""
action_re = re.compile(r'''\{ [\w']+, +\w+, +"(.*)" +\},''')
editor_file = os.path.join(path_utils.ScriptDir(), '..', '..', 'webkit',
'glue', 'editor_client_impl.cc')
for line in open(editor_file):
match = action_re.search(line)
if match: # Plain call to RecordAction
actions.add(match.group(1))
def GrepForActions(path, actions):
"""Grep a source file for calls to UserMetrics functions.
Arguments:
path: path to the file
actions: set of actions to add to
"""
action_re = re.compile(r'[> ]UserMetrics:?:?RecordAction\(L"(.*)"')
other_action_re = re.compile(r'[> ]UserMetrics:?:?RecordAction\(')
computed_action_re = re.compile(r'UserMetrics::RecordComputedAction')
for line in open(path):
match = action_re.search(line)
if match: # Plain call to RecordAction
actions.add(match.group(1))
elif other_action_re.search(line):
# Warn if this file shouldn't be mentioning RecordAction.
if os.path.basename(path) != 'user_metrics.cc':
print >>sys.stderr, 'WARNING: %s has funny RecordAction' % path
elif computed_action_re.search(line):
# Warn if this file shouldn't be calling RecordComputedAction.
if os.path.basename(path) not in KNOWN_COMPUTED_USERS:
print >>sys.stderr, 'WARNING: %s has RecordComputedAction' % path
def WalkDirectory(root_path, actions):
for path, dirs, files in os.walk(root_path):
if '.svn' in dirs:
dirs.remove('.svn')
for file in files:
ext = os.path.splitext(file)[1]
if ext == '.cc':
GrepForActions(os.path.join(path, file), actions)
def main(argv):
actions = set()
AddComputedActions(actions)
AddWebKitEditorActions(actions)
# Walk the source tree to process all .cc files.
chrome_root = os.path.join(path_utils.ScriptDir(), '..')
WalkDirectory(chrome_root, actions)
webkit_root = os.path.join(path_utils.ScriptDir(), '..', '..', 'webkit')
WalkDirectory(os.path.join(webkit_root, 'glue'), actions)
WalkDirectory(os.path.join(webkit_root, 'port'), actions)
# Print out the actions as a sorted list.
for action in sorted(actions):
print action
if '__main__' == __name__:
main(sys.argv)
|
rwatson/chromium-capsicum
|
chrome/tools/extract_actions.py
|
Python
|
bsd-3-clause
| 4,264
| 0.010319
|
import os.path
import sys
from setuptools import setup, Command
class Tox(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
import tox
sys.exit(tox.cmdline([]))
setup(
name="kafka-python",
version="0.9.0",
install_requires=["distribute"],
tests_require=["tox"],
cmdclass={"test": Tox},
packages=["kafka"],
author="David Arthur",
author_email="mumrah@gmail.com",
url="https://github.com/mumrah/kafka-python",
license="Copyright 2012, David Arthur under Apache License, v2.0",
description="Pure Python client for Apache Kafka",
long_description="""
This module provides low-level protocol support for Apache Kafka as well as
high-level consumer and producer classes. Request batching is supported by the
protocol as well as broker-aware request routing. Gzip and Snappy compression
is also supported for message sets.
"""
)
|
skyportsystems/kafka-python
|
setup.py
|
Python
|
apache-2.0
| 990
| 0
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Feedback'
db.create_table(u'auxiliary_feedback', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('content', self.gf('django.db.models.fields.TextField')()),
('suggested_by', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='feedbacl', null=True, to=orm['auth.User'])),
('ip_address', self.gf('django.db.models.fields.IPAddressField')(max_length=15, null=True, blank=True)),
('user_agent', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
))
db.send_create_signal(u'auxiliary', ['Feedback'])
def backwards(self, orm):
# Deleting model 'Feedback'
db.delete_table(u'auxiliary_feedback')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'auxiliary.feedback': {
'Meta': {'object_name': 'Feedback'},
'content': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_address': ('django.db.models.fields.IPAddressField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'suggested_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'feedbacl'", 'null': 'True', 'to': u"orm['auth.User']"}),
'user_agent': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
u'auxiliary.tidbit': {
'Meta': {'object_name': 'Tidbit'},
'button_link': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'button_text': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content': ('tinymce.models.HTMLField', [], {}),
'icon': ('django.db.models.fields.CharField', [], {'max_length': '15'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'ordering': ('django.db.models.fields.IntegerField', [], {'default': '20', 'db_index': 'True'}),
'photo': ('django.db.models.fields.files.ImageField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'suggested_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'tidbits'", 'null': 'True', 'to': u"orm['auth.User']"}),
'title': ('django.db.models.fields.CharField', [], {'default': "u'Did you know ?'", 'max_length': '40'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['auxiliary']
|
OriHoch/Open-Knesset
|
auxiliary/migrations/0007_auto__add_feedback.py
|
Python
|
bsd-3-clause
| 5,930
| 0.007757
|
"""events
Revision ID: 9c92c85163a9
Revises: 666668eae682
Create Date: 2016-05-09 19:04:44.498817
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '9c92c85163a9'
down_revision = '666668eae682'
def upgrade():
op.create_table('event',
sa.Column('id', sa.BigInteger(), nullable=False),
sa.Column('origin', sa.Unicode(), nullable=True),
sa.Column('data', postgresql.JSONB(), nullable=True),
sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.drop_table('processing_log')
def downgrade():
op.create_table('processing_log',
sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
sa.Column('id', sa.BIGINT(), nullable=False),
sa.Column('operation', sa.VARCHAR(), autoincrement=False, nullable=True),
sa.Column('component', sa.VARCHAR(), autoincrement=False, nullable=True),
sa.Column('source_location', sa.VARCHAR(), autoincrement=False, nullable=True),
sa.Column('content_hash', sa.VARCHAR(length=65), autoincrement=False, nullable=True),
sa.Column('foreign_id', sa.VARCHAR(), autoincrement=False, nullable=True),
sa.Column('source_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('document_id', sa.BIGINT(), autoincrement=False, nullable=True),
sa.Column('meta', postgresql.JSONB(), autoincrement=False, nullable=True),
sa.Column('error_type', sa.VARCHAR(), autoincrement=False, nullable=True),
sa.Column('error_message', sa.VARCHAR(), autoincrement=False, nullable=True),
sa.Column('error_details', sa.VARCHAR(), autoincrement=False, nullable=True),
sa.PrimaryKeyConstraint('id', name=u'processing_log_pkey')
)
op.drop_table('event')
|
gazeti/aleph
|
aleph/migrate/versions/9c92c85163a9_events.py
|
Python
|
mit
| 1,959
| 0.011741
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# @Author: KevinMidboe
# @Date: 2017-02-08 14:00:04
# @Last Modified by: KevinMidboe
# @Last Modified time: 2017-02-16 17:08:08
import requests
from pprint import pprint
try:
from plexSearch import plexSearch
except ImportError:
from plex.plexSearch import plexSearch
apiKey = "9fa154f5355c37a1b9b57ac06e7d6712"
def tmdbSearch(query, page=1):
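    """Query TMDB's multi-search endpoint.
    Returns the parsed JSON response on success, or a dict with an 'errors'
    key describing the failure.
    """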
payload = {"api_key":apiKey, "query":str(query), "language":"en.US", "page":str(page), }
header = {'Accept': 'application/json'}
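    # /search/multi returns movies, TV shows and people mixed in one result list.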
try:
r = requests.get("https://api.themoviedb.org/3/search/multi", params=payload, headers=header)
except requests.exceptions.ConnectionError:
return {"errors": "Could not connecting to: tmdb.com"}
except requests.exceptions.Timeout:
return {"errors": "Request timed out."}
except requests.exceptions.TooManyRedirects:
return {"errors": "Too many redirects, do you full network access?"}
if r.status_code == 401:
return {"errors": "api key is not valid."}
elif r.status_code == 404:
return {"errors": "Please check url. (404)"}
elif r.status_code == requests.codes.ok and r.json()['total_results'] == 0:
return {"errors": "No results found."}
return r.json()
if __name__ == "__main__":
import sys
print(sys.argv)
if len(sys.argv) > 2:
pprint(tmdbSearch(sys.argv[1], int(sys.argv[2])))
elif len(sys.argv) > 1:
pprint(tmdbSearch(sys.argv[1]))
else:
pprint(tmdbSearch("star+wars",1))
|
KevinMidboe/statusHandler
|
plex/tmdb.py
|
Python
|
mit
| 1,451
| 0.026189
|
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
    def levelOrderBottom(self, root):
        # Collect values level by level, then reverse so the deepest level
        # comes first (bottom-up level order traversal).
        levels = []
        self.helper(levels, root, 0)
        return levels[::-1]
    def helper(self, levels, root, level):
        if root is None:
            return
        # Open a new bucket the first time this depth is reached.
        if level >= len(levels):
            levels.append([])
        levels[level].append(root.val)
        self.helper(levels, root.left, level + 1)
        self.helper(levels, root.right, level + 1)
from TestObjects import *
b = BinaryTree()
s = Solution()
print s.levelOrderBottom(b.root)
|
Jspsun/LEETCodePractice
|
Python/BinaryTreeLevelOrderTraversal2.py
|
Python
|
mit
| 687
| 0.002911
|
from __future__ import absolute_import
from datetime import datetime
from django.core.urlresolvers import reverse
from sentry.models import Release
from sentry.testutils import APITestCase
class ProjectReleaseListTest(APITestCase):
def test_simple(self):
self.login_as(user=self.user)
team = self.create_team(owner=self.user)
project1 = self.create_project(team=team, name='foo')
project2 = self.create_project(team=team, name='bar')
release1 = Release.objects.create(
project=project1,
version='1',
date_added=datetime(2013, 8, 13, 3, 8, 24, 880386),
)
release2 = Release.objects.create(
project=project1,
version='2',
date_added=datetime(2013, 8, 14, 3, 8, 24, 880386),
)
Release.objects.create(
project=project2,
version='1',
)
url = reverse('sentry-api-0-project-releases', kwargs={
'project_id': project1.id,
})
response = self.client.get(url, format='json')
assert response.status_code == 200, response.content
assert len(response.data) == 2
assert response.data[0]['version'] == release2.version
assert response.data[1]['version'] == release1.version
class ProjectReleaseCreateTest(APITestCase):
def test_simple(self):
self.login_as(user=self.user)
team = self.create_team(owner=self.user)
project = self.create_project(team=team, name='foo')
url = reverse('sentry-api-0-project-releases', kwargs={
'project_id': project.id,
})
response = self.client.post(url, data={
'version': 'abcdef',
})
assert response.status_code == 201, response.content
assert response.data['version']
assert Release.objects.filter(
project=project,
version=response.data['version'],
).exists()
|
jokey2k/sentry
|
tests/sentry/api/endpoints/test_project_releases.py
|
Python
|
bsd-3-clause
| 1,979
| 0
|
import unittest
from convert import convert
class TestConvert(unittest.TestCase):
def testEmptyJsonParse(self):
generated = convert.parse(convert._load_json_files("./jsonSamples/minimal.json")[0])
def testGlossaryJsonParse(self):
generated = convert.parse(convert._load_json_files("./jsonSamples/Glossary.json")[0])
generated = convert.generate("Test", ["cs"], generated)
for f in generated:
print "".join(f["content"])
|
HenrikPoulsen/Json2Class
|
test/testConvert.py
|
Python
|
mit
| 475
| 0.006316
|
#!/usr/bin/python
# -*- encoding: utf-8; py-indent-offset: 4 -*-
# +------------------------------------------------------------------+
# | ____ _ _ __ __ _ __ |
# | / ___| |__ ___ ___| | __ | \/ | |/ / |
# | | | | '_ \ / _ \/ __| |/ / | |\/| | ' / |
# | | |___| | | | __/ (__| < | | | | . \ |
# | \____|_| |_|\___|\___|_|\_\___|_| |_|_|\_\ |
# | |
# | Copyright Mathias Kettner 2014 mk@mathias-kettner.de |
# +------------------------------------------------------------------+
#
# This file is part of Check_MK.
# The official homepage is at http://mathias-kettner.de/check_mk.
#
# check_mk is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation in version 2. check_mk is distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY; with-
# out even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the GNU General Public License for more de-
# tails. You should have received a copy of the GNU General Public
# License along with GNU Make; see the file COPYING. If not, write
# to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
# Boston, MA 02110-1301 USA.
# This script is called by snmptrapd and sends
# all traps to the mkeventd
#
# Bastian Kuhn, bk@mathias-kettner.de
# If you use this script please keep in mind that this script is called
# for every trap the server receives.
# To use this script, you have to configure your snmptrapd.conf like this:
# authCommunity execute public
# traphandle default /path/to/this/script
# Define the Hostname patterns here:
hostname_patterns = [
'SMI::enterprises.2349.2.2.2.5 = "(.*)"'
]
import time
import sys
import re
# Insert here the name of your omd site
site_name = "TESTSITE"
daemon_path = "/omd/sites/%s/tmp/run/mkeventd/events" % site_name
data = []
match_host = False
for line in sys.stdin:
line = line.strip()
if hostname_patterns:
for pattern in hostname_patterns:
e = re.search(pattern, line)
if e:
match_host = e.group(1)
data.append(line)
msg = " ".join(data[2:])
host, ip = data[:2]
if match_host:
host = match_host.strip()
# Write to the mkeventd socket
out = open(daemon_path, "w")
timestamp = time.strftime("%b %d %H:%M:%S", time.localtime(time.time()))
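# mkeventd accepts syslog-style lines; the '<5>' prefix below is the syslog priority (5 = notice).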
out.write("<5>%s %s trap: %s\n" % (timestamp, host, msg))
out.close()
|
huiyiqun/check_mk
|
doc/treasures/Event_Console/snmptd_mkevent.py
|
Python
|
gpl-2.0
| 2,674
| 0.00187
|
# -*- coding: utf-8 -*-
import pathlib
from typing import Union
import lxml.etree
def save_as_xml(
element_tree: Union[lxml.etree._Element, lxml.etree._ElementTree],
filepath: Union[str, pathlib.Path],
pretty_print: bool = True) -> None:
"""save ElementTree in the file as XML
Args:
element_tree (lxml.etree._ElementTree): the ElementTree to be save.
filepath (str, pathlib.Path): The path of the File to be output as XML.
pretty_print (bool) optional:
The Argument of lxml.etree.tostring.
Defaults to True.
"""
if not isinstance(filepath, pathlib.Path):
filepath = pathlib.Path(filepath)
with filepath.open(mode='w', encoding='utf-8', newline='') as file:
file.write(lxml.etree.tostring(
element_tree,
encoding='utf-8',
pretty_print=pretty_print,
xml_declaration=True).decode('utf-8'))
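# A minimal usage sketch (the tree and file name below are placeholders):
if __name__ == '__main__':
    _tree = lxml.etree.ElementTree(lxml.etree.fromstring(b'<root><item/></root>'))
    save_as_xml(_tree, 'example.xml')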
|
085astatine/togetter
|
togetter/xml_tools.py
|
Python
|
mit
| 972
| 0
|
import unittest
from test import test_support
import UserDict, random, string
import gc, weakref
class DictTest(unittest.TestCase):
def test_constructor(self):
# calling built-in types without argument must return empty
self.assertEqual(dict(), {})
self.assert_(dict() is not {})
def test_literal_constructor(self):
        # check literal constructor for different sized dicts (to exercise the BUILD_MAP oparg)
for n in (0, 1, 6, 256, 400):
items = [(''.join([random.choice(string.letters)
for j in range(8)]),
i)
for i in range(n)]
random.shuffle(items)
dictliteral = '{' + ', '.join('%r: %d' % item for item in items) + '}'
self.assertEqual(eval(dictliteral), dict(items))
def test_bool(self):
self.assert_(not {})
self.assert_({1: 2})
self.assert_(bool({}) is False)
self.assert_(bool({1: 2}) is True)
def test_keys(self):
d = {}
self.assertEqual(d.keys(), [])
d = {'a': 1, 'b': 2}
k = d.keys()
self.assert_(d.has_key('a'))
self.assert_(d.has_key('b'))
self.assertRaises(TypeError, d.keys, None)
def test_values(self):
d = {}
self.assertEqual(d.values(), [])
d = {1:2}
self.assertEqual(d.values(), [2])
self.assertRaises(TypeError, d.values, None)
def test_items(self):
d = {}
self.assertEqual(d.items(), [])
d = {1:2}
self.assertEqual(d.items(), [(1, 2)])
self.assertRaises(TypeError, d.items, None)
def test_has_key(self):
d = {}
self.assert_(not d.has_key('a'))
d = {'a': 1, 'b': 2}
k = d.keys()
k.sort()
self.assertEqual(k, ['a', 'b'])
self.assertRaises(TypeError, d.has_key)
def test_contains(self):
d = {}
self.assert_(not ('a' in d))
self.assert_('a' not in d)
d = {'a': 1, 'b': 2}
self.assert_('a' in d)
self.assert_('b' in d)
self.assert_('c' not in d)
self.assertRaises(TypeError, d.__contains__)
def test_len(self):
d = {}
self.assertEqual(len(d), 0)
d = {'a': 1, 'b': 2}
self.assertEqual(len(d), 2)
def test_getitem(self):
d = {'a': 1, 'b': 2}
self.assertEqual(d['a'], 1)
self.assertEqual(d['b'], 2)
d['c'] = 3
d['a'] = 4
self.assertEqual(d['c'], 3)
self.assertEqual(d['a'], 4)
del d['b']
self.assertEqual(d, {'a': 4, 'c': 3})
self.assertRaises(TypeError, d.__getitem__)
class BadEq(object):
def __eq__(self, other):
raise Exc()
def __hash__(self):
return 24
d = {}
d[BadEq()] = 42
self.assertRaises(KeyError, d.__getitem__, 23)
class Exc(Exception): pass
class BadHash(object):
fail = False
def __hash__(self):
if self.fail:
raise Exc()
else:
return 42
x = BadHash()
d[x] = 42
x.fail = True
self.assertRaises(Exc, d.__getitem__, x)
def test_clear(self):
d = {1:1, 2:2, 3:3}
d.clear()
self.assertEqual(d, {})
self.assertRaises(TypeError, d.clear, None)
def test_update(self):
d = {}
d.update({1:100})
d.update({2:20})
d.update({1:1, 2:2, 3:3})
self.assertEqual(d, {1:1, 2:2, 3:3})
d.update()
self.assertEqual(d, {1:1, 2:2, 3:3})
self.assertRaises((TypeError, AttributeError), d.update, None)
class SimpleUserDict:
def __init__(self):
self.d = {1:1, 2:2, 3:3}
def keys(self):
return self.d.keys()
def __getitem__(self, i):
return self.d[i]
d.clear()
d.update(SimpleUserDict())
self.assertEqual(d, {1:1, 2:2, 3:3})
class Exc(Exception): pass
d.clear()
class FailingUserDict:
def keys(self):
raise Exc
self.assertRaises(Exc, d.update, FailingUserDict())
class FailingUserDict:
def keys(self):
class BogonIter:
def __init__(self):
self.i = 1
def __iter__(self):
return self
def next(self):
if self.i:
self.i = 0
return 'a'
raise Exc
return BogonIter()
def __getitem__(self, key):
return key
self.assertRaises(Exc, d.update, FailingUserDict())
class FailingUserDict:
def keys(self):
class BogonIter:
def __init__(self):
self.i = ord('a')
def __iter__(self):
return self
def next(self):
if self.i <= ord('z'):
rtn = chr(self.i)
self.i += 1
return rtn
raise StopIteration
return BogonIter()
def __getitem__(self, key):
raise Exc
self.assertRaises(Exc, d.update, FailingUserDict())
class badseq(object):
def __iter__(self):
return self
def next(self):
raise Exc()
self.assertRaises(Exc, {}.update, badseq())
self.assertRaises(ValueError, {}.update, [(1, 2, 3)])
def test_fromkeys(self):
self.assertEqual(dict.fromkeys('abc'), {'a':None, 'b':None, 'c':None})
d = {}
self.assert_(not(d.fromkeys('abc') is d))
self.assertEqual(d.fromkeys('abc'), {'a':None, 'b':None, 'c':None})
self.assertEqual(d.fromkeys((4,5),0), {4:0, 5:0})
self.assertEqual(d.fromkeys([]), {})
def g():
yield 1
self.assertEqual(d.fromkeys(g()), {1:None})
self.assertRaises(TypeError, {}.fromkeys, 3)
class dictlike(dict): pass
self.assertEqual(dictlike.fromkeys('a'), {'a':None})
self.assertEqual(dictlike().fromkeys('a'), {'a':None})
self.assert_(type(dictlike.fromkeys('a')) is dictlike)
self.assert_(type(dictlike().fromkeys('a')) is dictlike)
class mydict(dict):
def __new__(cls):
return UserDict.UserDict()
ud = mydict.fromkeys('ab')
self.assertEqual(ud, {'a':None, 'b':None})
self.assert_(isinstance(ud, UserDict.UserDict))
self.assertRaises(TypeError, dict.fromkeys)
class Exc(Exception): pass
class baddict1(dict):
def __init__(self):
raise Exc()
self.assertRaises(Exc, baddict1.fromkeys, [1])
class BadSeq(object):
def __iter__(self):
return self
def next(self):
raise Exc()
self.assertRaises(Exc, dict.fromkeys, BadSeq())
class baddict2(dict):
def __setitem__(self, key, value):
raise Exc()
self.assertRaises(Exc, baddict2.fromkeys, [1])
# test fast path for dictionary inputs
d = dict(zip(range(6), range(6)))
self.assertEqual(dict.fromkeys(d, 0), dict(zip(range(6), [0]*6)))
def test_copy(self):
d = {1:1, 2:2, 3:3}
self.assertEqual(d.copy(), {1:1, 2:2, 3:3})
self.assertEqual({}.copy(), {})
self.assertRaises(TypeError, d.copy, None)
def test_get(self):
d = {}
self.assert_(d.get('c') is None)
self.assertEqual(d.get('c', 3), 3)
d = {'a' : 1, 'b' : 2}
self.assert_(d.get('c') is None)
self.assertEqual(d.get('c', 3), 3)
self.assertEqual(d.get('a'), 1)
self.assertEqual(d.get('a', 3), 1)
self.assertRaises(TypeError, d.get)
self.assertRaises(TypeError, d.get, None, None, None)
def test_setdefault(self):
# dict.setdefault()
d = {}
self.assert_(d.setdefault('key0') is None)
d.setdefault('key0', [])
self.assert_(d.setdefault('key0') is None)
d.setdefault('key', []).append(3)
self.assertEqual(d['key'][0], 3)
d.setdefault('key', []).append(4)
self.assertEqual(len(d['key']), 2)
self.assertRaises(TypeError, d.setdefault)
class Exc(Exception): pass
class BadHash(object):
fail = False
def __hash__(self):
if self.fail:
raise Exc()
else:
return 42
x = BadHash()
d[x] = 42
x.fail = True
self.assertRaises(Exc, d.setdefault, x, [])
def test_popitem(self):
# dict.popitem()
for copymode in -1, +1:
# -1: b has same structure as a
# +1: b is a.copy()
for log2size in range(12):
size = 2**log2size
a = {}
b = {}
for i in range(size):
a[repr(i)] = i
if copymode < 0:
b[repr(i)] = i
if copymode > 0:
b = a.copy()
for i in range(size):
ka, va = ta = a.popitem()
self.assertEqual(va, int(ka))
kb, vb = tb = b.popitem()
self.assertEqual(vb, int(kb))
self.assert_(not(copymode < 0 and ta != tb))
self.assert_(not a)
self.assert_(not b)
d = {}
self.assertRaises(KeyError, d.popitem)
def test_pop(self):
# Tests for pop with specified key
d = {}
k, v = 'abc', 'def'
d[k] = v
self.assertRaises(KeyError, d.pop, 'ghi')
self.assertEqual(d.pop(k), v)
self.assertEqual(len(d), 0)
self.assertRaises(KeyError, d.pop, k)
# verify longs/ints get same value when key > 32 bits (for 64-bit archs)
# see SF bug #689659
x = 4503599627370496L
y = 4503599627370496
h = {x: 'anything', y: 'something else'}
self.assertEqual(h[x], h[y])
self.assertEqual(d.pop(k, v), v)
d[k] = v
self.assertEqual(d.pop(k, 1), v)
self.assertRaises(TypeError, d.pop)
class Exc(Exception): pass
class BadHash(object):
fail = False
def __hash__(self):
if self.fail:
raise Exc()
else:
return 42
x = BadHash()
d[x] = 42
x.fail = True
self.assertRaises(Exc, d.pop, x)
def test_mutatingiteration(self):
d = {}
d[1] = 1
try:
for i in d:
d[i+1] = 1
except RuntimeError:
pass
else:
self.fail("changing dict size during iteration doesn't raise Error")
def test_repr(self):
d = {}
self.assertEqual(repr(d), '{}')
d[1] = 2
self.assertEqual(repr(d), '{1: 2}')
d = {}
d[1] = d
self.assertEqual(repr(d), '{1: {...}}')
class Exc(Exception): pass
class BadRepr(object):
def __repr__(self):
raise Exc()
d = {1: BadRepr()}
self.assertRaises(Exc, repr, d)
def test_le(self):
self.assert_(not ({} < {}))
self.assert_(not ({1: 2} < {1L: 2L}))
class Exc(Exception): pass
class BadCmp(object):
def __eq__(self, other):
raise Exc()
def __hash__(self):
return 42
d1 = {BadCmp(): 1}
d2 = {1: 1}
try:
d1 < d2
except Exc:
pass
else:
self.fail("< didn't raise Exc")
def test_missing(self):
# Make sure dict doesn't have a __missing__ method
self.assertEqual(hasattr(dict, "__missing__"), False)
self.assertEqual(hasattr({}, "__missing__"), False)
# Test several cases:
# (D) subclass defines __missing__ method returning a value
# (E) subclass defines __missing__ method raising RuntimeError
# (F) subclass sets __missing__ instance variable (no effect)
        # (G) subclass doesn't define __missing__ at all
class D(dict):
def __missing__(self, key):
return 42
d = D({1: 2, 3: 4})
self.assertEqual(d[1], 2)
self.assertEqual(d[3], 4)
self.assert_(2 not in d)
self.assert_(2 not in d.keys())
self.assertEqual(d[2], 42)
class E(dict):
def __missing__(self, key):
raise RuntimeError(key)
e = E()
try:
e[42]
except RuntimeError, err:
self.assertEqual(err.args, (42,))
else:
self.fail("e[42] didn't raise RuntimeError")
class F(dict):
def __init__(self):
# An instance variable __missing__ should have no effect
self.__missing__ = lambda key: None
f = F()
try:
f[42]
except KeyError, err:
self.assertEqual(err.args, (42,))
else:
self.fail("f[42] didn't raise KeyError")
class G(dict):
pass
g = G()
try:
g[42]
except KeyError, err:
self.assertEqual(err.args, (42,))
else:
self.fail("g[42] didn't raise KeyError")
def test_tuple_keyerror(self):
# SF #1576657
d = {}
try:
d[(1,)]
except KeyError, e:
self.assertEqual(e.args, ((1,),))
else:
self.fail("missing KeyError")
def test_bad_key(self):
# Dictionary lookups should fail if __cmp__() raises an exception.
class CustomException(Exception):
pass
class BadDictKey:
def __hash__(self):
return hash(self.__class__)
def __cmp__(self, other):
if isinstance(other, self.__class__):
raise CustomException
return other
d = {}
x1 = BadDictKey()
x2 = BadDictKey()
d[x1] = 1
for stmt in ['d[x2] = 2',
'z = d[x2]',
'x2 in d',
'd.has_key(x2)',
'd.get(x2)',
'd.setdefault(x2, 42)',
'd.pop(x2)',
'd.update({x2: 2})']:
try:
exec stmt in locals()
except CustomException:
pass
else:
self.fail("Statement didn't raise exception")
def test_resize1(self):
# Dict resizing bug, found by Jack Jansen in 2.2 CVS development.
# This version got an assert failure in debug build, infinite loop in
# release build. Unfortunately, provoking this kind of stuff requires
# a mix of inserts and deletes hitting exactly the right hash codes in
# exactly the right order, and I can't think of a randomized approach
# that would be *likely* to hit a failing case in reasonable time.
d = {}
for i in range(5):
d[i] = i
for i in range(5):
del d[i]
for i in range(5, 9): # i==8 was the problem
d[i] = i
def test_resize2(self):
# Another dict resizing bug (SF bug #1456209).
# This caused Segmentation faults or Illegal instructions.
class X(object):
def __hash__(self):
return 5
def __eq__(self, other):
if resizing:
d.clear()
return False
d = {}
resizing = False
d[X()] = 1
d[X()] = 2
d[X()] = 3
d[X()] = 4
d[X()] = 5
# now trigger a resize
resizing = True
d[9] = 6
def test_empty_presized_dict_in_freelist(self):
# Bug #3537: if an empty but presized dict with a size larger
# than 7 was in the freelist, it triggered an assertion failure
try:
d = {'a': 1/0, 'b': None, 'c': None, 'd': None, 'e': None,
'f': None, 'g': None, 'h': None}
except ZeroDivisionError:
pass
d = {}
def test_container_iterator(self):
# Bug #3680: tp_traverse was not implemented for dictiter objects
class C(object):
pass
iterators = (dict.iteritems, dict.itervalues, dict.iterkeys)
for i in iterators:
obj = C()
ref = weakref.ref(obj)
container = {obj: 1}
obj.x = i(container)
del obj, container
gc.collect()
self.assert_(ref() is None, "Cycle was not collected")
from test import mapping_tests
class GeneralMappingTests(mapping_tests.BasicTestMappingProtocol):
type2test = dict
class Dict(dict):
pass
class SubclassMappingTests(mapping_tests.BasicTestMappingProtocol):
type2test = Dict
def test_main():
test_support.run_unittest(
DictTest,
GeneralMappingTests,
SubclassMappingTests,
)
if __name__ == "__main__":
test_main()
|
leighpauls/k2cro4
|
third_party/python_26/Lib/test/test_dict.py
|
Python
|
bsd-3-clause
| 17,651
| 0.005156
|
# -*- coding: utf-8 -*-
# Akvo RSR is covered by the GNU Affero General Public License.
# See more details in the license.txt file located at the root folder of the Akvo RSR module.
# For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >.
from django.contrib.auth import get_user_model
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
from akvo.rsr.forms import (check_password_minimum_length, check_password_has_number,
check_password_has_upper, check_password_has_lower,
check_password_has_symbol)
from akvo.rsr.models import ProjectHierarchy
from .employment import EmploymentSerializer
from .organisation import (
OrganisationExtraSerializer, OrganisationBasicSerializer, UserManagementOrgSerializer)
from .program import ProgramSerializer
from .rsr_serializer import BaseRSRSerializer
class UserRawSerializer(BaseRSRSerializer):
"""
Raw user serializer.
"""
class Meta:
model = get_user_model()
fields = (
'id',
'first_name',
'last_name',
'email',
'is_active',
'is_staff',
'is_admin',
'is_support',
'is_superuser',
)
class UserSerializer(BaseRSRSerializer):
# Needed to show only the first organisation of the user
organisation = OrganisationExtraSerializer(source='first_organisation', required=False,)
organisations = OrganisationExtraSerializer(many=True, required=False,)
user_management_organisations = UserManagementOrgSerializer(many=True, required=False)
approved_employments = EmploymentSerializer(many=True, required=False,)
api_key = serializers.ReadOnlyField(source='get_api_key')
# Legacy fields to support Tastypie API emulation
legacy_org = serializers.SerializerMethodField()
username = serializers.SerializerMethodField()
can_manage_users = serializers.SerializerMethodField()
programs = serializers.SerializerMethodField()
class Meta:
model = get_user_model()
fields = (
'id',
'first_name',
'last_name',
'email',
'username',
'is_active',
'is_staff',
'is_admin',
'is_support',
'is_superuser',
'can_manage_users',
'organisation',
'organisations',
'approved_employments',
'api_key',
'legacy_org',
'programs',
'user_management_organisations',
'seen_announcements',
)
def __init__(self, *args, **kwargs):
""" Delete the 'absolute_url' field added in BaseRSRSerializer.__init__().
It's neither correct nor do we want this data to be visible.
Remove the fields "legacy_org" and "username" that are only present to support older
versions of Up calling the Tastypie API endpoints that we now emulate using DRF
"""
super(UserSerializer, self).__init__(*args, **kwargs)
del self.fields['absolute_url']
# Remove the fields unless we're called via Tastypie URLs
request = kwargs.get("context", {}).get("request", None)
if request and "/api/v1/" not in request.path:
del self.fields['legacy_org']
del self.fields['username']
def get_legacy_org(self, obj):
""" Up needs the last tag to be the user's org, it only needs the org ID
"""
if obj.first_organisation():
return {"object": {"id": obj.first_organisation().id}}
return None
def get_username(self, obj):
return obj.email
def get_can_manage_users(self, obj):
return obj.has_perm('rsr.user_management')
def get_programs(self, user):
hierarchies = ProjectHierarchy.objects.select_related('root_project')\
.prefetch_related('root_project__partners').all()
if not (user.is_superuser or user.is_admin):
hierarchies = hierarchies.filter(root_project__in=user.my_projects()).distinct()
return ProgramSerializer(hierarchies, many=True, context=self.context).data
class UserPasswordSerializer(serializers.Serializer):
"""Change password serializer"""
old_password = serializers.CharField(
help_text='Current Password',
)
new_password1 = serializers.CharField(
help_text='New Password',
)
new_password2 = serializers.CharField(
help_text='New Password (confirmation)',
)
class Meta:
fields = '__all__'
def validate_old_password(self, value):
"""Check for current password"""
if not self.instance.check_password(value):
raise serializers.ValidationError(_('Old password is not correct.'))
return value
def validate(self, data):
"""Check if password1 and password2 match"""
if data['new_password1'] != data['new_password2']:
raise serializers.ValidationError(_('Passwords do not match.'))
password = data['new_password1']
check_password_minimum_length(password)
check_password_has_number(password)
check_password_has_upper(password)
check_password_has_lower(password)
check_password_has_symbol(password)
return data
def update(self, instance, validated_data):
instance.set_password(validated_data.get('new_password2', instance.password))
return instance
class UserDetailsSerializer(BaseRSRSerializer):
approved_organisations = OrganisationBasicSerializer(many=True, required=False)
email = serializers.ReadOnlyField()
class Meta:
model = get_user_model()
fields = (
'id',
'email',
'first_name',
'last_name',
'approved_organisations',
)
def __init__(self, *args, **kwargs):
""" Delete the 'absolute_url' field added in BaseRSRSerializer.__init__().
It's neither correct nor do we want this data to be visible.
"""
super(UserDetailsSerializer, self).__init__(*args, **kwargs)
del self.fields['absolute_url']
|
akvo/akvo-rsr
|
akvo/rest/serializers/user.py
|
Python
|
agpl-3.0
| 6,303
| 0.002697
|
from opc.drivers.baseclass import RopDriver
class Driver(RopDriver):
"""
Just pass back the raw data to the caller for rendering by the app
"""
def __init__(self, width, height, address):
pass
def putPixels(self, channel, pixels):
return pixels
def sysEx(self, systemId, commandId, msg):
pass
def setGlobalColorCorrection(self, gamma, r, g, b):
pass
def terminate(self):
pass
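# Minimal usage sketch (all argument values below are placeholders):
#   driver = Driver(width=8, height=8, address=None)
#   driver.putPixels(channel=0, pixels=[(255, 0, 0)] * 64)  # returns the pixel list unchanged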
|
slobberchops/rop
|
opc/drivers/echo.py
|
Python
|
gpl-3.0
| 455
| 0
|
#!/usr/bin/env python2.7
# __BEGIN_LICENSE__
#
# Copyright (C) 2010-2012 Stanford University.
# All rights reserved.
#
# __END_LICENSE__
# rectify.py
#
# Usage: rectify.py <lightfield_image.{tif,png,etc}> [--pixels-per-lenslet <ppl>]
#
# This script simply applies a rectification from a
# campixel_to_camlens.warp file to a single light field image. The
# resulting rectified image should have the lenslets aligned with the
# horizontal and vertical dimensions of the image. You can optionally
# specify the number of pixels per lenlet you would like in the output
# image, otherwise this value is computed for you based on the input
# imagery and the warp file.
from lflib.imageio import load_image, save_image
import sys, os, math
import numpy as np
if __name__ == "__main__":
# Parse command line options
from optparse import OptionParser
parser = OptionParser()
parser.add_option("-p", "--output-pixels-per-lenslet", dest="output_pixels_per_lenslet",
help="Specify the number of pixels per lenslet in the output image.")
parser.add_option("-o", "--output-file", dest="output_filename",
help="Specify the output filename.")
parser.add_option("-c", "--calibration-file", dest="calibration_file",
help="Specify the calibration file to use for rectification.")
parser.add_option('-s', "--subaperture",
action="store_true", dest="subaperture", default=False,
help="Save out the light field image as tiled subapertures.")
(options, args) = parser.parse_args()
if len(args) < 1:
print 'You must supply at least one light field image to rectify.\n'
parser.print_help()
sys.exit(1)
print 'Rectifying', len(args), 'images.'
for filename in args:
# Default calibration filename has a *.lfc suffix, and the same prefix
if not options.calibration_file:
fileName, fileExtension = os.path.splitext(filename)
calibration_file = fileName + '.lfc'
else:
calibration_file = options.calibration_file
# Default output filename has a -RECTIFIED suffix
if not options.output_filename:
fileName, fileExtension = os.path.splitext(filename)
output_filename = fileName + '-RECTIFIED' + fileExtension
else:
output_filename = options.output_filename
# Load the calibration data
from lflib.calibration import LightFieldCalibration
lfcal = LightFieldCalibration.load(calibration_file)
# Rectify the image
im = load_image(filename, normalize = False)
input_dtype = im.dtype
# Perform dark frame subtraction
im = lfcal.subtract_dark_frame(im)
# Rectify the image
rectified_lf = lfcal.rectify_lf(im)
        # Optionally reformat the image so that sub-apertures are tiled, rather than lenslet images.
from lflib.lightfield import LightField
if (options.subaperture):
im = rectified_lf.asimage(LightField.TILED_SUBAPERTURE)
print '\t--> Saving ', output_filename, 'as tiled sub-aperture image.'
else:
im = rectified_lf.asimage(LightField.TILED_LENSLET)
print '\t--> Saving ', output_filename, 'as tiled lenslet image.'
print im.max()
print im.min()
#im = im/im.max()*255
#print im.max()
#print im.min()
#save_image(output_filename, im.astype(np.uint8), dtype=np.uint8)
save_image(output_filename, im, dtype=np.uint16)
|
sophie63/FlyLFM
|
stanford_lfanalyze_v0.4/lfrectify.py
|
Python
|
bsd-2-clause
| 3,622
| 0.004417
|
################################################################################
# Copyright (C) 2013-2014 Jaakko Luttinen
#
# This file is licensed under the MIT License.
################################################################################
"""
Unit tests for `dot` module.
"""
import unittest
import numpy as np
import scipy
from numpy import testing
from ..dot import Dot, SumMultiply
from ..gaussian import Gaussian, GaussianARD
from bayespy.nodes import GaussianGamma
from ...vmp import VB
from bayespy.utils import misc
from bayespy.utils import linalg
from bayespy.utils import random
from bayespy.utils.misc import TestCase
class TestSumMultiply(TestCase):
def test_parent_validity(self):
"""
Test that the parent nodes are validated properly in SumMultiply
"""
V = GaussianARD(1, 1)
X = Gaussian(np.ones(1), np.identity(1))
Y = Gaussian(np.ones(3), np.identity(3))
Z = Gaussian(np.ones(5), np.identity(5))
A = SumMultiply(X, ['i'])
self.assertEqual(A.dims, ((), ()))
A = SumMultiply('i', X)
self.assertEqual(A.dims, ((), ()))
A = SumMultiply(X, ['i'], ['i'])
self.assertEqual(A.dims, ((1,), (1,1)))
A = SumMultiply('i->i', X)
self.assertEqual(A.dims, ((1,), (1,1)))
A = SumMultiply(X, ['i'], Y, ['j'], ['i','j'])
self.assertEqual(A.dims, ((1,3), (1,3,1,3)))
A = SumMultiply('i,j->ij', X, Y)
self.assertEqual(A.dims, ((1,3), (1,3,1,3)))
A = SumMultiply(V, [], X, ['i'], Y, ['i'], [])
self.assertEqual(A.dims, ((), ()))
A = SumMultiply(',i,i->', V, X, Y)
self.assertEqual(A.dims, ((), ()))
# Gaussian-gamma parents
C = GaussianGamma(np.ones(3), np.identity(3), 1, 1)
A = SumMultiply(Y, ['i'], C, ['i'], ['i'])
self.assertEqual(A.dims, ((3,), (3,3), (), ()))
A = SumMultiply('i,i->i', Y, C)
self.assertEqual(A.dims, ((3,), (3,3), (), ()))
C = GaussianGamma(np.ones(3), np.identity(3), 1, 1)
A = SumMultiply(Y, ['i'], C, ['i'], [])
self.assertEqual(A.dims, ((), (), (), ()))
A = SumMultiply('i,i->', Y, C)
self.assertEqual(A.dims, ((), (), (), ()))
# Error: not enough inputs
self.assertRaises(ValueError,
SumMultiply)
self.assertRaises(ValueError,
SumMultiply,
X)
# Error: too many keys
self.assertRaises(ValueError,
SumMultiply,
Y,
['i', 'j'])
self.assertRaises(ValueError,
SumMultiply,
'ij',
Y)
# Error: not broadcastable
self.assertRaises(ValueError,
SumMultiply,
Y,
['i'],
Z,
['i'])
self.assertRaises(ValueError,
SumMultiply,
'i,i',
Y,
Z)
# Error: output key not in inputs
self.assertRaises(ValueError,
SumMultiply,
X,
['i'],
['j'])
self.assertRaises(ValueError,
SumMultiply,
'i->j',
X)
# Error: non-unique input keys
self.assertRaises(ValueError,
SumMultiply,
X,
['i','i'])
self.assertRaises(ValueError,
SumMultiply,
'ii',
X)
# Error: non-unique output keys
self.assertRaises(ValueError,
SumMultiply,
X,
['i'],
['i','i'])
self.assertRaises(ValueError,
SumMultiply,
'i->ii',
X)
# String has too many '->'
self.assertRaises(ValueError,
SumMultiply,
'i->i->i',
X)
# String has too many input nodes
self.assertRaises(ValueError,
SumMultiply,
'i,i->i',
X)
# Same parent several times
self.assertRaises(ValueError,
SumMultiply,
'i,i->i',
X,
X)
# Same parent several times via deterministic node
Xh = SumMultiply('i->i', X)
self.assertRaises(ValueError,
SumMultiply,
'i,i->i',
X,
Xh)
def test_message_to_child(self):
"""
Test the message from SumMultiply to its children.
"""
def compare_moments(u0, u1, *args):
Y = SumMultiply(*args)
u_Y = Y.get_moments()
self.assertAllClose(u_Y[0], u0)
self.assertAllClose(u_Y[1], u1)
# Test constant parent
y = np.random.randn(2,3,4)
compare_moments(y,
linalg.outer(y, y, ndim=2),
'ij->ij',
y)
# Do nothing for 2-D array
Y = GaussianARD(np.random.randn(5,2,3),
np.random.rand(5,2,3),
plates=(5,),
shape=(2,3))
y = Y.get_moments()
compare_moments(y[0],
y[1],
'ij->ij',
Y)
compare_moments(y[0],
y[1],
Y,
[0,1],
[0,1])
# Sum over the rows of a matrix
Y = GaussianARD(np.random.randn(5,2,3),
np.random.rand(5,2,3),
plates=(5,),
shape=(2,3))
y = Y.get_moments()
mu = np.einsum('...ij->...j', y[0])
cov = np.einsum('...ijkl->...jl', y[1])
compare_moments(mu,
cov,
'ij->j',
Y)
compare_moments(mu,
cov,
Y,
[0,1],
[1])
# Inner product of three vectors
X1 = GaussianARD(np.random.randn(2),
np.random.rand(2),
plates=(),
shape=(2,))
x1 = X1.get_moments()
X2 = GaussianARD(np.random.randn(6,1,2),
np.random.rand(6,1,2),
plates=(6,1),
shape=(2,))
x2 = X2.get_moments()
X3 = GaussianARD(np.random.randn(7,6,5,2),
np.random.rand(7,6,5,2),
plates=(7,6,5),
shape=(2,))
x3 = X3.get_moments()
mu = np.einsum('...i,...i,...i->...', x1[0], x2[0], x3[0])
cov = np.einsum('...ij,...ij,...ij->...', x1[1], x2[1], x3[1])
compare_moments(mu,
cov,
'i,i,i',
X1,
X2,
X3)
compare_moments(mu,
cov,
'i,i,i->',
X1,
X2,
X3)
compare_moments(mu,
cov,
X1,
[9],
X2,
[9],
X3,
[9])
compare_moments(mu,
cov,
X1,
[9],
X2,
[9],
X3,
[9],
[])
# Outer product of two vectors
X1 = GaussianARD(np.random.randn(2),
np.random.rand(2),
plates=(5,),
shape=(2,))
x1 = X1.get_moments()
X2 = GaussianARD(np.random.randn(6,1,2),
np.random.rand(6,1,2),
plates=(6,1),
shape=(2,))
x2 = X2.get_moments()
mu = np.einsum('...i,...j->...ij', x1[0], x2[0])
cov = np.einsum('...ik,...jl->...ijkl', x1[1], x2[1])
compare_moments(mu,
cov,
'i,j->ij',
X1,
X2)
compare_moments(mu,
cov,
X1,
[9],
X2,
[7],
[9,7])
# Matrix product
Y1 = GaussianARD(np.random.randn(3,2),
np.random.rand(3,2),
plates=(),
shape=(3,2))
y1 = Y1.get_moments()
Y2 = GaussianARD(np.random.randn(5,2,3),
np.random.rand(5,2,3),
plates=(5,),
shape=(2,3))
y2 = Y2.get_moments()
mu = np.einsum('...ik,...kj->...ij', y1[0], y2[0])
cov = np.einsum('...ikjl,...kmln->...imjn', y1[1], y2[1])
compare_moments(mu,
cov,
'ik,kj->ij',
Y1,
Y2)
compare_moments(mu,
cov,
Y1,
['i','k'],
Y2,
['k','j'],
['i','j'])
# Trace of a matrix product
Y1 = GaussianARD(np.random.randn(3,2),
np.random.rand(3,2),
plates=(),
shape=(3,2))
y1 = Y1.get_moments()
Y2 = GaussianARD(np.random.randn(5,2,3),
np.random.rand(5,2,3),
plates=(5,),
shape=(2,3))
y2 = Y2.get_moments()
mu = np.einsum('...ij,...ji->...', y1[0], y2[0])
cov = np.einsum('...ikjl,...kilj->...', y1[1], y2[1])
compare_moments(mu,
cov,
'ij,ji',
Y1,
Y2)
compare_moments(mu,
cov,
'ij,ji->',
Y1,
Y2)
compare_moments(mu,
cov,
Y1,
['i','j'],
Y2,
['j','i'])
compare_moments(mu,
cov,
Y1,
['i','j'],
Y2,
['j','i'],
[])
# Vector-matrix-vector product
X1 = GaussianARD(np.random.randn(3),
np.random.rand(3),
plates=(),
shape=(3,))
x1 = X1.get_moments()
X2 = GaussianARD(np.random.randn(6,1,2),
np.random.rand(6,1,2),
plates=(6,1),
shape=(2,))
x2 = X2.get_moments()
Y = GaussianARD(np.random.randn(3,2),
np.random.rand(3,2),
plates=(),
shape=(3,2))
y = Y.get_moments()
mu = np.einsum('...i,...ij,...j->...', x1[0], y[0], x2[0])
cov = np.einsum('...ia,...ijab,...jb->...', x1[1], y[1], x2[1])
compare_moments(mu,
cov,
'i,ij,j',
X1,
Y,
X2)
compare_moments(mu,
cov,
X1,
[1],
Y,
[1,2],
X2,
[2])
# Complex sum-product of 0-D, 1-D, 2-D and 3-D arrays
V = GaussianARD(np.random.randn(7,6,5),
np.random.rand(7,6,5),
plates=(7,6,5),
shape=())
v = V.get_moments()
X = GaussianARD(np.random.randn(6,1,2),
np.random.rand(6,1,2),
plates=(6,1),
shape=(2,))
x = X.get_moments()
Y = GaussianARD(np.random.randn(3,4),
np.random.rand(3,4),
plates=(5,),
shape=(3,4))
y = Y.get_moments()
Z = GaussianARD(np.random.randn(4,2,3),
np.random.rand(4,2,3),
plates=(6,5),
shape=(4,2,3))
z = Z.get_moments()
mu = np.einsum('...,...i,...kj,...jik->...k', v[0], x[0], y[0], z[0])
cov = np.einsum('...,...ia,...kjcb,...jikbac->...kc', v[1], x[1], y[1], z[1])
compare_moments(mu,
cov,
',i,kj,jik->k',
V,
X,
Y,
Z)
compare_moments(mu,
cov,
V,
[],
X,
['i'],
Y,
['k','j'],
Z,
['j','i','k'],
['k'])
# Test with constant nodes
N = 10
D = 5
a = np.random.randn(N, D)
B = Gaussian(
np.random.randn(D),
random.covariance(D),
)
X = SumMultiply('i,i->', B, a)
np.testing.assert_allclose(
X.get_moments()[0],
np.einsum('ni,i->n', a, B.get_moments()[0]),
)
np.testing.assert_allclose(
X.get_moments()[1],
np.einsum('ni,nj,ij->n', a, a, B.get_moments()[1]),
)
#
# Gaussian-gamma parents
#
# Outer product of vectors
X1 = GaussianARD(np.random.randn(2),
np.random.rand(2),
shape=(2,))
x1 = X1.get_moments()
X2 = GaussianGamma(
np.random.randn(6,1,2),
random.covariance(2),
np.random.rand(6,1),
np.random.rand(6,1),
plates=(6,1)
)
x2 = X2.get_moments()
Y = SumMultiply('i,j->ij', X1, X2)
u = Y._message_to_child()
y = np.einsum('...i,...j->...ij', x1[0], x2[0])
yy = np.einsum('...ik,...jl->...ijkl', x1[1], x2[1])
self.assertAllClose(u[0], y)
self.assertAllClose(u[1], yy)
self.assertAllClose(u[2], x2[2])
self.assertAllClose(u[3], x2[3])
# Test with constant nodes
N = 10
M = 8
D = 5
a = np.random.randn(N, 1, D)
B = GaussianGamma(
np.random.randn(M, D),
random.covariance(D, size=(M,)),
np.random.rand(M),
np.random.rand(M),
ndim=1,
)
X = SumMultiply('i,i->', B, a)
np.testing.assert_allclose(
X.get_moments()[0],
np.einsum('nmi,mi->nm', a, B.get_moments()[0]),
)
np.testing.assert_allclose(
X.get_moments()[1],
np.einsum('nmi,nmj,mij->nm', a, a, B.get_moments()[1]),
)
np.testing.assert_allclose(
X.get_moments()[2],
B.get_moments()[2],
)
np.testing.assert_allclose(
X.get_moments()[3],
B.get_moments()[3],
)
pass
def test_message_to_parent(self):
"""
Test the message from SumMultiply node to its parents.
"""
data = 2
tau = 3
def check_message(true_m0, true_m1, parent, *args, F=None):
if F is None:
A = SumMultiply(*args)
B = GaussianARD(A, tau)
B.observe(data*np.ones(A.plates + A.dims[0]))
else:
A = F
(A_m0, A_m1) = A._message_to_parent(parent)
self.assertAllClose(true_m0, A_m0)
self.assertAllClose(true_m1, A_m1)
pass
# Check: different message to each of multiple parents
X1 = GaussianARD(np.random.randn(2),
np.random.rand(2),
ndim=1)
x1 = X1.get_moments()
X2 = GaussianARD(np.random.randn(2),
np.random.rand(2),
ndim=1)
x2 = X2.get_moments()
m0 = tau * data * x2[0]
m1 = -0.5 * tau * x2[1] * np.identity(2)
check_message(m0, m1, 0,
'i,i->i',
X1,
X2)
check_message(m0, m1, 0,
X1,
[9],
X2,
[9],
[9])
m0 = tau * data * x1[0]
m1 = -0.5 * tau * x1[1] * np.identity(2)
check_message(m0, m1, 1,
'i,i->i',
X1,
X2)
check_message(m0, m1, 1,
X1,
[9],
X2,
[9],
[9])
# Check: key not in output
X1 = GaussianARD(np.random.randn(2),
np.random.rand(2),
ndim=1)
x1 = X1.get_moments()
m0 = tau * data * np.ones(2)
m1 = -0.5 * tau * np.ones((2,2))
check_message(m0, m1, 0,
'i',
X1)
check_message(m0, m1, 0,
'i->',
X1)
check_message(m0, m1, 0,
X1,
[9])
check_message(m0, m1, 0,
X1,
[9],
[])
# Check: key not in some input
X1 = GaussianARD(np.random.randn(),
np.random.rand())
x1 = X1.get_moments()
X2 = GaussianARD(np.random.randn(2),
np.random.rand(2),
ndim=1)
x2 = X2.get_moments()
m0 = tau * data * np.sum(x2[0], axis=-1)
m1 = -0.5 * tau * np.sum(x2[1] * np.identity(2),
axis=(-1,-2))
check_message(m0, m1, 0,
',i->i',
X1,
X2)
check_message(m0, m1, 0,
X1,
[],
X2,
[9],
[9])
m0 = tau * data * x1[0] * np.ones(2)
m1 = -0.5 * tau * x1[1] * np.identity(2)
check_message(m0, m1, 1,
',i->i',
X1,
X2)
check_message(m0, m1, 1,
X1,
[],
X2,
[9],
[9])
# Check: keys in different order
Y1 = GaussianARD(np.random.randn(3,2),
np.random.rand(3,2),
ndim=2)
y1 = Y1.get_moments()
Y2 = GaussianARD(np.random.randn(2,3),
np.random.rand(2,3),
ndim=2)
y2 = Y2.get_moments()
m0 = tau * data * y2[0].T
m1 = -0.5 * tau * np.einsum('ijlk->jikl', y2[1] * misc.identity(2,3))
check_message(m0, m1, 0,
'ij,ji->ij',
Y1,
Y2)
check_message(m0, m1, 0,
Y1,
['i','j'],
Y2,
['j','i'],
['i','j'])
m0 = tau * data * y1[0].T
m1 = -0.5 * tau * np.einsum('ijlk->jikl', y1[1] * misc.identity(3,2))
check_message(m0, m1, 1,
'ij,ji->ij',
Y1,
Y2)
check_message(m0, m1, 1,
Y1,
['i','j'],
Y2,
['j','i'],
['i','j'])
# Check: plates when different dimensionality
X1 = GaussianARD(np.random.randn(5),
np.random.rand(5),
shape=(),
plates=(5,))
x1 = X1.get_moments()
X2 = GaussianARD(np.random.randn(5,3),
np.random.rand(5,3),
shape=(3,),
plates=(5,))
x2 = X2.get_moments()
m0 = tau * data * np.sum(np.ones((5,3)) * x2[0], axis=-1)
m1 = -0.5 * tau * np.sum(x2[1] * misc.identity(3), axis=(-1,-2))
check_message(m0, m1, 0,
',i->i',
X1,
X2)
check_message(m0, m1, 0,
X1,
[],
X2,
['i'],
['i'])
m0 = tau * data * x1[0][:,np.newaxis] * np.ones((5,3))
m1 = -0.5 * tau * x1[1][:,np.newaxis,np.newaxis] * misc.identity(3)
check_message(m0, m1, 1,
',i->i',
X1,
X2)
check_message(m0, m1, 1,
X1,
[],
X2,
['i'],
['i'])
# Check: other parent's moments broadcasts over plates when node has the
# same plates
X1 = GaussianARD(np.random.randn(5,4,3),
np.random.rand(5,4,3),
shape=(3,),
plates=(5,4))
x1 = X1.get_moments()
X2 = GaussianARD(np.random.randn(3),
np.random.rand(3),
shape=(3,),
plates=(5,4))
x2 = X2.get_moments()
m0 = tau * data * np.ones((5,4,3)) * x2[0]
m1 = -0.5 * tau * x2[1] * misc.identity(3)
check_message(m0, m1, 0,
'i,i->i',
X1,
X2)
check_message(m0, m1, 0,
X1,
['i'],
X2,
['i'],
['i'])
# Check: other parent's moments broadcasts over plates when node does
# not have that plate
X1 = GaussianARD(np.random.randn(3),
np.random.rand(3),
shape=(3,),
plates=())
x1 = X1.get_moments()
X2 = GaussianARD(np.random.randn(3),
np.random.rand(3),
shape=(3,),
plates=(5,4))
x2 = X2.get_moments()
m0 = tau * data * np.sum(np.ones((5,4,3)) * x2[0], axis=(0,1))
m1 = -0.5 * tau * np.sum(np.ones((5,4,1,1))
* misc.identity(3)
* x2[1],
axis=(0,1))
check_message(m0, m1, 0,
'i,i->i',
X1,
X2)
check_message(m0, m1, 0,
X1,
['i'],
X2,
['i'],
['i'])
# Check: other parent's moments broadcasts over plates when the node
# only broadcasts that plate
X1 = GaussianARD(np.random.randn(3),
np.random.rand(3),
shape=(3,),
plates=(1,1))
x1 = X1.get_moments()
X2 = GaussianARD(np.random.randn(3),
np.random.rand(3),
shape=(3,),
plates=(5,4))
x2 = X2.get_moments()
m0 = tau * data * np.sum(np.ones((5,4,3)) * x2[0], axis=(0,1), keepdims=True)
m1 = -0.5 * tau * np.sum(np.ones((5,4,1,1))
* misc.identity(3)
* x2[1],
axis=(0,1),
keepdims=True)
check_message(m0, m1, 0,
'i,i->i',
X1,
X2)
check_message(m0, m1, 0,
X1,
['i'],
X2,
['i'],
['i'])
# Check: broadcasted dimensions
X1 = GaussianARD(np.random.randn(1,1),
np.random.rand(1,1),
ndim=2)
x1 = X1.get_moments()
X2 = GaussianARD(np.random.randn(3,2),
np.random.rand(3,2),
ndim=2)
x2 = X2.get_moments()
m0 = tau * data * np.sum(np.ones((3,2)) * x2[0],
keepdims=True)
m1 = -0.5 * tau * np.sum(misc.identity(3,2) * x2[1],
keepdims=True)
check_message(m0, m1, 0,
'ij,ij->ij',
X1,
X2)
check_message(m0, m1, 0,
X1,
[0,1],
X2,
[0,1],
[0,1])
m0 = tau * data * np.ones((3,2)) * x1[0]
m1 = -0.5 * tau * misc.identity(3,2) * x1[1]
check_message(m0, m1, 1,
'ij,ij->ij',
X1,
X2)
check_message(m0, m1, 1,
X1,
[0,1],
X2,
[0,1],
[0,1])
# Check: non-ARD observations
X1 = GaussianARD(np.random.randn(2),
np.random.rand(2),
ndim=1)
x1 = X1.get_moments()
Lambda = np.array([[2, 1.5], [1.5, 2]])
F = SumMultiply('i->i', X1)
Y = Gaussian(F, Lambda)
y = np.random.randn(2)
Y.observe(y)
m0 = np.dot(Lambda, y)
m1 = -0.5 * Lambda
check_message(m0, m1, 0,
'i->i',
X1,
F=F)
check_message(m0, m1, 0,
X1,
['i'],
['i'],
F=F)
# Check: mask with same shape
X1 = GaussianARD(np.random.randn(3,2),
np.random.rand(3,2),
shape=(2,),
plates=(3,))
x1 = X1.get_moments()
mask = np.array([True, False, True])
F = SumMultiply('i->i', X1)
Y = GaussianARD(F, tau, ndim=1)
Y.observe(data*np.ones((3,2)), mask=mask)
m0 = tau * data * mask[:,np.newaxis] * np.ones(2)
m1 = -0.5 * tau * mask[:,np.newaxis,np.newaxis] * np.identity(2)
check_message(m0, m1, 0,
'i->i',
X1,
F=F)
check_message(m0, m1, 0,
X1,
['i'],
['i'],
F=F)
# Check: mask larger
X1 = GaussianARD(np.random.randn(2),
np.random.rand(2),
shape=(2,),
plates=())
x1 = X1.get_moments()
X2 = GaussianARD(np.random.randn(3,2),
np.random.rand(3,2),
shape=(2,),
plates=(3,))
x2 = X2.get_moments()
mask = np.array([True, False, True])
F = SumMultiply('i,i->i', X1, X2)
Y = GaussianARD(F, tau,
plates=(3,),
ndim=1)
Y.observe(data*np.ones((3,2)), mask=mask)
m0 = tau * data * np.sum(mask[:,np.newaxis] * x2[0], axis=0)
m1 = -0.5 * tau * np.sum(mask[:,np.newaxis,np.newaxis]
* x2[1]
* np.identity(2),
axis=0)
check_message(m0, m1, 0,
'i,i->i',
X1,
X2,
F=F)
check_message(m0, m1, 0,
X1,
['i'],
X2,
['i'],
['i'],
F=F)
# Check: mask for broadcasted plate
X1 = GaussianARD(np.random.randn(2),
np.random.rand(2),
ndim=1,
plates=(1,))
x1 = X1.get_moments()
X2 = GaussianARD(np.random.randn(2),
np.random.rand(2),
ndim=1,
plates=(3,))
x2 = X2.get_moments()
mask = np.array([True, False, True])
F = SumMultiply('i,i->i', X1, X2)
Y = GaussianARD(F, tau,
plates=(3,),
ndim=1)
Y.observe(data*np.ones((3,2)), mask=mask)
m0 = tau * data * np.sum(mask[:,np.newaxis] * x2[0],
axis=0,
keepdims=True)
m1 = -0.5 * tau * np.sum(mask[:,np.newaxis,np.newaxis]
* x2[1]
* np.identity(2),
axis=0,
keepdims=True)
check_message(m0, m1, 0,
'i->i',
X1,
F=F)
check_message(m0, m1, 0,
X1,
['i'],
['i'],
F=F)
# Test with constant nodes
N = 10
M = 8
D = 5
K = 3
a = np.random.randn(N, D)
B = Gaussian(
np.random.randn(D),
random.covariance(D),
)
C = GaussianARD(
np.random.randn(M, 1, D, K),
np.random.rand(M, 1, D, K),
ndim=2
)
F = SumMultiply('i,i,ij->', a, B, C)
tau = np.random.rand(M, N)
Y = GaussianARD(F, tau, plates=(M,N))
y = np.random.randn(M, N)
Y.observe(y)
(m0, m1) = F._message_to_parent(1)
np.testing.assert_allclose(
m0,
np.einsum('mn,ni,mnik->i', tau*y, a, C.get_moments()[0]),
)
np.testing.assert_allclose(
m1,
np.einsum('mn,ni,nj,mnikjl->ij', -0.5*tau, a, a, C.get_moments()[1]),
)
# Check: Gaussian-gamma parents
X1 = GaussianGamma(
np.random.randn(2),
random.covariance(2),
np.random.rand(),
np.random.rand()
)
x1 = X1.get_moments()
X2 = GaussianGamma(
np.random.randn(2),
random.covariance(2),
np.random.rand(),
np.random.rand()
)
x2 = X2.get_moments()
F = SumMultiply('i,i->i', X1, X2)
V = random.covariance(2)
y = np.random.randn(2)
Y = Gaussian(F, V)
Y.observe(y)
m0 = np.dot(V, y) * x2[0]
m1 = -0.5 * V * x2[1]
        m2 = -0.5 * np.einsum('i,ij,j', y, V, y) * x2[2]  # linalg.inner(V, x2[2], ndim=2)
        m3 = 0.5 * 2  # linalg.chol_logdet(linalg.chol(V)) + 2*x2[3]
m = F._message_to_parent(0)
self.assertAllClose(m[0], m0)
self.assertAllClose(m[1], m1)
self.assertAllClose(m[2], m2)
self.assertAllClose(m[3], m3)
# Delta moments
N = 10
M = 8
D = 5
a = np.random.randn(N, D)
B = GaussianGamma(
np.random.randn(D),
random.covariance(D),
np.random.rand(),
np.random.rand(),
ndim=1
)
F = SumMultiply('i,i->', a, B)
tau = np.random.rand(M, N)
Y = GaussianARD(F, tau, plates=(M,N))
y = np.random.randn(M, N)
Y.observe(y)
(m0, m1, m2, m3) = F._message_to_parent(1)
np.testing.assert_allclose(
m0,
np.einsum('mn,ni->i', tau*y, a),
)
np.testing.assert_allclose(
m1,
np.einsum('mn,ni,nj->ij', -0.5*tau, a, a),
)
np.testing.assert_allclose(
m2,
np.einsum('mn->', -0.5*tau*y**2),
)
np.testing.assert_allclose(
m3,
np.einsum('mn->', 0.5*np.ones(np.shape(tau))),
)
pass
def test_compute_moments(self):
return
def check_performance(scale=1e2):
"""
Tests that the implementation of SumMultiply is efficient.
This is not a unit test (not run automatically), but rather a
performance test, which you may run to test the performance of the
node. A naive implementation of SumMultiply will run out of memory in
some cases and this method checks that the implementation is not naive
but good.
"""
# Check: Broadcasted plates are computed efficiently
# (bad implementation will take a long time to run)
s = scale
X1 = GaussianARD(np.random.randn(s,s),
np.random.rand(s,s),
shape=(s,),
plates=(s,))
X2 = GaussianARD(np.random.randn(s,1,s),
np.random.rand(s,1,s),
shape=(s,),
plates=(s,1))
F = SumMultiply('i,i', X1, X2)
Y = GaussianARD(F, 1)
Y.observe(np.ones((s,s)))
    try:
        F._message_to_parent(1)
    except Exception as e:
        print(e)
        print('SOMETHING BAD HAPPENED')
# Check: Broadcasted dimensions are computed efficiently
# (bad implementation will run out of memory)
pass
|
bayespy/bayespy
|
bayespy/inference/vmp/nodes/tests/test_dot.py
|
Python
|
mit
| 34,668
| 0.006202
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'ArticleComment'
db.create_table('cms_articlecomment', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('article', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['cms.Article'])),
('created_at', self.gf('django.db.models.fields.DateField')(auto_now_add=True, blank=True)),
('author', self.gf('django.db.models.fields.CharField')(max_length=60)),
('comment', self.gf('django.db.models.fields.TextField')()),
))
db.send_create_signal('cms', ['ArticleComment'])
def backwards(self, orm):
# Deleting model 'ArticleComment'
db.delete_table('cms_articlecomment')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'cms.article': {
'Meta': {'ordering': "['title']", 'object_name': 'Article'},
'allow_comments': ('django.db.models.fields.CharField', [], {'default': "'N'", 'max_length': '1'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'content': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'conversions': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'created_at': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime.now'}),
'header': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'keywords': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'sections': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['cms.Section']", 'null': 'True', 'through': "orm['cms.SectionItem']", 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '250', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
'updated_at': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'blank': 'True'}),
'views': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'cms.articlearchive': {
'Meta': {'ordering': "('updated_at',)", 'object_name': 'ArticleArchive'},
'article': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Article']"}),
'content': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'header': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'cms.articlecomment': {
'Meta': {'ordering': "('created_at',)", 'object_name': 'ArticleComment'},
'article': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Article']"}),
'author': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
'comment': ('django.db.models.fields.TextField', [], {}),
'created_at': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'cms.filedownload': {
'Meta': {'object_name': 'FileDownload'},
'count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'expires_at': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'file': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['filer.File']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'uuid': ('uuidfield.fields.UUIDField', [], {'unique': 'True', 'max_length': '32', 'blank': 'True'})
},
'cms.menu': {
'Meta': {'object_name': 'Menu'},
'article': ('smart_selects.db_fields.ChainedForeignKey', [], {'default': 'None', 'to': "orm['cms.Article']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
u'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'link': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['cms.Menu']"}),
u'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'section': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Section']", 'null': 'True', 'blank': 'True'}),
u'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'cms.section': {
'Meta': {'ordering': "['title']", 'object_name': 'Section'},
'articles': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['cms.Article']", 'null': 'True', 'through': "orm['cms.SectionItem']", 'blank': 'True'}),
'conversions': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'header': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'keywords': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '250', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
'views': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'cms.sectionitem': {
'Meta': {'ordering': "['order']", 'object_name': 'SectionItem'},
'article': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Article']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1', 'db_index': 'True'}),
'section': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Section']"})
},
'cms.urlmigrate': {
'Meta': {'object_name': 'URLMigrate'},
'dtupdate': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'new_url': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
'obs': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'old_url': ('django.db.models.fields.CharField', [], {'max_length': '250', 'db_index': 'True'}),
'redirect_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'views': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'filer.file': {
'Meta': {'object_name': 'File'},
'_file_size': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'file': ('django.db.models.fields.files.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'folder': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'all_files'", 'null': 'True', 'to': "orm['filer.Folder']"}),
'has_all_mandatory_data': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'original_filename': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'owned_files'", 'null': 'True', 'to': "orm['auth.User']"}),
'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polymorphic_filer.file_set'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'sha1': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '40', 'blank': 'True'}),
'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'filer.folder': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('parent', 'name'),)", 'object_name': 'Folder'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
u'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
u'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'filer_owned_folders'", 'null': 'True', 'to': "orm['auth.User']"}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['filer.Folder']"}),
u'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
u'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
}
}
complete_apps = ['cms']
|
josircg/raizcidadanista
|
raizcidadanista/cms/migrations/0004_auto__add_articlecomment.py
|
Python
|
gpl-3.0
| 14,504
| 0.008274
|
# -*- coding: utf-8 -*-
import json
import datetime
from decimal import Decimal
from requests.packages.urllib3.util import parse_url
from .models import BaseModel
from .errors import OptimoError
DEFAULT_API_VERSION = 'v1'
class CoreOptimoEncoder(json.JSONEncoder):
"""Custom JSON encoder that knows how to serialize ``datetime.datetime``
and ``decimal.Decimal`` objects.
"""
    def default(self, o):
        if isinstance(o, datetime.datetime):
            return o.strftime('%Y-%m-%dT%H:%M')
        if isinstance(o, Decimal):
            return float(o)
        # Defer to the base class for anything else, so unsupported types
        # raise TypeError instead of being silently serialized as null.
        return super(CoreOptimoEncoder, self).default(o)
class OptimoEncoder(CoreOptimoEncoder):
"""Custom JSON encoder that knows how to serialize
:class:`optimo.models.BaseModel <BaseModel>` objects.
"""
def default(self, o):
if isinstance(o, BaseModel):
return o.as_optimo_schema()
return super(OptimoEncoder, self).default(o)
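# A minimal usage sketch (not part of the original module): these encoders
# are meant to be handed to ``json.dumps`` via its ``cls`` argument. The
# payload below is a made-up example that exercises the datetime branch.
#
#     >>> import json, datetime
#     >>> json.dumps({'when': datetime.datetime(2015, 1, 2, 3, 4)},
#     ...            cls=CoreOptimoEncoder)
#     '{"when": "2015-01-02T03:04"}'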
def validate_url(url):
"""Asserts that the url string has a valid protocol scheme.
:param url: ``str`` url we want to validate
:raises OptimoError: When we can't deduce a valid protocol scheme
"""
_url = parse_url(url)
if not _url.scheme:
raise OptimoError("The url: '{}' does not define a protocol scheme"
.format(url))
def validate_config_params(optimo_url, version, access_key):
"""Validates and normalizes the parameters passed to
:class:`optimo.api.OptimoAPI` constructor.
:param optimo_url: string url of the optimoroute's service
:param version: ``int`` or ``str`` denoting the API version
:param access_key: string access key provided by optimoroute
    :return: ``tuple`` of the passed parameters, possibly adjusted.
:raises OptimoError: On providing incomplete or invalid config data
"""
if not optimo_url or not isinstance(optimo_url, basestring):
raise OptimoError("'optimo_url' must be a url string")
validate_url(optimo_url)
if not version or not isinstance(version, basestring) or not \
version.startswith('v'):
raise OptimoError("'version' must be a string denoting the API version "
"you want to use('v1', 'v2', etc")
if not access_key or not isinstance(access_key, basestring):
raise OptimoError("'access_key' must be the string access key provided "
"to you by optimoroute")
return optimo_url, version, access_key
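# Usage sketch (illustrative, not from the original module); the URL and
# access key below are placeholder values:
#
#     >>> validate_config_params('https://api.optimoroute.com', 'v1', 'abc123')
#     ('https://api.optimoroute.com', 'v1', 'abc123')
#     >>> validate_url('api.optimoroute.com')  # no scheme, so this raises
#     Traceback (most recent call last):
#         ...
#     OptimoError: The url: 'api.optimoroute.com' does not define a protocol scheme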
|
fieldaware/optimoroute
|
optimo/util.py
|
Python
|
bsd-3-clause
| 2,432
| 0.000822
|
# ----------------------------------------------------------------------------
# cocos2d
# Copyright (c) 2008-2012 Daniel Moisset, Ricardo Quesada, Rayentray Tappa,
# Lucio Torre
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of cocos2d nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
#
# Python Interpreter
# 95% of the code from 'Bruce: the presentation tool' by Richard Jones
# http://code.google.com/p/bruce-tpt/
#
#
__docformat__ = 'restructuredtext'
import sys
import os
import code
import pyglet
from pyglet import graphics
from pyglet import text
from pyglet.text import caret, document, layout
import cocos
from cocos.director import director
from base_layers import Layer
from util_layers import ColorLayer
__all__ = ['PythonInterpreterLayer']
class Output:
    """File-like stand-in for stdout that forwards writes to a display callback."""
    def __init__(self, display, realstdout):
        self.out = display
        self.realstdout = realstdout
        self.data = ''
    def write(self, data):
        self.out(data)
class MyInterpreter(code.InteractiveInterpreter):
    """Interactive interpreter whose output is routed to a display callback."""
    def __init__(self, locals, display):
        self.write = display
        code.InteractiveInterpreter.__init__(self, locals=locals)
    def execute(self, input):
        # Swap stdout so anything printed by the executed source lands in
        # the on-screen console, then restore the real stdout afterwards.
        old_stdout = sys.stdout
        sys.stdout = Output(self.write, old_stdout)
        more = self.runsource(input)
        sys.stdout = old_stdout
        return more
class PythonInterpreterLayer(ColorLayer):
'''Runs an interactive Python interpreter as a child `Layer` of the current `Scene`.
'''
    cfg = {'code.font_name': 'Arial',
           'code.font_size': 12,
           'code.color': (255, 255, 255, 255),
           'caret.color': (255, 255, 255),
           }
name = 'py'
prompt = ">>> " #: python prompt
prompt_more = "... " #: python 'more' prompt
doing_more = False
is_event_handler = True #: enable pyglet's events
def __init__(self):
        super(PythonInterpreterLayer, self).__init__(32, 32, 32, 192)
self.content = self.prompt
local_vars = director.interpreter_locals
local_vars["self"] = self
self.interpreter = MyInterpreter(
local_vars, self._write)
self.current_input = []
self.history = ['']
self.history_pos = 0
def on_enter(self):
super(PythonInterpreterLayer, self).on_enter()
        vw, vh = cocos.director.director.get_window_size()
# format the code
self.document = document.FormattedDocument(self.content)
self.document.set_style(0, len(self.document.text), {
'font_name': self.cfg['code.font_name'],
'font_size': self.cfg['code.font_size'],
'color': self.cfg['code.color'],
})
self.batch = graphics.Batch()
# generate the document
self.layout = layout.IncrementalTextLayout(self.document,
vw, vh, multiline=True, batch=self.batch)
        self.layout.anchor_y = 'top'
        self.caret = caret.Caret(self.layout, color=self.cfg['caret.color'])
self.caret.on_activate()
self.on_resize(vw, vh)
self.start_of_line = len(self.document.text)
def on_resize(self, x, y):
vw, vh = director.get_window_size()
self.layout.begin_update()
self.layout.height = vh
self.layout.x = 2
self.layout.width = vw - 4
self.layout.y = vh
self.layout.end_update()
# XXX: hack
        x, y = director.window.width, director.window.height
        self.layout.top_group._scissor_width = x - 4
self.caret.position = len(self.document.text)
def on_exit(self):
super(PythonInterpreterLayer, self).on_exit()
self.content = self.document.text
self.document = None
self.layout = None
self.batch = None
self.caret = None
def on_key_press(self, symbol, modifiers):
if symbol == pyglet.window.key.TAB:
return self.caret.on_text('\t')
elif symbol in (pyglet.window.key.ENTER, pyglet.window.key.NUM_ENTER):
# write the newline
self._write('\n')
line = self.document.text[self.start_of_line:]
if line.strip() == 'help()':
line = 'print "help() not supported, sorry!"'
self.current_input.append(line)
self.history_pos = len(self.history)
if line.strip():
self.history[self.history_pos-1] = line.strip()
self.history.append('')
more = False
if not self.doing_more:
more = self.interpreter.execute('\n'.join(self.current_input))
if self.doing_more and not line.strip():
self.doing_more = False
self.interpreter.execute('\n'.join(self.current_input))
more = more or self.doing_more
if not more:
self.current_input = []
self._write(self.prompt)
else:
self.doing_more = True
self._write(self.prompt_more)
self.start_of_line = len(self.document.text)
self.caret.position = len(self.document.text)
elif symbol == pyglet.window.key.SPACE:
pass
else:
return pyglet.event.EVENT_UNHANDLED
return pyglet.event.EVENT_HANDLED
def on_text(self, symbol):
# squash carriage return - we already handle them above
if symbol == '\r':
return pyglet.event.EVENT_HANDLED
self._scroll_to_bottom()
return self.caret.on_text(symbol)
def on_text_motion(self, motion):
at_sol = self.caret.position == self.start_of_line
if motion == pyglet.window.key.MOTION_UP:
# move backward in history, storing the current line of input
# if we're at the very end of time
line = self.document.text[self.start_of_line:]
if self.history_pos == len(self.history)-1:
self.history[self.history_pos] = line
self.history_pos = max(0, self.history_pos-1)
self.document.delete_text(self.start_of_line,
len(self.document.text))
self._write(self.history[self.history_pos])
self.caret.position = len(self.document.text)
elif motion == pyglet.window.key.MOTION_DOWN:
# move forward in the history
self.history_pos = min(len(self.history)-1, self.history_pos+1)
self.document.delete_text(self.start_of_line,
len(self.document.text))
self._write(self.history[self.history_pos])
self.caret.position = len(self.document.text)
elif motion == pyglet.window.key.MOTION_BACKSPACE:
# can't delete the prompt
if not at_sol:
return self.caret.on_text_motion(motion)
elif motion == pyglet.window.key.MOTION_LEFT:
# can't move back beyond start of line
if not at_sol:
return self.caret.on_text_motion(motion)
elif motion == pyglet.window.key.MOTION_PREVIOUS_WORD:
# can't move back word beyond start of line
if not at_sol:
return self.caret.on_text_motion(motion)
else:
return self.caret.on_text_motion(motion)
return pyglet.event.EVENT_HANDLED
def _write(self, s):
self.document.insert_text(len(self.document.text), s, {
'font_name': self.cfg['code.font_name'],
'font_size': self.cfg['code.font_size'],
'color': self.cfg['code.color'],
})
self._scroll_to_bottom()
def _scroll_to_bottom(self):
# on key press always move the view to the bottom of the screen
if self.layout.height < self.layout.content_height:
            self.layout.anchor_y = 'bottom'
self.layout.y = 0
self.layout.view_y = 0
if self.caret.position < self.start_of_line:
self.caret.position = len(self.document.text)
def draw(self):
        super(PythonInterpreterLayer, self).draw()
self.batch.draw()
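# Minimal usage sketch (not part of the original module), assuming the
# standard cocos2d startup sequence; it simply mounts the interpreter
# layer in its own scene:
#
#     from cocos.director import director
#     from cocos.scene import Scene
#
#     director.init()
#     director.run(Scene(PythonInterpreterLayer()))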
|
shadowmint/nwidget
|
lib/cocos2d-0.5.5/cocos/layer/python_interpreter.py
|
Python
|
apache-2.0
| 9,867
| 0.003142
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
DEPS = [
'recipe_engine/step',
'url',
]
def RunSteps(api):
api.step('step1',
['/bin/echo', api.url.join('foo', 'bar', 'baz')])
api.step('step2',
['/bin/echo', api.url.join('foo/', '/bar/', '/baz')])
api.step('step3',
['/bin/echo', api.url.join('//foo/', '//bar//', '//baz//')])
api.step('step4',
['/bin/echo', api.url.join('//foo/bar//', '//baz//')])
api.url.fetch('fake://foo/bar', attempts=5)
api.url.fetch('fake://foo/bar (w/ auth)', headers={'Authorization': 'thing'})
def GenTests(api):
yield api.test('basic')
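# Rough pure-Python sketch of the slash-normalizing join the steps above
# appear to exercise (an assumption inferred from the inputs; the real
# recipe module may behave differently, e.g. around URL schemes):
#
#     def join(*parts):
#         return '/'.join(str(p).strip('/') for p in parts)
#
#     join('foo/', '/bar/', '/baz')     # -> 'foo/bar/baz'
#     join('//foo/bar//', '//baz//')    # -> 'foo/bar/baz'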
|
eunchong/build
|
scripts/slave/recipe_modules/url/example.py
|
Python
|
bsd-3-clause
| 743
| 0.012113
|
import os
import json
from django.test import TestCase
from documents.models import Document
from documents.exporters.sql import (MysqlExporter, OracleExporter,
PostgresExporter, SQLiteExporter)
TEST_DOCUMENT_PATH = os.path.join(os.path.dirname(__file__),
"fixtures/test_document.json")
class ExporterTestCase(TestCase):
def setUp(self):
        with open(TEST_DOCUMENT_PATH) as f:
            self.document = Document(json.load(f))
def test_mysql_exporter(self):
"""
Tests MySQL exporter.
"""
exporter = MysqlExporter(self.document)
self.assertEqual(exporter.as_text(), """
CREATE TABLE `permissions` (
`id` int PRIMARY KEY,
`name` varchar(255)
);
CREATE TABLE `users_roles` (
`users_id` int,
`roles_id` int,
FOREIGN KEY(`users_id`) REFERENCES `users` (`id`),
FOREIGN KEY(`roles_id`) REFERENCES `roles` (`id`)
);
CREATE TABLE `roles` (
`id` int PRIMARY KEY,
`name` varchar(255)
);
CREATE TABLE `roles_permissions` (
`roles_id` int,
`permissions_id` int,
FOREIGN KEY(`roles_id`) REFERENCES `roles` (`id`),
FOREIGN KEY(`permissions_id`) REFERENCES `permissions` (`id`)
);
CREATE TABLE `users` (
`id` int PRIMARY KEY,
`name` varchar(255)
);""".strip())
def test_oracle_exporter(self):
"""
Tests Oracle exporter.
"""
exporter = OracleExporter(self.document)
self.assertEqual(exporter.as_text(), """
CREATE TABLE "permissions" (
"id" int PRIMARY KEY,
"name" varchar(255)
);
CREATE TABLE "users_roles" (
"users_id" int CONSTRAINT users_id REFERENCES users(id),
"roles_id" int CONSTRAINT roles_id REFERENCES roles(id)
);
CREATE TABLE "roles" (
"id" int PRIMARY KEY,
"name" varchar(255)
);
CREATE TABLE "roles_permissions" (
"roles_id" int CONSTRAINT roles_id REFERENCES roles(id),
"permissions_id" int CONSTRAINT permissions_id REFERENCES permissions(id)
);
CREATE TABLE "users" (
"id" int PRIMARY KEY,
"name" varchar(255)
);""".strip())
def test_postgres_exporter(self):
"""
Tests Postgres exporter.
"""
exporter = PostgresExporter(self.document)
self.assertEqual(exporter.as_text(), """
CREATE TABLE "permissions" (
"id" int PRIMARY KEY,
"name" varchar(255)
);
CREATE TABLE "users_roles" (
"users_id" int,
"roles_id" int,
FOREIGN KEY("users_id") REFERENCES "users" ("id"),
FOREIGN KEY("roles_id") REFERENCES "roles" ("id")
);
CREATE TABLE "roles" (
"id" int PRIMARY KEY,
"name" varchar(255)
);
CREATE TABLE "roles_permissions" (
"roles_id" int,
"permissions_id" int,
FOREIGN KEY("roles_id") REFERENCES "roles" ("id"),
FOREIGN KEY("permissions_id") REFERENCES "permissions" ("id")
);
CREATE TABLE "users" (
"id" int PRIMARY KEY,
"name" varchar(255)
);""".strip())
def test_sqlite_exporter(self):
"""
Tests SQLite exporter.
"""
exporter = SQLiteExporter(self.document)
self.assertEqual(exporter.as_text(), """
CREATE TABLE "permissions" (
"id" int PRIMARY KEY,
"name" varchar(255)
);
CREATE TABLE "users_roles" (
"users_id" int FOREIGN KEY("users_id") REFERENCES "users" ("id"),
"roles_id" int FOREIGN KEY("roles_id") REFERENCES "roles" ("id")
);
CREATE TABLE "roles" (
"id" int PRIMARY KEY,
"name" varchar(255)
);
CREATE TABLE "roles_permissions" (
"roles_id" int FOREIGN KEY("roles_id") REFERENCES "roles" ("id"),
"permissions_id" int FOREIGN KEY("permissions_id") REFERENCES "permissions" ("id")
);
CREATE TABLE "users" (
"id" int PRIMARY KEY,
"name" varchar(255)
);""".strip())
|
fatiherikli/dbpatterns
|
web/dbpatterns/documents/tests.py
|
Python
|
mit
| 3,562
| 0.016844
|