repo_name (string, 5-100 chars) | path (string, 4-231 chars) | language (string, 1 class) | license (string, 15 classes) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (string, 0-8.16k chars) | middle (string, 3-512 chars) | suffix (string, 0-8.17k chars)

edwintye/pygotools | pygotools/convex/__init__.py | Python | gpl-2.0 | 412 | 0.002427
''' direct
.. moduleauthor:: Edwin Tye <Edwin.Tye@gmail.com>
'''
from __future__ import division, print_function, absolute_import
from .sqp import *
from .ip import *
from .ipBar import *
from .ipPD import *
from .ipPDC import *
from .ipPDandPDC import *
from .approxH import *
from .trust import *
__all__ = [s for s in dir() if not s.startswith('_')]
from numpy.testing import Tester
test = Tester().test
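# The comprehension above collects every public name brought in by the star
# imports, so `from pygotools.convex import *` re-exports the solver routines;
# `test` wires the subpackage into numpy.testing's runner.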

SNoiraud/gramps | gramps/gen/filters/rules/family/_hasreltype.py | Python | gpl-2.0 | 2,354 | 0.005098
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2002-2006 Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
from ....const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from ....lib.familyreltype import FamilyRelType
from .. import Rule
#-------------------------------------------------------------------------
#
# HasRelType
#
#-------------------------------------------------------------------------
class HasRelType(Rule):
"""Rule that checks for a person with a particular personal attribute"""
labels = [ _('Relationship type:') ]
name = _('Families with the relationship type')
description = _("Matches families with the relationship type "
"of a particular value")
category = _('General filters')
def prepare(self, db, user):
if self.list[0]:
self.rtype = FamilyRelType()
self.rtype.set_from_xml_str(self.list[0])
else:
self.rtype = None
def apply(self, db, family):
if self.rtype:
if self.rtype.is_custom() and self.use_regex:
if self.regex[0].search(str(family.get_relationship())) is None:
return False
elif self.rtype != family.get_relationship():
return False
return True
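# A minimal sketch of how the filter framework might drive this rule
# (hypothetical argument value and call sites):
# rule = HasRelType(['Married'])
# rule.prepare(db, user)
# matches = [family for family in db.iter_families() if rule.apply(db, family)]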

dw/scratch | overalloc.py | Python | mit | 161 | 0.018634
def d(n):
    tp = (n>>4) + (n>>3)
if tp < 64:
tp = 64
    if tp > 2048:
        tp = 2048
print[n,tp]
for x in range(0, 9999, 512):
d(x)
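# The curve printed above is tp = (n>>4) + (n>>3), i.e. roughly 3n/16 of extra
# capacity, clamped to the range [64, 2048]; for example d(512) prints
# [512, 96] and d(4096) would print [4096, 768].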

tushartushar/Puppeteer | SourceModel/SM_File.py | Python | apache-2.0 | 17,577 | 0.004153
import re
import SourceModel.SM_CaseStmt
import SourceModel.SM_Class
import SourceModel.SM_Constants as SMCONSTS
import SourceModel.SM_Define
import SourceModel.SM_Element
import SourceModel.SM_Exec
import SourceModel.SM_FileResource
import SourceModel.SM_IfStmt
import SourceModel.SM_IncludeResource
import SourceModel.SM_LCOM
import SourceModel.SM_Node
import SourceModel.SM_PackageResource
import SourceModel.SM_ServiceResource
import SourceModel.SM_User
from SmellDetector import Utilities
class SM_File:
def __init__(self, file=""):
if file != "":
curFile = open(file, 'rt', errors='ignore')
self.fileText = curFile.read()
self.resourceBodyText = self.fileText
self.fileName = file
curFile.close()
def setText(self, text):
self.fileText = text
def getNoOfClassDeclarations(self):
return self.countEntityDeclaration(SMCONSTS.CLASS_REGEX, "class")
def getNoOfDefineDeclarations(self):
return self.countEntityDeclaration(SMCONSTS.DEFINE_REGEX, "define")
def getNoOfFileDeclarations(self):
return self.countEntityDeclaration(SMCONSTS.FILE_REGEX, "file")
def getNoOfPackageDeclarations(self):
return self.countEntityDeclaration(SMCONSTS.PACKAGE_REGEX, "package")
def getNoOfServiceDeclarations(self):
return self.countEntityDeclaration(SMCONSTS.SERVICE_REGEX, "service")
def getNoOfExecDeclarations(self):
return self.countEntityDeclaration(SMCONSTS.EXEC_REGEX, "exec")
def getLinesOfCode(self):
counter = self.countEntityDeclaration(SMCONSTS.LOC_REGEX, "newLine")
if counter > 0:
return counter+1
if (len(self.fileText) > 0):
return 1
return 0
def getLinesOfCodeWithoutComments(self):
totalLines = self.getLinesOfCode()
totalCommentsLines = self.getLinesOfComments()
return totalLines - totalCommentsLines
def getLinesOfComments(self):
counter = self.countEntityDeclaration(SMCONSTS.COMMENT_REGEX, "newLine")
return counter
def countEntityDeclaration(self, regEx, entityType):
compiledRE = re.compile(regEx)
Utilities.myPrint("Identified " + entityType + " declarations: " + str(compiledRE.findall(self.fileText)) + \
" Size: " + str(len(compiledRE.findall(self.fileText))))
return len(compiledRE.findall(self.fileText))
def getFileResourceList(self):
compiledRE = re.compile(SMCONSTS.FILE_REGEX)
fileResourceList = []
for match in (compiledRE.findall(self.fileText)):
fileResourceText = self.extractResourceText(match)
Utilities.myPrint("Extracted file declaration: " + fileResourceText)
fileResourceObj = SourceModel.SM_FileResource.SM_FileResource(fileResourceText)
fileResourceList.append(fileResourceObj)
return fileResourceList
def extractResourceText(self, initialString):
index = self.fileText.find(initialString)
if index < 0:
return initialString
compiledRE1 = re.compile(r'\{')
compiledRE2 = re.compile(r'\}')
curBracketCount = len(compiledRE1.findall(initialString)) - len(compiledRE2.findall(initialString))
curIndex = index + len(initialString) + 1
if curBracketCount == 0:
#This is to find the first "{" since currently there is no { which may happen in case of multi-line def
found = False
            while curIndex < len(self.fileText) and not found:
if self.fileText[curIndex] == '{':
found = True
                    curBracketCount = 1
curIndex += 1
while curBracketCount > 0 and curIndex < len(self.fileText):
if self.fileText[curIndex] == '}':
curBracketCount -= 1
if self.fileText[curIndex] == '{':
curBracketCount += 1
curIndex +=1
return self.fileText[index:curIndex]
def getServiceResourceList(self):
compiledRE = re.compile(SMCONSTS.SERVICE_REGEX)
serviceResourceList = []
for match in (compiledRE.findall(self.fileText)):
serviceResourceText = self.extractResourceText(match)
Utilities.myPrint("Extracted service declaration: " + serviceResourceText)
serviceResourceObj = SourceModel.SM_ServiceResource.SM_ServiceResource(serviceResourceText)
serviceResourceList.append(serviceResourceObj)
return serviceResourceList
def getPackageResourceList(self):
compiledRE = re.compile(SMCONSTS.PACKAGE_REGEX)
packageResourceList = []
for match in (compiledRE.findall(self.fileText)):
packageResourceText = self.extractResourceText(match)
Utilities.myPrint("Extracted package declaration: " + packageResourceText)
packageResourceObj = SourceModel.SM_PackageResource.SM_PackageResource(packageResourceText)
packageResourceList.append(packageResourceObj)
return packageResourceList
def getClassDeclarationList(self):
compiledRE = re.compile(SMCONSTS.CLASS_REGEX)
compiledClassNameRE = re.compile(SMCONSTS.CLASS_NAME_REGEX)
classList = []
for match in compiledRE.findall(self.fileText):
className = compiledClassNameRE.findall(match)[0]
#print("Class name: %s" % (className))
classText = self.extractResourceText(match)
Utilities.myPrint("Extracted class declaration: " + classText)
classObj = SourceModel.SM_Class.SM_Class(classText, className)
classList.append(classObj)
return classList
def getDefineDeclarationList(self):
compiledRE = re.compile(SMCONSTS.DEFINE_REGEX)
defineList = []
for match in compiledRE.findall(self.fileText):
defineText, s, e = self.extractElementText(match)
Utilities.myPrint("Extracted define declaration: " + defineText)
defineObj = SourceModel.SM_Define.SM_Define(defineText)
defineList.append(defineObj)
return defineList
def getLCOM(self):
return SourceModel.SM_LCOM.getLCOM(self.getOuterElementList())
def getBodyTextSize(self):
loc = self.getLinesOfCode()
return loc, len(self.resourceBodyText)
def getOuterClassList(self):
outerElementList = self.getOuterElementList()
classList = []
for element in outerElementList:
if type(element) is SourceModel.SM_Class.SM_Class:
classList.append(element)
return classList
def getOuterDefineList(self):
outerElementList = self.getOuterElementList()
defineList = []
for element in outerElementList:
if type(element) is SourceModel.SM_Define.SM_Define:
defineList.append(element)
return defineList
# exElementList = []
# exElementList.extend(self.getElementList(SMCONSTS.DEFINE_REGEX))
# filteredList = self.filterOutInnerElements(exElementList)
# return filteredList
def getOuterElementList(self):
exElementList = []
exElementList.extend(self.getElementList(SMCONSTS.CLASS_REGEX))
exElementList.extend(self.getElementList(SMCONSTS.SERVICE_REGEX))
exElementList.extend(self.getElementList(SMCONSTS.CASE_REGEX))
exElementList.extend(self.getElementList(SMCONSTS.DEFINE_REGEX))
exElementList.extend(self.getElementList(SMCONSTS.EXEC_REGEX))
exElementList.extend(self.getElementList(SMCONSTS.FILE_REGEX))
exElementList.extend(self.getElementList(SMCONSTS.IF_REGEX))
exElementList.extend(self.getElementList(SMCONSTS.PACKAGE_REGEX))
exElementList.extend(self.getElementList(SMCONSTS.USER_REGEX))
filteredList = self.filterOutInnerElements(exElementList)
return filteredList
def getElementList(self, regex):
compiledRE = re.compile(regex)
exElementList = []
for str in (compiledRE.findall(self.fileText)):

edwardsdl/cryptopals | cryptopals/tests/test_common.py | Python | mit | 1,109 | 0.003607
import pytest
import cryptopals.common as common
def test_base64_from_hex():
assert 'SGVsbG8sIHdvcmxkIQ==' == common.base64_from_hex('48656c6c6f2c20776f726c6421')
@pytest.mark.parametrize('first_bytes, second_bytes, expected_output', [
    (b'Hello,', b'World!', 17),
(b'foo', b'bar', 8),
(b'baz', b'qux', 6)
])
def test_compute_hamming_distance(first_bytes, second_bytes, expected_output):
assert common.compute_hamming_distance(first_bytes, second_bytes) == expected_output
@pytest.mark.parametrize('message, expected_output', [
('Now is the time for all good men to come to the aid of their country', 13),
('The quick brown fox jumps over the lazy dog', 4),
('ETAOIN SHRDLU', 0)
])
def test_score_message(message, expected_output):
assert common.score_message_using_word_list(message) == expected_output
def test_score_message_using_frequency_analysis():
assert common.score_message_using_frequency_analysis('Defend the east wall of the castle') == 23.610265850026284
def test_word_list_contains_1000_entries():
assert 1000 == len(common.get_word_list())
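# A sketch of compute_hamming_distance consistent with the expectations above
# (the actual cryptopals.common implementation may differ): XOR each byte pair
# and count the differing bits.
def _hamming_distance_sketch(first_bytes, second_bytes):
    return sum(bin(x ^ y).count('1')
               for x, y in zip(bytearray(first_bytes), bytearray(second_bytes)))
assert _hamming_distance_sketch(b'Hello,', b'World!') == 17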

coll-gate/collgate | server/accession/models.py | Python | mit | 31,423 | 0.002355
# -*- coding: utf-8; -*-
#
# @file models.py
# @brief coll-gate accession module models.
# @author Frédéric SCHERMA (INRA UMR1095), Medhi BOULNEMOUR (INRA UMR1095)
# @date 2016-09-01
# @copyright Copyright (c) 2016 INRA/CIRAD
# @license MIT (see LICENSE file)
# @details
import re
from django.contrib.auth.models import User
from django.contrib.postgres.fields import JSONField
from django.db import models
from django.db.models import Q, Prefetch
from django.utils import translation
from django.utils.translation import ugettext_lazy as _
from igdectk.common.models import ChoiceEnum, IntegerChoice
from accession import localsettings
from classification.models import ClassificationEntry
from descriptor.models import DescribableEntity
from descriptor.models import Layout
from main.models import Entity, EntitySynonym, ContentType, EntitySynonymType
class AccessionClassificationEntry(models.Model):
"""
M2M accession to classification entry with additional flags.
"""
# accession object
accession = models.ForeignKey('Accession', on_delete=models.PROTECT)
# classification entry object
classification_entry = models.ForeignKey(ClassificationEntry, on_delete=models.PROTECT)
# is a primary or secondary classification association
primary = models.BooleanField(default=False, db_index=True)
def natural_name(self):
return self.accession.name
class Meta:
index_together = (
('accession', 'classification_entry'),
('accession', 'primary')
)
class Accession(DescribableEntity):
"""
Accession entity defines a physical or virtual accession.
"""
# name pattern
NAME_RE = re.compile(r"^\S+.+\S+$", re.IGNORECASE)
# default name validator
NAME_VALIDATOR = {"type": "string", "minLength": 1, "maxLength": 255, "pattern": "^\S+.+\S+$"}
# non-unique primary name of the accession
name = models.CharField(max_length=255, db_index=True)
# unique GRC code of the accession
code = models.CharField(unique=True, max_length=255, db_index=True)
# primary classification as simple FK for a simple join
primary_classification_entry = models.ForeignKey(
ClassificationEntry, on_delete=models.PROTECT, related_name='primary_accessions', null=True)
    # an accession can have many classifications, but at least a primary one
classifications_entries = models.ManyToManyField(
through=AccessionClassificationEntry, to=ClassificationEntry, related_name='accession_set')
@classmethod
def get_defaults_columns(cls):
columns = {
'primary_classification_entry': {
                'label': _('Primary classification'),
'field': 'name',
'query': True, # False, # could be later, for the moment LEFT JOIN into the queryset
'format': {
'type': 'entity',
'model': 'classification.classificationentry',
'details': True
},
'available_operators': ['isnull', 'notnull', 'eq', 'neq', 'in', 'notin']
},
'layout': {
'label': _('Layout'),
'field': 'name',
'query': True,
'format': {
'type': 'layout',
'model': 'accession.accession'
},
'available_operators': ['isnull', 'notnull', 'eq', 'neq', 'in', 'notin']
},
'name': {
'label': _('Name'),
'query': False, # done by a prefetch related
'format': {
'type': 'string',
'model': 'accession.accession'
},
'available_operators': ['isnull', 'notnull', 'eq', 'neq', 'icontains']
},
'code': {
'label': _('Code'),
'query': False, # done by a prefetch related
'format': {
'type': 'string',
'model': 'accession.accession'
},
'available_operators': ['isnull', 'notnull', 'eq', 'neq', 'icontains']
},
'panels': {
'label': _('Linked panels'),
'field': 'name',
'query': False, # done by a prefetch related
'format': {
'type': 'entity',
'model': 'accession.accessionpanel'
},
'available_operators': [
'contains',
'not_contains',
'overlap',
'not_overlap'
],
'column_display': False,
'search_display': True
},
'classifications': {
'label': _('Classifications'),
'field': 'name',
'query': False, # done by a prefetch related
'format': {
'type': 'entity',
'model': 'classification.classificationentry',
},
'available_operators': [
'contains',
'not_contains',
'overlap',
'not_overlap'
],
'column_display': False,
'search_display': True
}
}
synonym_types = EntitySynonymType.objects.filter(target_model=ContentType.objects.get_for_model(Accession))
for synonym_type in synonym_types:
columns['&' + synonym_type.name] = {
'label': synonym_type.get_label(),
# 'field': 'synonym',
'query': False,
'format': {
'type': 'string',
'model': 'accession.accessionsynonym',
},
'available_operators': ['isnull', 'notnull', 'eq', 'neq', 'icontains']
}
if synonym_type.multiple_entry:
columns['&' + synonym_type.name]['column_display'] = False
columns['&' + synonym_type.name]['search_display'] = True
return columns
class Meta:
verbose_name = _("accession")
permissions = (
("get_accession", "Can get an accession"),
("list_accession", "Can list accessions"),
("search_accession", "Can search for accessions")
)
def natural_name(self):
return self.name
def details(self):
return {
'id': self.id,
'name': self.name,
}
@classmethod
def make_search_by_name(cls, term):
return Q(name__istartswith=term)
def audit_create(self, user):
return {
'name': self.name,
'code': self.code,
'primary_classification_entry': self.primary_classification_entry_id,
'layout': self.layout_id,
'descriptors': self.descriptors,
'comments': self.comments
}
def audit_update(self, user):
if hasattr(self, 'updated_fields'):
result = {'updated_fields': self.updated_fields}
if 'code' in self.updated_fields:
result['code'] = self.code
if 'name' in self.updated_fields:
result['name'] = self.name
if 'primary_classification_entry' in self.updated_fields:
result['primary_classification_entry'] = self.primary_classification_entry_id
if 'descriptors' in self.updated_fields:
if hasattr(self, 'updated_descriptors'):
result['descriptors'] = self.updated_descriptors
else:
result['descriptors'] = self.descriptors
            if 'comments' in self.updated_fields:
                result['comments'] = self.comments
return result
else:
return {
'name': self.name,
'code': self.code,
                'primary_classification_entry': self.primary_classification_entry_id,
'descriptors': self.desc

zmarvel/slowboy | tests/test_gpu.py | Python | mit | 7,556 | 0.000794
import unittest
import slowboy.gpu
import slowboy.interrupts
from tests.mock_interrupt_controller import MockInterruptController
STAT_IE_ALL_MASK = (slowboy.gpu.STAT_LYC_IE_MASK |
slowboy.gpu.STAT_OAM_IE_MASK |
slowboy.gpu.STAT_HBLANK_IE_MASK |
slowboy.gpu.STAT_VBLANK_IE_MASK)
class TestGPU(unittest.TestCase):
def setUp(self):
self.gpu = slowboy.gpu.GPU()
self.interrupt_controller = MockInterruptController()
def test_constructor(self):
self.assertEqual(len(self.gpu.vram), 0x2000)
self.assertEqual(len(self.gpu.oam), 0xa0)
self.assertEqual(self.gpu.lcdc, 0x91)
self.assertEqual(self.gpu.scy, 0x00)
self.assertEqual(self.gpu.scx, 0x00)
self.assertEqual(self.gpu.ly, 0x00)
self.assertEqual(self.gpu.lyc, 0x00)
self.assertEqual(self.gpu.bgp, 0xfc)
self.assertEqual(self.gpu.obp0, 0xff)
self.assertEqual(self.gpu.obp1, 0xff)
self.assertEqual(self.gpu.wy, 0x00)
self.assertEqual(self.gpu.wx, 0x00)
# LYC=LY, Mode.OAM_READ
self.assertEqual(self.gpu.stat, 0x04 | 0x02)
self.assertEqual(self.gpu.mode, slowboy.gpu.Mode.OAM_READ)
self.assertEqual(self.gpu.mode_clock, 0)
def test_mode(self):
# Force ClockListener.notify and verify mode state transitions
for i in range(144):
# OAM_READ (2)
self.assertEqual(self.gpu.mode, slowboy.gpu.Mode.OAM_READ)
self.assertEqual(self.gpu.mode_clock, 0)
self.assertEqual(self.gpu.stat & slowboy.gpu.STAT_MODE_MASK,
slowboy.gpu.Mode.OAM_READ.value)
# OAM_VRAM_READ (3)
self.gpu.notify(0, 80)
self.assertEqual(self.gpu.mode, slowboy.gpu.Mode.OAM_VRAM_READ)
self.assertEqual(self.gpu.stat & slowboy.gpu.STAT_MODE_MASK,
slowboy.gpu.Mode.OAM_VRAM_READ.value)
self.assertEqual(self.gpu.mode_clock, 0)
# HBLANK (0)
self.gpu.notify(0, 172)
self.assertEqual(self.gpu.mode, slowboy.gpu.Mode.H_BLANK)
self.assertEqual(self.gpu.stat & slowboy.gpu.STAT_MODE_MASK,
slowboy.gpu.Mode.H_BLANK.value)
self.assertEqual(self.gpu.mode_clock, 0)
self.gpu.notify(0, 204)
# VBLANK (1)
self.assertEqual(self.gpu.mode, slowboy.gpu.Mode.V_BLANK)
self.assertEqual(self.gpu.stat & slowboy.gpu.STAT_MODE_MASK,
slowboy.gpu.Mode.V_BLANK.value)
self.assertEqual(self.gpu.mode_clock, 0)
def test_stat_mode(self):
# Initial mode is OAM_READ
self.assertEqual(self.gpu.stat & slowboy.gpu.STAT_MODE_MASK,
slowboy.gpu.Mode.OAM_READ.value)
self.gpu.mode = slowboy.gpu.Mode.OAM_VRAM_READ
self.assertEqual(self.gpu.stat & slowboy.gpu.STAT_MODE_MASK,
slowboy.gpu.Mode.OAM_VRAM_READ.value)
self.gpu.mode = slowboy.gpu.Mode.H_BLANK
self.assertEqual(self.gpu.stat & slowboy.gpu.STAT_MODE_MASK,
slowboy.gpu.Mode.H_BLANK.value)
self.gpu.mode = slowboy.gpu.Mode.V_BLANK
self.assertEqual(self.gpu.stat & slowboy.gpu.STAT_MODE_MASK,
slowboy.gpu.Mode.V_BLANK.value)
def test_stat_oam_interrupt(self):
self.gpu.load_interrupt_controller(self.interrupt_controller)
self.assertEqual(self.gpu.stat & slowboy.gpu.STAT_OAM_IE_MASK, 0)
self.gpu.stat |= slowboy.gpu.STAT_OAM_IE_MASK
self.gpu.mode = slowboy.gpu.Mode.OAM_READ
self.assertEqual(self.interrupt_controller.last_interrupt,
slowboy.interrupts.InterruptType.stat)
def test_stat_lyc_interrupt(self):
        self.gpu.load_interrupt_controller(self.interrupt_controller)
self.assertEqual(self.gpu.stat & slowboy.gpu.STAT_LYC_IE_MASK, 0)
self.gpu.stat |= slowboy.gpu.STAT_LYC_IE_MASK
self.gpu.ly = self.gpu.lyc
        self.assertEqual(self.interrupt_controller.last_interrupt,
slowboy.interrupts.InterruptType.stat)
def test_stat_hblank_interrupt(self):
self.gpu.load_interrupt_controller(self.interrupt_controller)
self.assertEqual(self.gpu.stat & slowboy.gpu.STAT_HBLANK_IE_MASK, 0)
self.gpu.stat |= slowboy.gpu.STAT_HBLANK_IE_MASK
self.gpu.mode = slowboy.gpu.Mode.H_BLANK
self.assertEqual(self.interrupt_controller.last_interrupt,
slowboy.interrupts.InterruptType.stat)
def test_stat_vblank_interrupt(self):
self.gpu.load_interrupt_controller(self.interrupt_controller)
self.assertEqual(self.gpu.stat & slowboy.gpu.STAT_VBLANK_IE_MASK, 0)
self.gpu.stat |= slowboy.gpu.STAT_VBLANK_IE_MASK
self.gpu.mode = slowboy.gpu.Mode.V_BLANK
self.assertEqual(self.interrupt_controller.last_interrupt,
slowboy.interrupts.InterruptType.stat)
def test__update_vram(self):
# TODO
self.fail('Not implemented: test__update_vram')
def test_colorto8bit(self):
self.assertRaises(ValueError, slowboy.gpu.colorto8bit, 4)
self.assertEqual(slowboy.gpu.colorto8bit(0), 0xff)
self.assertEqual(slowboy.gpu.colorto8bit(1), 0xaa)
self.assertEqual(slowboy.gpu.colorto8bit(2), 0x55)
self.assertEqual(slowboy.gpu.colorto8bit(3), 0x00)
def test_bgp(self):
# 11 11 11 00
self.assertEqual(self.gpu.bgp, 0xfc)
self.assertEqual(self.gpu._palette, [0xff, 0x00, 0x00, 0x00])
# 00 01 10 11
self.gpu.bgp = 0x1b
self.assertEqual(self.gpu.bgp, 0x1b)
self.assertEqual(self.gpu._palette, [0x00, 0x55, 0xaa, 0xff])
def test_obp(self):
self.assertEqual(self.gpu.obp0, 0xff)
self.assertEqual(self.gpu._sprite_palette0, [0xff, 0x00, 0x00, 0x00])
self.assertEqual(self.gpu.obp1, 0xff)
self.assertEqual(self.gpu._sprite_palette1, [0xff, 0x00, 0x00, 0x00])
# 00 01 10 11
self.gpu.obp0 = 0x1b
self.assertEqual(self.gpu.obp0, 0x1b)
self.assertEqual(self.gpu._sprite_palette0, [0xff, 0x55, 0xaa, 0xff])
# 11 10 01 00
self.gpu.obp1 = 0xe4
self.assertEqual(self.gpu.obp1, 0xe4)
self.assertEqual(self.gpu._sprite_palette1, [0xff, 0xaa, 0x55, 0x00])
def test_ly_lyc(self):
self.assertEqual(self.gpu.ly, 0)
# Changing LYC so that LYC != LY should clear STAT LYC flag
self.gpu.lyc = 5
self.assertEqual(self.gpu.stat & slowboy.gpu.STAT_LYC_FLAG_MASK, 0)
# Make LY = LYC -- STAT LYC flag should be set
self.gpu.ly = 5
self.assertEqual(self.gpu.stat & slowboy.gpu.STAT_LYC_FLAG_MASK,
slowboy.gpu.STAT_LYC_FLAG_MASK)
# Changing LY so that LYC != LY should *also* clear STAT LYC flag
self.gpu.ly = 6
self.assertEqual(self.gpu.stat & slowboy.gpu.STAT_LYC_FLAG_MASK, 0)
# Make LYC = LY -- should also set STAT LYC flag
self.gpu.lyc = 6
self.assertEqual(self.gpu.stat & slowboy.gpu.STAT_LYC_FLAG_MASK,
slowboy.gpu.STAT_LYC_FLAG_MASK)
def test_wx_wy(self):
self.assertEqual(self.gpu.wx, 0)
self.assertEqual(self.gpu.wy, 0)
self.gpu.wx = 7
self.assertEqual(self.gpu._wx, 0)
self.gpu.wy = 0
self.assertEqual(self.gpu._wy, 0)

thesecuritystoic/Packet2Snort | packet2snort.py | Python | gpl-3.0 | 9,018 | 0.026503
import os
import sys
try:
from scapy.all import *
except ImportError:
sys.stderr.write("ERROR: You must have scapy installed.\n")
sys.stderr.write("You can install it by running: sudo pip install -U 'scapy>=2.3,<2.4'")
exit(1)
try:
from scapy.layers import http
except ImportError:
sys.stderr.write("ERROR: You must have scapy-http installed.\n")
sys.stderr.write("You can install it by running: sudo pip install -U 'scapy>=1.8'")
exit(1)
import getopt
import sys
def usage():
print "----\/---- Packet2Snort ----\/----"
print "\nThis script parses a network packet from a PCAP file into a useable Snort rule for incident response, threat hunting and detection."
print "\nRequirements: \n- Scapy \n- Scapy-HTTP \n- Python 2.7"
print "\nUsage:\npacket2snort.py <options>\n"
print "Arguments: \n"
print "-r <pcap> input pcap file"
print "-p <packetnr> input packet number in pcap"
print "-s to output snort rule from single packet"
sys.exit(0)
#converts layer 3 and 4 protocols into rules:
# IP, TCP, UDP & ICMP
def basicconvert(singlepacket, packetnr0):
try:
print ("\n{1}----- Snort Rules For Packet Number {0}-----{2}".format(packetnr0, G, W))
# Print IP Layer Rules
# Check if the IP layer is present in the packet
if IP in singlepacket:
print ("{0}----- Layer 3/4 Rules -------{1}".format(G, W))
ipsource = singlepacket[IP].src
ipdest = singlepacket[IP].dst
# Print TCP Layer Rules
# Check if TCP is present in the packet
if TCP in singlepacket:
print ("{0}----- TCP ---\n{1}".format(G, W))
tcpsourceport = singlepacket[TCP].sport
tcpdestport = singlepacket[TCP].dport
print ("alert tcp {0} {1}-> $HOME_NET any (msg: \"Suspicious IP {0} and port {1} detected!\"; reference:Packet2Snort; classtype:trojan-activity; sid:xxxx; rev:1;)".format(tcpsourceport, tcpdestport))
print ("alert tcp $HOME_NET any -> {0} {1} (msg: \"Suspicious IP {0} and port {1} detected!\"; reference:Packet2Snort; classtype:trojan-activity; sid:xxxx; rev:1;)".format(ipdest, tcpdestport))
# Check if DNS is present in the packet
if DNS in singlepacket:
print ("{0}----- DNS ---\n{1}".format(G, W))
hostname = singlepacket[DNSQR].qname
if DNSRR in singlepacket:
hostaddr = singlepacket[DNSRR].rdata
print ("alert udp any 53 -> $HOME_NET any (msg: \"Suspicious DNS reply for {0} with address {1} detected!\"; content:\"|00 01 00 01|\"; content:\"|00 04".format(hostname, hostaddr)),
addrsplit = hostaddr.split('.')
for addr in addrsplit:
hexaddr = format(int(addr), '02x')
print "\b",hexaddr.upper(),
print "\b|\"; distance:4; reference:Packet2Snort; classtype:trojan-activity; sid:xxxx; rev:1;)"
else:
print ("alert udp $HOME_NET any -> any 53 (msg: \"Suspicious DNS request for {0} detected!\"; content:\"|01 00 00 01 00 00 00 00 00 00|\"; depth:10; offset:2; content:\"".format(hostname)),
dnsplit = hostname.split('.')
for word in dnsplit:
if word != '':
numbers = len(word)
hexa = format(numbers, '02x')
upper = hexa.upper()
print ("\b|{0}|{1}".format(upper, word)),
print "\b\"; fast_pattern; nocase; distance:0; reference:Packet2Snort; classtype:trojan-activity; sid:xxxx; rev:1;)"
# Check if a HTTP request is present in the packet
elif singlepacket.haslayer(http.HTTPRequest):
print ("\n{0}----- Layer 7 Rules -----{1}".format(G, W))
print ("{0}----- HTTP -----\n{1}".format(G, W))
httppacket = singlepacket.getlayer(http.HTTPRequest)
print ("Host:\nalert tcp $HOME_NET any -> any $HTTP_PORTS (msg: \"Suspicious HTTP {0[Method]} request to {0[Host]} detected!\"; flow:established,to_server; content:\"Host|3a 20|{0[Host]}|0d 0a|\"; http_header; reference:Packet2Snort; classtype:trojan-activity; sid:xxxx; rev:1;)".format(httppacket.fields))
print ("\nFilename:\nalert tcp $HOME_NET any -> any $HTTP_PORTS (msg: \"Suspicious HTTP file name \"{0[Path]}\" requested at {0[Host]}!\"; flow:established,to_server; content:\"{0[Path]}\"; http_uri; reference:Packet2Snort; classtype:trojan-activity; sid:xxxx; rev:1;)".format(httppacket.fields))
# Check if a HTTP response is present in the packet (Currently not active)
# elif singlepacket.haslayer(http.HTTPResponse):
# print "\n------ Layer 7 Rules ------"
# print "\n--- HTTP ---\n"
# httppacket2 = singlepacket.getlayer(http.HTTPResponse)
# print httppacket2
# Print UDP Layer Rules
# Check if UDP is present in the packet
elif UDP in singlepacket:
print ("{0}----- UDP -----\n{1}".format(G, W))
udpsrcport = singlepacket[UDP].sport
udpdestport = singlepacket[UDP].dport
print ("alert udp {0} {1} -> any any (msg: \"Suspicious IP {0} and port {1} detected!\"; reference:Packet2Snort; classtype:trojan-activity; sid:xxxx; rev:1;)".format(ipsource, udpsrcport))
print ("alert udp any any -> {0} {1} (msg: \"Suspicious IP {0} and port {1} detected!\"; reference:Packet2Snort; classtype:trojan-activity; sid:xxxx; rev:1;)".format(ipdest, udpdestport))
# Check if DNS is present in the packet
if DNS in singlepacket:
print ("{0}----- DNS -----\n{1}".format(G, W))
hostname = singlepacket[DNSQR].qname
if DNSRR in singlepacket:
hostaddr = singlepacket[DNSRR].rdata
print ("alert udp any 53 -> $HOME_NET any (msg: \"Suspicious DNS reply for {0} with address {1} detected!\"; content:\"|00 01 00 01|\"; content:\"|00 04".format(hostname, hostaddr)),
addrsplit = hostaddr.split('.')
for addr in addrsplit:
hexaddr = format(int(addr), '02x')
print "\b",hexaddr.upper(),
print "\b|\"; distan
|
ce:4; reference:Packet2S
|
nort; classtype:trojan-activity; sid:xxxx; rev:1;)"
else:
print ("alert udp $HOME_NET any -> any 53 (msg: \"Suspicious DNS request for {0} detected!\"; content:\"|01 00 00 01 00 00 00 00 00 00|\"; depth:10; offset:2; content:\"".format(hostname)),
dnsplit = hostname.split('.')
for word in dnsplit:
if word != '':
numbers = len(word)
hexa = format(numbers, '02x')
upper = hexa.upper()
print ("\b|{0}|{1}".format(upper, word)),
print "\b|00|\"; fast_pattern; nocase; distance:0; reference:Packet2Snort; classtype:trojan-activity; sid:xxxx; rev:1;)"
# Print ICMP Layer Rules
# Check if ICMP is present in the packet
elif ICMP in singlepacket:
print ("{0}----- ICMP -----\n{1}".format(G, W))
icmptype = singlepacket[ICMP].type
print ("alert icmp {0} any -> {1} any (msg: \"Suspicious ICMP packet from {0} to {1} with type {2}!\"; icode:0; itype:{2}; reference:Packet2Snort; classtype:trojan-activity; sid:xxxx; rev:1;)".format(ipsource, ipdest, icmptype))
# Throw error when no L4 protocols found
else:
print ("{0}No UDP/TCP Layer 4 Protocol Found!{1}".format(O, W))
sys.exit(1)
# Throw error when no IP found
else:
print ("{0}No IP Layer 3 Protocol Found!{1}".format(O, W))
sys.exit(1)
print ("\n{0}Don't forget to change the sid of the generated rule(s)!{1}".format(O, W))
# Print error when they occur
except Exception, e:
print "Error: ", e
print "\n"
usage()
pass
#Let user input pcap
def main():
try:
#Let user input pcap
cap = None
packetnr = None
protocol = None
snortoutput = False
options, arguments = getopt.getopt(sys.argv[1:], "r:p:P:sh")
#Check if argument is given and fill variables with arguments
if len(sys.argv) == 1:
usage()
for opt, args in options:
if opt in ('-r'):
cap = args
elif opt in ('-p'):
packetnr = args.split(',')
elif opt in ('-h'):
usage()
elif opt in ('-s'):
snortoutput = True
else:
print "No arguments given"
sys.exit(1)
# Check if pcap file exists
if cap:
if os.path.isfile(cap):
scapy_cap = rdpcap(cap)
else:
print "Error:", cap, "doest not exist."
sys.exit(1)
#Output summary of pcap
print (O + "--------")
print "Summary: " + str(scapy_cap)
print ("--------" + W)
# Check if a packet number has been supplied, and thus the variable packetnr is filled
if packetnr != None:
for i in packetnr:
packetnr0 = int(i) - 1
singlepacket = scapy_cap[int(packetnr0)]
# Ch

ZacBlanco/adac | adac/consensus/__init__.py | Python | mit | 86 | 0.011628
'''This module contains different implementations of distributed average consensus'''

timj/scons | test/packaging/strip-install-dir.py | Python | mit | 2,169 | 0.003688
#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Test stripping the InstallBuilder of the Package source file.
"""
import TestSCons
python = TestSCons.python
test = TestSCons.TestSCons()
tar = test.detect('TAR', 'tar')
if not tar:
test.skip_test('tar not found, skipping test\n')
test.write( 'main.c', '' )
test.write('SConstruct', """
prog = Install( '/bin', 'main.c' )
env=Environment(tools=['default', 'packaging'])
env.Package( NAME = 'foo',
VERSION = '1.2.3',
source = [ prog ],
)
""")
expected = """scons: Reading SConscript files ...
scons: done reading SConscript files.
scons: Building targets ...
Copy file(s): "main.c" to "foo-1.2.3/bin/main.c"
tar -zc -f foo-1.2.3.tar.gz foo-1.2.3/bin/main.c
scons: done building targets.
"""
test.run(arguments='', stderr = None, stdout=expected)
test.must_not_exist( 'bin/main.c' )
test.must_not_exist( '/bin/main.c' )
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:

alextricity25/parse_apache_configs | parse_apache_configs/test/test_parse_config.py | Python | apache-2.0 | 1,118 | 0.013417
from os import listdir
from os.path import isfile, join
import unittest
from parse_apache_configs import parse_config
import pprint
class testParseConfig(unittest.TestCase):
#print "ENTERING TEST_PARSE_CONFIG" + "-"*8
def test_parse_config(self):
        test_files = [ f for f in listdir("./test_conf_files") if isfile(join("./test_conf_files", f)) ]
for file_name in test_files:
            pac = parse_config.ParseApacheConfig("./test_conf_files/" + file_name)
conf_list = pac.parse_config()
def test_parse_config_string_file(self):
test_files = [ f for f in listdir("./test_conf_files") if isfile(join("./test_conf_files", f)) ]
for file_name in test_files:
full_file_path = "./test_conf_files/" + file_name
with open(full_file_path, 'r') as fp:
file_as_string = fp.read()
pac = parse_config.ParseApacheConfig(apache_file_as_string=file_as_string)
conf_list = pac.parse_config()
#print conf_list
#pp(conf_list)
        #TODO make sure we get back the right nestedList

vpetersson/docker-py | docker/api/network.py | Python | apache-2.0 | 0
from ..errors import InvalidVersion
from ..utils import check_resource, minimum_version
from ..utils import version_lt
from .. import utils
class NetworkApiMixin(object):
@minimum_version('1.21')
def networks(self, names=None, ids=None, filters=None):
"""
List networks. Similar to the ``docker networks ls`` command.
Args:
names (:py:class:`list`): List of names to filter by
ids (:py:class:`list`): List of ids to filter by
filters (dict): Filters to be processed on the network list.
Available filters:
- ``driver=[<driver-name>]`` Matches a network's driver.
- ``label=[<key>]`` or ``label=[<key>=<value>]``.
- ``type=["custom"|"builtin"]`` Filters networks by type.
Returns:
(dict): List of network objects.
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
"""
if filters is None:
filters = {}
if names:
filters['name'] = names
if ids:
filters['id'] = ids
params = {'filters': utils.convert_filters(filters)}
url = self._url("/networks")
res = self._get(url, params=params)
return self._result(res, json=True)
@minimum_version('1.21')
def create_network(self, name, driver=None, options=None, ipam=None,
check_duplicate=None, internal=False, labels=None,
enable_ipv6=False, attachable=None, scope=None,
ingress=None):
"""
Create a network. Similar to the ``docker network create``.
Args:
name (str): Name of the network
driver (str): Name of the driver used to create the network
options (dict): Driver options as a key-value dictionary
ipam (IPAMConfig): Optional custom IP scheme for the network.
check_duplicate (bool): Request daemon to check for networks with
same name. Default: ``None``.
internal (bool): Restrict external access to the network. Default
``False``.
labels (dict): Map of labels to set on the network. Default
``None``.
enable_ipv6 (bool): Enable IPv6 on the network. Default ``False``.
attachable (bool): If enabled, and the network is in the global
scope, non-service containers on worker nodes will be able to
connect to the network.
scope (str): Specify the network's scope (``local``, ``global`` or
``swarm``)
ingress (bool): If set, create an ingress network which provides
the routing-mesh in swarm mode.
Returns:
(dict): The created network reference object
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
Example:
A network using the bridge driver:
>>> client.create_network("network1", driver="bridge")
You can also create more advanced networks with custom IPAM
configurations. For example, setting the subnet to
``192.168.52.0/24`` and gateway address to ``192.168.52.254``.
.. code-block:: python
>>> ipam_pool = docker.types.IPAMPool(
subnet='192.168.52.0/24',
gateway='192.168.52.254'
)
>>> ipam_config = docker.types.IPAMConfig(
pool_configs=[ipam_pool]
)
>>> docker_client.create_network("network1", driver="bridge",
ipam=ipam_config)
"""
if options is not None and not isinstance(options, dict):
raise TypeError('options must be a dictionary')
data = {
'Name': name,
'Driver': driver,
'Options': options,
'IPAM': ipam,
'CheckDuplicate': check_duplicate,
}
if labels is not None:
if version_lt(self._version, '1.23'):
raise InvalidVersion(
'network labels were introduced in API 1.23'
)
if not isinstance(labels, dict):
raise TypeError('labels must be a dictionary')
data["Labels"] = labels
if enable_ipv6:
if version_lt(self._version, '1.23'):
raise InvalidVersion(
'enable_ipv6 was introduced in API 1.23'
)
data['EnableIPv6'] = True
if internal:
if version_lt(self._version, '1.22'):
raise InvalidVersion('Internal networks are not '
'supported in API version < 1.22')
data['Internal'] = True
if attachable is not None:
if version_lt(self._version, '1.24'):
raise InvalidVersion(
'attachable is not supported in API version < 1.24'
)
data['Attachable'] = attachable
if ingress is not None:
if version_lt(self._version, '1.29'):
raise InvalidVersion(
'ingress is not supported in API version < 1.29'
)
data['Ingress'] = ingress
if scope is not None:
if version_lt(self._version, '1.30'):
raise InvalidVersion(
'scope is not supported in API version < 1.30'
)
data['Scope'] = scope
url = self._url("/networks/create")
res = self._post_json(url, data=data)
return self._result(res, json=True)
@minimum_version('1.25')
def prune_networks(self, filters=None):
"""
Delete unused networks
Args:
filters (dict): Filters to process on the prune list.
Returns:
(dict): A dict containing a list of deleted network names and
the amount of disk space reclaimed in bytes.
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
"""
params = {}
if filters:
params['filters'] = utils.convert_filters(filters)
url = self._url('/networks/prune')
return self._result(self._post(url, params=params), True)
@minimum_version('1.21')
@check_resource('net_id')
def remove_network(self, net_id):
"""
Remove a network. Similar to the ``docker network rm`` command.
Args:
            net_id (str): The network's id
"""
url = self._url("/networks/{0}", net_id)
res = self._delete(url)
self._raise_for_status(res)
@minimum_version('1.21')
@check_resource('net_id')
def inspect_network(self, net_id, verbose=None, scope=None):
"""
Get detailed information about a network.
Args:
net_id (str): ID of network
            verbose (bool): Show the service details across the cluster in
                swarm mode.
scope (str): Filter the network by scope (``swarm``, ``global``
or ``local``).
"""
params = {}
if verbose is not None:
if version_lt(self._version, '1.28'):
raise InvalidVersion('verbose was introduced in API 1.28')
params['verbose'] = verbose
if scope is not None:
if version_lt(self._version, '1.31'):
raise InvalidVersion('scope was introduced in API 1.31')
params['scope'] = scope
url = self._url("/networks/{0}", net_id)
res = self._get(url, params=params)
return self._result(res, json=True)
@check_resource('container')
@minimum_version('1.21')
def connect_container_to_network(self, container, net_id,
ipv4_address=None, ipv6_address=None,
aliases=None, links=None,

manuco/Pot-commun | testrunner.py | Python | gpl-3.0 | 332 | 0.009036
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from unittest import TestLoader, TextTestRunner
from potcommuntests import Tests
runner = TextTestRunner()
testsSuite = TestLoader().loadTestsFromTestCase(Tests)
#testsSuite = TestLoader().loadTestsFromName("potcommuntests.Tests.test_with_some_missing_items")
runner.run(testsSuite)

Teekuningas/mne-python | mne/preprocessing/_fine_cal.py | Python | bsd-3-clause | 2,954 | 0
# -*- coding: utf-8 -*-
# Authors: Eric Larson <larson.eric.d@gmail.com>
# License: BSD (3-clause)
import numpy as np
from ..utils import check_fname, _check_fname
def read_fine_calibration(fname):
"""Read fine calibration information from a .dat file.
The fine calibration typically includes improved sensor locations,
calibration coefficients, and gradiometer imbalance information.
Parameters
----------
fname : str
The filename.
Returns
-------
calibration : dict
Fine calibration information.
"""
# Read new sensor locations
fname = _check_fname(fname, overwrite='read', must_exist=True)
check_fname(fname, 'cal', ('.dat',))
ch_names = list()
locs = list()
imb_cals = list()
with open(fname, 'r') as fid:
for line in fid:
if line[0] in '#\n':
continue
vals = line.strip().split()
if len(vals) not in [14, 16]:
raise RuntimeError('Error parsing fine calibration file, '
                                   'should have 14 or 16 entries per line '
'but found %s on line:\n%s'
% (len(vals), line))
# `vals` contains channel number
ch_name = vals[0]
            if len(ch_name) in (3, 4):  # heuristic for Neuromag fix
try:
ch_name = int(ch_name)
except ValueError: # something other than e.g. 113 or 2642
pass
else:
ch_name = 'MEG' + '%04d' % ch_name
ch_names.append(ch_name)
# (x, y, z), x-norm 3-vec, y-norm 3-vec, z-norm 3-vec
locs.append(np.array([float(x) for x in vals[1:13]]))
# and 1 or 3 imbalance terms
imb_cals.append([float(x) for x in vals[13:]])
locs = np.array(locs)
return dict(ch_names=ch_names, locs=locs, imb_cals=imb_cals)
def write_fine_calibration(fname, calibration):
"""Write fine calibration information to a .dat file.
Parameters
----------
fname : str
The filename to write out.
calibration : dict
Fine calibration information.
"""
fname = _check_fname(fname, overwrite=True)
check_fname(fname, 'cal', ('.dat',))
with open(fname, 'wb') as cal_file:
for ci, chan in enumerate(calibration['ch_names']):
# Write string containing 1) channel, 2) loc info, 3) calib info
# with field widths (e.g., %.6f) chosen to match how Elekta writes
# them out
cal_line = np.concatenate([calibration['locs'][ci],
calibration['imb_cals'][ci]]).round(6)
cal_str = str(chan) + ' ' + ' '.join(map(lambda x: "%.6f" % x,
cal_line))
cal_file.write((cal_str + '\n').encode('ASCII'))
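# A minimal round-trip sketch using the two functions above (hypothetical
# file names):
# calibration = read_fine_calibration('sss_cal.dat')
# write_fine_calibration('sss_cal_copy.dat', calibration)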

MaCFP/macfp-db | Buoyant_Plumes/Sandia_Helium_Plume/Computational_Results/2021/SNL/SNL_plot_results.py | Python | mit | 754 | 0.005305
#!/usr/bin/env python3
# McDermott
# March 2020
# first, make sure the macfp module directory is in your path
# if not, uncomment the lines below and replace <path to macfp-db>
# with the path (absolute or relative) to your macfp-db repository
import sys
# sys.path.append('<path to macfp-db>/macfp-db/Utilities/')
sys.path.append('../../../../../../macfp-db/Utilities/')
import macfp
import importlib
importlib.reload(macfp)
import matplotlib.pyplot as plt
macfp.dataplot(config_filename='SNL_dataplot_config.csv',
institute='Sandia National Laboratories',
expdir='../../../Experimental_Data/',
pltdir='./Plots/',
verbose=True,
close_figs=True
)
# plt.show()

danduggan/hltd | cgi/harakiri_cgi.py | Python | lgpl-3.0 | 593 | 0.005059
#!/usr/bin/env python2.6
import cgi
import time
import os
import subprocess
"""
problem: cgi scripts run as user 'nobody'
how can we handle signaling the daemon ?
"""
form = cgi.FieldStorage()
print "Content-Type: text/html" # HTML is following
print
print "<TITLE>CGI script output</TITLE>"
print "Hey I'm still here !"
try:
if os.path.exists('harakiri'):
os.remove('harakiri')
fp = open('harakiri','w+')
fp.close()
except Exception as ex:
print "exception encountered in operating hltd\n"
print '<P>'
print ex
raise

ravyg/algorithms | python/238_productArrayExceptSelf.py | Python | gpl-3.0 | 721 | 0.015257
#!/usr/bin/python
# Given an array of n integers where n > 1, nums, return an array output such that output[i] is equal to the product of all the elements of nums except nums[i].
class Solution(object):
# @param {integer[]} nums
# @return {integer[]}
def productExceptSelf(self, nums):
        p = 1
n = len(nums)
output = []
# Forward range.
for i in range(0,n):
output.append(p)
p = p * nums[i]
p = 1
        # Backward range.
for i in range(n-1,-1,-1):
output[i] = output[i] * p
p = p * nums[i]
return output
nums=[2,3,4,5]
obj = Solution()
output = obj.productExceptSelf(nums)
print(output)
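# Expected output: [60, 40, 30, 24] -- the total product 2*3*4*5 = 120
# divided by each element, computed here without using division.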

mcanthony/nupic | src/nupic/data/CategoryFilter.py | Python | agpl-3.0 | 2,276 | 0.003954
#! /usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
'''
A category filter can be applied to any categorical field
The basic operation is assumed to be: OR
In the final version users may input Boolean algebra to define this
behaviour
If your field is 'animals'
and your values are
1 - dogs
2 - cat
3 - mouse
4 - giraffe
5 - hippo
A category filter for dog,giraffe
would return records 1 and 4
Note that we're using a substring search so that dogs ~= dog
We can't know all the categories before hand so we present to the user a
freeform input box.
'''
class CategoryFilter(object):
def __init__(self, filterDict):
"""
TODO describe filterDict schema
"""
self.filterDict = filterDict
def match(self, record):
'''
Returns True if the record matches any of the provided filters
'''
for field, meta in self.filterDict.iteritems():
index = meta['index']
categories = meta['categories']
for category in categories:
# Record might be blank, handle this
if not record:
continue
if record[index].find(category) != -1:
'''
This field contains the string we're searching for
so we'll keep the records
'''
return True
# None of the categories were found in this record
return False
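# A minimal usage sketch based on the filterDict schema implied by match()
# (hypothetical field name and records; runs under Python 2, matching the
# iteritems() call above):
# animalFilter = CategoryFilter({'animals': {'index': 0, 'categories': ['dog', 'giraffe']}})
# animalFilter.match(['dogs'])     # True  ('dog' is a substring of 'dogs')
# animalFilter.match(['mouse'])    # False (no category matches)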

multispot-software/transfer_convert | analyze.py | Python | mit | 3,069 | 0.000326
#!/usr/bin/env python
from pathlib import Path
import nbrun
default_notebook_name = 'smFRET-PAX_single_pop.ipynb'
def run_analysis(data_filename, input_notebook=None, save_html=False,
working_dir=None, suffix='', dry_run=False):
"""
Run analysis notebook on the passed data file.
Arguments:
data_filename (Path): path data file to be analyzed.
input_notebook (Path): path of the analysis notebook.
save_html (bool): if True save a copy of the output notebook in HTML.
working_dir (Path or None): working dir the kernel is started into.
If None (default), use the same folder as the data file.
        dry_run (bool): just pretending. Do not run or save any notebook.
"""
if input_notebook is None:
input_notebook = default_notebook_name
print(' * Running analysis for %s' % (data_filename.stem), flush=True)
if working_dir is None:
working_dir = data_filename.parent
out_path_html = Path(data_filename.parent, 'reports_html',
data_filename.stem + suffix + '.html')
out_path_nb = Path(data_filename.parent,
data_filename.stem + suffix + '.ipynb')
out_path_html.parent.mkdir(exist_ok=True, parents=True)
if not dry_run:
nbrun.run_notebook(input_notebook, display_links=False,
out_path_ipynb=out_path_nb,
out_path_html=out_path_html,
nb_kwargs={'fname': str(data_filename)},
save_html=save_html, working_dir=working_dir)
print(' [COMPLETED ANALYSIS] %s' % (data_filename.stem), flush=True)
if __name__ == '__main__':
import argparse
descr = """\
This script executes an analysis notebook on the specified HDF5 file.
"""
parser = argparse.ArgumentParser(description=descr, epilog='\n')
parser.add_argument('datafile',
help='Source folder with files to be processed.')
msg = ("Filename of the analysis notebook. If not specified, the default "
"notebook is '%s'." % default_notebook_name)
parser.add_argument('--notebook', metavar='NB_NAME',
default=default_notebook_name, help=msg)
parser.add_argument('--save-html', action='store_true',
help='Save a copy of the output notebooks in HTML.')
parser.add_argument('--working-dir', metavar='PATH', default=None,
help='Working dir for the kernel executing the notebook.')
parser.add_argument('--suffix', metavar='STRING', default='',
help='Notebook name suffix.')
args = parser.parse_args()
datafile = Path(args.datafile)
assert datafile.is_file(), 'Data file not found: %s' % datafile
notebook = Path(args.notebook)
assert notebook.is_file(), 'Notebook not found: %s' % notebook
run_analysis(datafile, input_notebook=notebook, suffix=args.suffix,
save_html=args.save_html, working_dir=args.working_dir)
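    # Example invocation (hypothetical file names):
    #   python analyze.py measurement.hdf5 --notebook smFRET-PAX_single_pop.ipynb --save-html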

jss-emr/openerp-7-src | openerp/addons/account/account_move_line.py | Python | agpl-3.0 | 70,268 | 0.005764
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import sys
import time
from datetime import datetime
from operator import itemgetter
from lxml import etree
from openerp import netsvc
from openerp.osv import fields, osv, orm
from openerp.tools.translate import _
import openerp.addons.decimal_precision as dp
from openerp import tools
class account_move_line(osv.osv):
_name = "account.move.line"
_description = "Journal Items"
def _query_get(self, cr, uid, obj='l', context=None):
fiscalyear_obj = self.pool.get('account.fiscalyear')
fiscalperiod_obj = self.pool.get('account.period')
account_obj = self.pool.get('account.account')
fiscalyear_ids = []
if context is None:
context = {}
initial_bal = context.get('initial_bal', False)
company_clause = " "
if context.get('company_id', False):
company_clause = " AND " +obj+".company_id = %s" % context.get('company_id', False)
if not context.get('fiscalyear', False):
if context.get('all_fiscalyear', False):
#this option is needed by the aged balance report because otherwise, if we search only the draft ones, an open invoice of a closed fiscalyear won't be displayed
fiscalyear_ids = fiscalyear_obj.search(cr, uid, [])
else:
fiscalyear_ids = fiscalyear_obj.search(cr, uid, [('state', '=', 'draft')])
else:
#for initial balance as well as for normal query, we check only the selected FY because the best practice is to generate the FY opening entries
fiscalyear_ids = [context['fiscalyear']]
fiscalyear_clause = (','.join([str(x) for x in fiscalyear_ids])) or '0'
state = context.get('state', False)
where_move_state = ''
where_move_lines_by_date = ''
if context.get('date_from', False) and context.get('date_to', False):
if initial_bal:
where_move_lines_by_date = " AND " +obj+".move_id IN (SELECT id FROM account_move WHERE date < '" +context['date_from']+"')"
else:
where_move_lines_by_date = " AND " +obj+".move_id IN (SELECT id FROM account_move WHERE date >= '" +context['date_from']+"' AND date <= '"+context['date_to']+"')"
if state:
if state.lower() not in ['all']:
where_move_state= " AND "+obj+".move_id IN (SELECT id FROM account_move WHERE account_move.state = '"+state+"')"
if context.get('period_from', False) and context.get('period_to', False) and not context.get('periods', False):
if initial_bal:
                period_company_id = fiscalperiod_obj.browse(cr, uid, context['period_from'], context=context).company_id.id
first_period = fiscalperiod_obj.search(cr, uid, [('company_id', '=', period_company_id)], order='date_start', limit=1)[0]
                context['periods'] = fiscalperiod_obj.build_ctx_periods(cr, uid, first_period, context['period_from'])
else:
context['periods'] = fiscalperiod_obj.build_ctx_periods(cr, uid, context['period_from'], context['period_to'])
if context.get('periods', False):
if initial_bal:
query = obj+".state <> 'draft' AND "+obj+".period_id IN (SELECT id FROM account_period WHERE fiscalyear_id IN (%s)) %s %s" % (fiscalyear_clause, where_move_state, where_move_lines_by_date)
period_ids = fiscalperiod_obj.search(cr, uid, [('id', 'in', context['periods'])], order='date_start', limit=1)
if period_ids and period_ids[0]:
first_period = fiscalperiod_obj.browse(cr, uid, period_ids[0], context=context)
ids = ','.join([str(x) for x in context['periods']])
query = obj+".state <> 'draft' AND "+obj+".period_id IN (SELECT id FROM account_period WHERE fiscalyear_id IN (%s) AND date_start <= '%s' AND id NOT IN (%s)) %s %s" % (fiscalyear_clause, first_period.date_start, ids, where_move_state, where_move_lines_by_date)
else:
ids = ','.join([str(x) for x in context['periods']])
query = obj+".state <> 'draft' AND "+obj+".period_id IN (SELECT id FROM account_period WHERE fiscalyear_id IN (%s) AND id IN (%s)) %s %s" % (fiscalyear_clause, ids, where_move_state, where_move_lines_by_date)
else:
query = obj+".state <> 'draft' AND "+obj+".period_id IN (SELECT id FROM account_period WHERE fiscalyear_id IN (%s)) %s %s" % (fiscalyear_clause, where_move_state, where_move_lines_by_date)
if initial_bal and not context.get('periods', False) and not where_move_lines_by_date:
#we didn't pass any filter in the context, and the initial balance can't be computed using only the fiscalyear otherwise entries will be summed twice
#so we have to invalidate this query
raise osv.except_osv(_('Warning!'),_("You have not supplied enough arguments to compute the initial balance, please select a period and a journal in the context."))
if context.get('journal_ids', False):
query += ' AND '+obj+'.journal_id IN (%s)' % ','.join(map(str, context['journal_ids']))
if context.get('chart_account_id', False):
child_ids = account_obj._get_children_and_consol(cr, uid, [context['chart_account_id']], context=context)
query += ' AND '+obj+'.account_id IN (%s)' % ','.join(map(str, child_ids))
query += company_clause
return query
def _amount_residual(self, cr, uid, ids, field_names, args, context=None):
"""
This function returns the residual amount on a receivable or payable account.move.line.
By default, it returns an amount in the currency of this journal entry (maybe different
of the company currency), but if you pass 'residual_in_company_currency' = True in the
context then the returned amount will be in company currency.
"""
res = {}
if context is None:
context = {}
cur_obj = self.pool.get('res.currency')
for move_line in self.browse(cr, uid, ids, context=context):
res[move_line.id] = {
'amount_residual': 0.0,
'amount_residual_currency': 0.0,
}
if move_line.reconcile_id:
continue
if not move_line.account_id.type in ('payable', 'receivable'):
            #this function does not support being used on move lines not related to payable or receivable accounts
continue
if move_line.currency_id:
move_line_total = move_line.amount_currency
sign = move_line.amount_currency < 0 and -1 or 1
else:
move_line_total = move_line.debit - move_line.credit
sign = (move_line.debit - move_line.credit) < 0 and -1 or 1
line_total_in_company_currency = move_line.debit - move_line.credit
context_unreconciled = context.copy()
if move_line.reconcile_partial_id:
for payment_line in move_line.reconcile_partial_id.line_partial_ids:
                    if payment_line.id == move_line.id:
                        continue
leapp-to/prototype | leapp/utils/meta.py | Python | lgpl-2.1 | 1,358 | 0.001473
import itertools
def with_metaclass(meta_class, base_class=object):
"""
:param meta_class: The desired metaclass to use
:param base_class: The desired base class to use, the default one is object
:type base_class: Type
:return: Metaclass type to inherit from
:Example:
    .. code-block:: python
class MyMetaClass(type):
def __new__(mcs, name, bases, attrs):
klass = super(MyMetaClass, mcs).__new__(mcs, name, bases, attrs)
klass.added = "Added field"
return klass
class MyClass(with_metaclass(MyMetaClass)):
pass
# This is equivalent to python 2:
class MyClass(object):
__metaclass__ = MyMetaClass
# Or python 3
class MyClass(object, metaclass=MyMetaClass):
pass
"""
return meta_class(
'with_meta_base_' + base_class.__name__ + '_' + meta_class.__name__,
(base_class,),
{}
)
def get_flattened_subclasses(cls):
"""
Returns all the given subclasses and their subclasses recursively for the given class
:param cls: Class to check
:type cls: Type
:return: Flattened list of subclasses and their subclasses
"""
classes = cls.__subclasses__()
return list(itertools.chain(classes, *[get_flattened_subclasses(x) for x in classes]))
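# Illustrative usage of get_flattened_subclasses (class names hypothetical):
#
#     class Base(object): pass
#     class Child(Base): pass
#     class GrandChild(Child): pass
#
#     get_flattened_subclasses(Base)  # -> [Child, GrandChild]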
stxnext/intranet-open | src/intranet3/intranet3/asyncfetchers/fake.py | Python | mit | 416 | 0
class FakeFetcher(object):
"""
    Used e.g. in the Harvest tracker when we need credentials but no fetcher.
"""
def __init__(self, *args, **kwargs):
pass
def fetch_user_tickets(self, *args, **kwargs):
pass
def fetch_all_tickets(self, *args, **kwargs):
pass
def fetch_bugs_for_query(self, *args, **kwargs):
pass
def get_result(self):
return []
cristian99garcia/pilas-activity | pilas/fisica.py | Python | gpl-3.0 | 14,319 | 0.003006
# -*- encoding: utf-8 -*-
# pilas engine - a video game framework.
#
# copyright 2010 - hugo ruscitti
# license: lgplv3 (see http://www.gnu.org/licenses/lgpl.html)
#
# website - http://www.pilas-engine.com.ar
import pilas
from pilas import colores
try:
import Box2D as box2d
except ImportError:
print "No esta disponible box2d, se deshabilitara la fisica."
import math
class Fisica(object):
"""Representa un simulador de mundo fisico, usando la biblioteca box2d."""
def __init__(self, area, gravedad=(0, -90)):
self.area = area
try:
self.escenario = box2d.b2AABB()
self.escenario.lowerBound = (-1000.0, -1000.0)
self.escenario.upperBound = (1000.0, 1000.0)
self.gravedad = box2d.b2Vec2(gravedad[0], gravedad[1])
try:
self.mundo = box2d.b2World(self.escenario, self.gravedad, True)
except ValueError:
print "Solo esta disponible el motor de fisica para box2d 2.0.2b1"
raise AttributeError("...")
except AttributeError:
print "Deshabilitando modulo de fisica (no se encuentra instalado pybox2d en este equipo)"
self.mundo = None
return
self.constante_mouse = None
self.i = 0
self.crear_bordes_del_escenario()
self.figuras_a_eliminar = []
def crear_bordes_del_escenario(self):
self.crear_techo(self.area)
self.crear_suelo(self.area)
self.crear_paredes(self.area)
def reiniciar(self):
for x in self.mundo.bodyList:
self.mundo.DestroyBody(x)
self.crear_bordes_del_escenario()
def capturar_figura_con_el_mouse(self, figura):
if self.constante_mouse:
self.cuando_suelta_el_mouse()
self.constante_mouse = ConstanteDeMovimiento(figura)
def cuando_mueve_el_mouse(self, x, y):
if self.constante_mouse:
self.constante_mouse.mover(x, y)
def cuando_suelta_el_mouse(self):
if self.constante_mouse:
self.constante_mouse.eliminar()
self.constante_mouse = None
def actualizar(self):
if self.mundo:
self.mundo.Step(1.0 / 20.0, 10, 8)
self.i += 1
self._procesar_figuras_a_eliminar()
def _procesar_figuras_a_eliminar(self):
"Elimina las figuras que han sido marcadas para quitar."
if self.figuras_a_eliminar:
for x in self.figuras_a_eliminar:
                # Only remove figures that currently exist.
if x in self.mundo.bodyList:
self.mundo.DestroyBody(x)
self.figuras_a_eliminar = []
def dibujar_figuras_sobre_lienzo(self, motor, lienzo, grosor=1):
"Dibuja todas las figuras en una pizarra. Indicado para depuracion."
cuerpos = self.mundo.bodyList
cantidad_de_figuras = 0
for cuerpo in cuerpos:
xform = cuerpo.GetXForm()
for figura in cuerpo.shapeList:
cantidad_de_figuras += 1
tipo_de_figura = figura.GetType()
if tipo_de_figura == box2d.e_polygonShape:
vertices = []
for v in figura.vertices:
pt = box2d.b2Mul(xform, v)
vertices.append((pt.x, pt.y))
lienzo.poligono(motor, vertices, color=colores.rojo, grosor=grosor, cerrado=True)
elif tipo_de_figura == box2d.e_circleShape:
lienzo.circulo(motor, cuerpo.position.x, cuerpo.position.y, figura.radius, colores.rojo, grosor=grosor)
else:
print "no puedo identificar el tipo de figura."
def crear_cuerpo(self, definicion_de_cuerpo):
return self.mundo.CreateBody(definicion_de_cuerpo)
def crear_suelo(self, (ancho, alto), restitucion=0):
self.suelo = Rectangulo(0, -alto / 2, ancho, 2, dinamica=False, fisica=self, restitucion=restitucion)
def crear_techo(self, (ancho, alto), restitucion=0):
self.techo = Rectangulo(0, alto / 2, ancho, 2, dinamica=False, fisica=self, restitucion=restitucion)
def crear_paredes(self, (ancho, alto), restitucion=0):
self.pared_izquierda = Rectangulo(-ancho / 2, 0, 2, alto, dinamica=False, fisica=self, restitucion=restitucion)
self.pared_derecha = Rectangulo(ancho / 2, 0, 2, alto, dinamica=False, fisica=self, restitucion=restitucion)
def eliminar_suelo(self):
if self.suelo:
self.suelo.eliminar()
self.suelo = None
def eliminar_techo(self):
if self.techo:
self.techo.eliminar()
self.techo = None
def eliminar_paredes(self):
if self.pared_izquierda:
self.pared_derecha.eliminar()
self.pared_izquierda.eliminar()
self.pared_derecha = None
self.pared_izquierda = None
def eliminar_figura(self, figura):
self.figuras_a_eliminar.append(figura)
def obtener_distancia_al_suelo(self, x, y, dy):
"""Obtiene la distancia hacia abajo desde el punto (x,y).
El valor de 'dy' tiene que ser positivo.
Si la funcion no encuentra obstaculos retornara
dy, pero en paso contrario retornara un valor menor
a dy.
"""
if dy < 0:
raise Exception("El valor de 'dy' debe ser positivo, ahora vale '%f'." %(dy))
delta = 0
while delta < dy:
if self.obtener_cuerpos_en(x, y - delta):
return delta
delta += 1
return delta
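    # Illustrative example: with a static body resting at y = 0, a call such
    # as fisica.obtener_distancia_al_suelo(0, 100, 150) scans downward from
    # (0, 100) and returns about 100 rather than the full 150.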
def obtener_cuerpos_en(self, x, y):
"Retorna una lista de cuerpos que se encuentran en la posicion (x, y) o retorna una lista vacia []."
AABB = box2d.b2AABB()
f = 1
AABB.lowerBound = (x - f, y - f)
AABB.upperBound = (x + f, y + f)
cuantos, cuerpos = self.mundo.Query(AABB, 2)
if cuantos == 0:
return []
lista_de_cuerpos = []
for s in cuerpos:
            cuerpo = s.GetBody()
if s.TestPoint(cuerpo.GetXForm(), (x, y)):
lista_de_cuerpos.append(cuerpo)
return lista_de_cuerpos
def definir_gravedad(self, x, y):
pilas.fisica.definir_gravedad(x, y)
class Figura(object):
"""Representa un figura que simula un cuerpo fisico.
Esta figura es abstracta, no está pensada para crear
objetos a partir de ella. Se usa como base para
|
el resto
de las figuras cómo el Circulo o el Rectangulo simplemente."""
def obtener_x(self):
return self._cuerpo.position.x
def definir_x(self, x):
self._cuerpo.SetXForm((x, self.y), self._cuerpo.GetAngle())
def obtener_y(self):
return self._cuerpo.position.y
def definir_y(self, y):
self._cuerpo.SetXForm((self.x, y), self._cuerpo.GetAngle())
def obtener_rotacion(self):
return - math.degrees(self._cuerpo.GetAngle())
def definir_rotacion(self, angulo):
self._cuerpo.SetXForm((self.x, self.y), math.radians(-angulo))
def impulsar(self, dx, dy):
self._cuerpo.ApplyImpulse((dx, dy), self._cuerpo.GetWorldCenter())
def obtener_velocidad_lineal(self):
velocidad = self._cuerpo.GetLinearVelocity()
return (velocidad.x, velocidad.y)
def detener(self):
"""Hace que la figura regrese al reposo."""
self.definir_velocidad_lineal(0, 0)
def definir_velocidad_lineal(self, dx=None, dy=None):
anterior_dx, anterior_dy = self.obtener_velocidad_lineal()
if dx is None:
dx = anterior_dx
if dy is None:
dy = anterior_dy
self._cuerpo.SetLinearVelocity((dx, dy))
def empujar(self, dx=None, dy=None):
self.definir_velocidad_lineal(dx, dy)
def eliminar(self):
"""Quita una figura de la simulación."""
pilas.mundo.fisica.eliminar_figura(self._cuerpo)
    x = property(obtener_x, definir_x, doc="defines the horizontal position.")
    y = property(obtener_y, definir_y, doc="defines the vertical position.")
    rotacion = property(obtener_rotacion, definir_rotacion, doc="defines the rotation.")
ramsateesh/designate | designate/backend/agent_backend/impl_bind9.py | Python | apache-2.0 | 5,255 | 0
# Copyright 2014 Rackspace Inc.
#
# Author: Tim Simmons <tim.simmons@rackspace.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import dns
import dns.resolver
from oslo_concurrency import lockutils
from oslo_config import cfg
from oslo_log import log as logging
from designate.backend.agent_backend import base
from designate import exceptions
from designate import utils
from designate.i18n import _LI
LOG = logging.getLogger(__name__)
CFG_GROUP = 'backend:agent:bind9'
class Bind9Backend(base.AgentBackend):
__plugin_name__ = 'bind9'
__backend_status__ = 'untested'
@classmethod
def get_cfg_opts(cls):
group = cfg.OptGroup(
name='backend:agent:bind9', title="Configuration for bind9 backend"
)
opts = [
cfg.StrOpt('rndc-host', default='127.0.0.1', help='RNDC Host'),
cfg.IntOpt('rndc-port', default=953, help='RNDC Port'),
            cfg.StrOpt('rndc-config-file', default=None,
                       help='RNDC Config File'),
cfg.StrOpt('rndc-key-file', default=None, help='RNDC Key File'),
cfg.StrOpt('zone-file-path', default='$state_path/zones',
help='Path where zone files are stored'),
cfg.StrOpt('query-destination', default='127.0.0.1',
help='Host to query when finding domains')
]
return [(group, opts)]
    def start(self):
LOG.info(_LI("Started bind9 backend"))
def find_domain_serial(self, domain_name):
LOG.debug("Finding %s" % domain_name)
resolver = dns.resolver.Resolver()
resolver.nameservers = [cfg.CONF[CFG_GROUP].query_destination]
try:
rdata = resolver.query(domain_name, 'SOA')[0]
except Exception:
return None
return rdata.serial
def create_domain(self, domain):
LOG.debug("Creating %s" % domain.origin.to_text())
self._sync_domain(domain, new_domain_flag=True)
def update_domain(self, domain):
LOG.debug("Updating %s" % domain.origin.to_text())
self._sync_domain(domain)
def delete_domain(self, domain_name):
LOG.debug('Delete Domain: %s' % domain_name)
rndc_op = 'delzone'
# RNDC doesn't like the trailing dot on the domain name
rndc_call = self._rndc_base() + [rndc_op, domain_name.rstrip('.')]
utils.execute(*rndc_call)
def _rndc_base(self):
rndc_call = [
'rndc',
'-s', cfg.CONF[CFG_GROUP].rndc_host,
'-p', str(cfg.CONF[CFG_GROUP].rndc_port),
]
if cfg.CONF[CFG_GROUP].rndc_config_file:
rndc_call.extend(['-c',
cfg.CONF[CFG_GROUP].rndc_config_file])
if cfg.CONF[CFG_GROUP].rndc_key_file:
rndc_call.extend(['-k',
cfg.CONF[CFG_GROUP].rndc_key_file])
return rndc_call
def _sync_domain(self, domain, new_domain_flag=False):
"""Sync a single domain's zone file and reload bind config"""
# NOTE: Different versions of BIND9 behave differently with a trailing
# dot, so we're just going to take it off.
domain_name = domain.origin.to_text().rstrip('.')
# NOTE: Only one thread should be working with the Zonefile at a given
# time. The sleep(1) below introduces a not insignificant risk
# of more than 1 thread working with a zonefile at a given time.
with lockutils.lock('bind9-%s' % domain_name):
LOG.debug('Synchronising Domain: %s' % domain_name)
zone_path = cfg.CONF[CFG_GROUP].zone_file_path
output_path = os.path.join(zone_path,
'%s.zone' % domain_name)
domain.to_file(output_path, relativize=False)
rndc_call = self._rndc_base()
if new_domain_flag:
rndc_op = [
'addzone',
'%s { type master; file "%s"; };' % (domain_name,
output_path),
]
rndc_call.extend(rndc_op)
else:
rndc_op = 'reload'
rndc_call.extend([rndc_op])
rndc_call.extend([domain_name])
LOG.debug('Calling RNDC with: %s' % " ".join(rndc_call))
self._execute_rndc(rndc_call)
def _execute_rndc(self, rndc_call):
try:
LOG.debug('Executing RNDC call: %s' % " ".join(rndc_call))
utils.execute(*rndc_call)
except utils.processutils.ProcessExecutionError as e:
LOG.debug('RNDC call failure: %s' % e)
raise exceptions.Backend(e)
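    # Illustrative rndc invocations this backend generates (zone and path
    # hypothetical, using the default host/port options above):
    #   new zone:  rndc -s 127.0.0.1 -p 953 addzone
    #              'example.org { type master; file "/path/zones/example.org.zone"; };'
    #   update:    rndc -s 127.0.0.1 -p 953 reload example.org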
DarkEnergySurvey/ugali | ugali/pipeline/run_04.0_peak_finder.py | Python | mit | 4,508 | 0.013088
#!/usr/bin/env python
"""Perform object finding and association."""
import os, glob
from os.path import exists, join
import time
import fitsio
import numpy as np
from ugali.analysis.pipeline import Pipeline
from ugali.analysis.search import CandidateSearch
import ugali.candidate.associate
from ugali.utils.logger import logger
from ugali.utils.shell import mkdir
components = ['label','objects','associate','candidate','plot','www']
def load_candidates(filename, threshold=0):
""" Load candidates for plotting """
candidates = fitsio.read(filename,lower=True,trim_strings=True)
candidates = candidates[candidates['ts'] >= threshold]
return candidates
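# Example call (filename and threshold hypothetical):
#   candidates = load_candidates('candidates.fits', threshold=25)
# keeps only candidates whose test statistic 'ts' is at least 25.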
def run(self):
if 'label' in self.opts.run:
logger.info("Running 'label'...")
if not hasattr(self,'search'):
self.search = CandidateSearch(self.config)
if exists(self.search.labelfile) and not self.opts.force:
logger.info(" Found %s; skipping..."%self.search.labelfile)
else:
#self.search.createLabels3D()
#self.search.loadLikelhood()
#self.search.loadROI()
self.search.createLabels2D()
self.search.writeLabels()
if 'objects' in self.opts.run:
logger.info("Running 'objects'...")
if not hasattr(self,'search'):
self.search = CandidateSearch(self.config)
if exists(self.search.objectfile) and not self.opts.force:
logger.info(" Found %s; skipping..."%self.search.labelfile)
else:
self.search.loadLabels()
self.search.createObjects()
self.search.writeObjects()
if 'associate' in self.opts.run:
logger.info("Running 'associate'...")
if not hasattr(self,'search'):
self.search = CandidateSearch(self.config)
if exists(self.search.assocfile) and not self.opts.force:
logger.info(" Found %s; skipping..."%self.search.assocfile)
else:
self.search.loadObjects()
self.search.createAssociations()
self.search.writeAssociations()
if 'candidate' in self.opts.run:
logger.info("Running 'candidate'...")
if exists(self.search.candfile) and not self.opts.force:
logger.info(" Found %s; skipping..."%self.search.candfile)
else:
self.search.loadAssociations()
self.search.writeCandidates()
if 'plot' in self.opts.run:
self.opts.run.append('www')
logger.info("Running 'plot'...")
threshold = self.config['search']['cand_threshold']
outdir = mkdir(self.config['output']['plotdir'])
logdir = mkdir(join(outdir,'log'))
# Eventually move this into 'plotting' module
candidates = load_candidates(self.config.candfile,threshold)
for i,c in enumerate(candidates):
name = c['name'].replace('(','').replace(')','')
msg = "(%i/%i) Plotting %s (%.2f,%.2f)..."%(i,len(candidates),name,c['ra'],c['dec'])
logger.info(msg)
params = (self.opts.config,outdir,name,c['ra'],
c['dec'],0.5,c['modulus'])
cmd = 'ugali/scratch/PlotCandidate.py %s %s -n="%s" --cel %f %f --radius %s -m %.2f'
cmd = cmd%params
jobname = name.lower().replace(' ','_')
logfile = join(logdir,jobname+'.log')
batch = self.config['search'].get('batch',self.config['batch'])
out = [join(outdir,jobname+'.png'),
join(outdir,jobname+'_dist.png'),
join(outdir,jobname+'_scat.png')]
if all([exists(o) for o in out]) and not self.opts.force:
logger.info(" Found plots for %s; skipping..."%name)
else:
logger.info(cmd)
self.batch.submit(cmd,jobname,logfile,**batch.get(self.opts.queue,{}))
time.sleep(3)
if 'www' in self.opts.run:
logger.info("Running 'www'...")
threshold = self.config['search']['cand_threshold']
outdir = mkdir(self.config['output']['plotdir'])
# Eventually move this into 'plotting' module
candidates = load_candidates(self.config.candfile,threshold)
from ugali.utils.www import create_index_html
filename = os.path.join(outdir,'index.html')
create_index_html(filename,candidates)
Pipeline.run = run
pipeline = Pipeline(__doc__,components)
pipeline.parse_args()
pipeline.execute()
notmyname/swift | test/unit/cli/test_recon.py | Python | apache-2.0 | 44,459 | 0.000135
# Copyright (c) 2013 Christian Schwede <christian.schwede@enovance.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import mock
import os
import random
import re
import tempfile
import time
import unittest
import shutil
import string
import sys
import six
from eventlet.green import socket
from six import StringIO
from six.moves import urllib
from swift.cli import recon
from swift.common import utils
from swift.common.ring import builder
from swift.common.ring import utils as ring_utils
from swift.common.storage_policy import StoragePolicy, POLICIES
from test.unit import patch_policies
if six.PY3:
    from eventlet.green.urllib import request as urllib2
else:
from eventlet.green import urllib2
class TestHelpers(unittest.TestCase):
def test_seconds2timeunit(self):
self.assertEqual(recon.seconds2timeunit(10), (10, 'seconds'))
self.assertEqual(recon.seconds2timeunit(600), (10, 'minutes'))
self.assertEqual(recon.seconds2timeunit(36000), (10, 'hours'))
self.assertEqual(recon.seconds2timeunit(60 * 60 * 24 * 10),
(10, 'days'))
def test_size_suffix(self):
self.assertEqual(recon.size_suffix(5 * 10 ** 2), '500 bytes')
self.assertEqual(recon.size_suffix(5 * 10 ** 3), '5 kB')
self.assertEqual(recon.size_suffix(5 * 10 ** 6), '5 MB')
self.assertEqual(recon.size_suffix(5 * 10 ** 9), '5 GB')
self.assertEqual(recon.size_suffix(5 * 10 ** 12), '5 TB')
self.assertEqual(recon.size_suffix(5 * 10 ** 15), '5 PB')
self.assertEqual(recon.size_suffix(5 * 10 ** 18), '5 EB')
self.assertEqual(recon.size_suffix(5 * 10 ** 21), '5 ZB')
class TestScout(unittest.TestCase):
def setUp(self, *_args, **_kwargs):
self.scout_instance = recon.Scout("type", suppress_errors=True)
self.url = 'http://127.0.0.1:8080/recon/type'
self.server_type_url = 'http://127.0.0.1:8080/'
@mock.patch('eventlet.green.urllib2.urlopen')
def test_scout_ok(self, mock_urlopen):
mock_urlopen.return_value.read = lambda: json.dumps([])
url, content, status, ts_start, ts_end = self.scout_instance.scout(
("127.0.0.1", "8080"))
self.assertEqual(url, self.url)
self.assertEqual(content, [])
self.assertEqual(status, 200)
@mock.patch('eventlet.green.urllib2.urlopen')
def test_scout_url_error(self, mock_urlopen):
mock_urlopen.side_effect = urllib2.URLError("")
url, content, status, ts_start, ts_end = self.scout_instance.scout(
("127.0.0.1", "8080"))
self.assertIsInstance(content, urllib2.URLError)
self.assertEqual(url, self.url)
self.assertEqual(status, -1)
@mock.patch('eventlet.green.urllib2.urlopen')
def test_scout_http_error(self, mock_urlopen):
mock_urlopen.side_effect = urllib2.HTTPError(
self.url, 404, "Internal error", None, None)
url, content, status, ts_start, ts_end = self.scout_instance.scout(
("127.0.0.1", "8080"))
self.assertEqual(url, self.url)
self.assertIsInstance(content, urllib2.HTTPError)
self.assertEqual(status, 404)
@mock.patch('eventlet.green.urllib2.urlopen')
def test_scout_socket_timeout(self, mock_urlopen):
mock_urlopen.side_effect = socket.timeout("timeout")
url, content, status, ts_start, ts_end = self.scout_instance.scout(
("127.0.0.1", "8080"))
self.assertIsInstance(content, socket.timeout)
self.assertEqual(url, self.url)
self.assertEqual(status, -1)
@mock.patch('eventlet.green.urllib2.urlopen')
def test_scout_server_type_ok(self, mock_urlopen):
def getheader(name):
d = {'Server': 'server-type'}
return d.get(name)
mock_urlopen.return_value.info.return_value.getheader = getheader
url, content, status = self.scout_instance.scout_server_type(
("127.0.0.1", "8080"))
self.assertEqual(url, self.server_type_url)
self.assertEqual(content, 'server-type')
self.assertEqual(status, 200)
@mock.patch('eventlet.green.urllib2.urlopen')
def test_scout_server_type_url_error(self, mock_urlopen):
mock_urlopen.side_effect = urllib2.URLError("")
url, content, status = self.scout_instance.scout_server_type(
("127.0.0.1", "8080"))
self.assertIsInstance(content, urllib2.URLError)
self.assertEqual(url, self.server_type_url)
self.assertEqual(status, -1)
@mock.patch('eventlet.green.urllib2.urlopen')
def test_scout_server_type_http_error(self, mock_urlopen):
mock_urlopen.side_effect = urllib2.HTTPError(
self.server_type_url, 404, "Internal error", None, None)
url, content, status = self.scout_instance.scout_server_type(
("127.0.0.1", "8080"))
self.assertEqual(url, self.server_type_url)
self.assertIsInstance(content, urllib2.HTTPError)
self.assertEqual(status, 404)
@mock.patch('eventlet.green.urllib2.urlopen')
def test_scout_server_type_socket_timeout(self, mock_urlopen):
mock_urlopen.side_effect = socket.timeout("timeout")
url, content, status = self.scout_instance.scout_server_type(
("127.0.0.1", "8080"))
self.assertIsInstance(content, socket.timeout)
self.assertEqual(url, self.server_type_url)
self.assertEqual(status, -1)
@patch_policies
class TestRecon(unittest.TestCase):
def setUp(self, *_args, **_kwargs):
self.swift_conf_file = utils.SWIFT_CONF_FILE
self.recon_instance = recon.SwiftRecon()
self.swift_dir = tempfile.mkdtemp()
self.ring_name = POLICIES.legacy.ring_name
self.tmpfile_name = os.path.join(
self.swift_dir, self.ring_name + '.ring.gz')
self.ring_name2 = POLICIES[1].ring_name
self.tmpfile_name2 = os.path.join(
self.swift_dir, self.ring_name2 + '.ring.gz')
swift_conf = os.path.join(self.swift_dir, 'swift.conf')
self.policy_name = ''.join(random.sample(string.letters, 20))
with open(swift_conf, "wb") as sc:
sc.write('''
[swift-hash]
swift_hash_path_suffix = changeme
[storage-policy:0]
name = default
default = yes
[storage-policy:1]
name = unu
aliases = %s
''' % self.policy_name)
def tearDown(self, *_args, **_kwargs):
utils.SWIFT_CONF_FILE = self.swift_conf_file
shutil.rmtree(self.swift_dir, ignore_errors=True)
def _make_object_rings(self):
ringbuilder = builder.RingBuilder(2, 3, 1)
devs = [
'r0z0-127.0.0.1:10000/sda1',
'r0z1-127.0.0.1:10001/sda1',
'r1z0-127.0.0.1:10002/sda1',
'r1z1-127.0.0.1:10003/sda1',
]
for raw_dev_str in devs:
dev = ring_utils.parse_add_value(raw_dev_str)
dev['weight'] = 1.0
ringbuilder.add_dev(dev)
ringbuilder.rebalance()
ringbuilder.get_ring().save(self.tmpfile_name)
ringbuilder = builder.RingBuilder(2, 2, 1)
devs = [
'r0z0-127.0.0.1:10000/sda1',
'r0z1-127.0.0.2:10004/sda1',
]
for raw_dev_str in devs:
dev = ring_utils.parse_add_value(raw_dev_str)
dev['weight'] = 1.0
ringbuilder.add_dev(dev)
ringbuilder.rebalance()
ringbuilder.get_ring().save(self.tmpfile_name2)
def test_gen_stats(self):
stats = self.recon_instance._gen_stats((1, 4, 10, None), 'Sample')
self.assertEqual(stats.get('name'), 'Sample')
self.assertEqual(stats.get('average'), 5.0)
        self.assertEqual(stats.get('high'), 10)
nemobis/bots | iccd-trc2csv.py | Python | gpl-3.0 | 4,621 | 0.01926
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Script to convert an ICCD TRC file to CSV format.
The input file is assumed to be UTF-8 with UNIX line ending.
"""
#
# (C) Federico Leva, 2016
#
# Distributed under the terms of the MIT license.
#
__version__ = '0.1.0'
import codecs
import unicodecsv as csv
from collections import namedtuple, defaultdict
import os
import re
import sys
import subprocess
# Uploader
import pywikibot
import pywikibot.data.api
from pywikibot import config
from upload import UploadRobot
# Campi ripetibili
# http://www.iccd.beniculturali.it/index.php?it/473/standard-catalografici/Standard/29
rip = ['AAT', 'ACC', 'ACS', 'ADM', 'ADT', 'AGG', 'AGGF', 'AGGR', 'AIN', 'ALN', 'ATB', 'ATBM', 'AUT', 'AUTM', 'BIB', 'BIL', 'CDGI', 'CDGS', 'CMM', 'CMMN', 'CMPN', 'DESI', 'DESS', 'DRA', 'DSCA', 'DSCF', 'DTM', 'EDT', 'ESP', 'FNT', 'FTA', 'FUR', 'INV', 'ISP', 'ISR', 'MIS', 'MISV', 'MST', 'MSTD', 'MSTL', 'MTC', 'NVC', 'NVCD', 'RCGA', 'RCGS', 'REG', 'REI', 'ROZ', 'RSE', 'RSR', 'RST', 'RSTN', 'RSTR', 'RVES', 'SGTI', 'SGTT', 'STI', 'STM', 'VDC']
trc = codecs.open('iccd.trc', 'r', encoding='utf-8')
records = trc.read().split('CD:\n')[1:]
trc.close()
data = []
for i in range(0, len(records)-1):
data.append({})
counter = defaultdict(int)
record = re.sub(r'\n {6}', '', re.sub(r'\.\n {6}', ' ', records[i]))
for field in record.splitlines():
datum = field.split(': ', 1)
if len(datum) < 2:
# This must be a 2 or 3 letters code without content
datum = field.split(':', 1)
# Take note of which iteration of this field we're at, to properly store subfields.
if datum[0] in rip:
counter[datum[0]] += 1
continue
else:
# Take note of which iteration of this field we're at, to properly store subfields.
if datum[0] in rip:
counter[datum[0]] += 1
if datum[0] not in rip:
if datum[0][:-1] not in rip:
# We're in luck! Just add the field to our table.
data[i][datum[0]] = datum[1]
else:
data[i][datum[0][:-1] + str(counter[datum[0][:-1]]) + datum[0][-1]] = datum[1]
else:
if datum[0][:-1] not in rip:
data[i][datum[0] + str(counter[datum[0]])] = datum[1]
else:
data[i][datum[0][:-1] + str(counter[datum[0][:-1]]) + datum[0][-1] + str(counter[datum[0]])] = datum[1]
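# Worked example of the key naming above (hypothetical record): a repeatable
# field FTA with subfield I occurring twice is stored as 'FTA1I' and 'FTA2I',
# matching the 'FTA[0-9]+I' lookup used further down, while a non-repeatable
# code is stored under its plain name.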
# Anticipate the upload here until we get the CSV writing fixed
# FIXME: split and actually save to csvfile
for i in range(0, len(data)-1):
description = u"""{{ICCD TRC
| institution = {{institution:Museoscienza}}
| permission = {{cc-by-sa-4.0}}
"""
filenames = []
directory = './Foto_CATALOGO_01/%s_foto/' % data[i]['IDK'].split('-')[0].strip()
    for key in data[i].iterkeys():
if key == "IDK":
description += "| source = {{Museoscienza|idk=%s}}\n" % data[i]['IDK']
else:
description += u"| %s = %s\n" % (key, data[i][key])
if re.match('FTA[0-9]+I', key):
filenames.append(directory + data[i][key])
description += u"}}"
# The filenames may have excess leading zeros, but we do not want partial matches.
needle = r'(^|[^0-9])0*%s[^0-9]' % re.sub('[^0-9]', '', data[i]['INV1N'])
for image in os.listdir(directory):
if re.match(needle, image):
filenames.append(directory + image)
if not filenames:
print "ERROR: No files found for record %s, inventory %s" % (data[i]['IDK'], data[i]['INV1N'])
continue
for filename in filenames:
try:
prefix = "%s %s" % (data[i]['OGTD'], data[i]['OGTT'])
except:
prefix = data[i]['OGTD']
prefix = re.sub('[#<>\[\]|{}/?]', '', prefix)
commons = u"%s - Museo scienza tecnologia Milano %s" % (prefix, filename.split('/')[-1])
print commons
try:
upload = UploadRobot(filename, description=description,
useFilename=commons, keepFilename=True,
verifyDescription=False, ignoreWarning=False, aborts=True)
upload.run()
os.remove(filename)
except:
pywikibot.output("ERROR: The upload could not be completed.")
"""
# Prepare to write out to CSV: find out what columns we need
fieldnames = {}
header = []
for i in range(0, len(data)-1):
for key in data[i].iterkeys():
fieldnames[key] = True
for name in fieldnames.iterkeys():
header.append(name)
print(header)
# Fill the blanks and get an actual table
for i in range(0, len(data)-1):
for column in header:
if column not in data[i]:
data[i][column] = u""
table = namedtuple('table', ', '.join(header))
table = [table._make(row) for row in data]
# Actually write out to CSV
with codecs.open('iccd.csv', 'w', encoding='utf-8') as csvfile:
out = csv.writer(csvfile, delimiter='\t',
lineterminator='\n',
quoting=csv.QUOTE_MINIMAL)
out.writerow(header)
for row in table:
out.writerow(row)
"""
Semprini/cbe | cbe/cbe/wsgi.py | Python | apache-2.0 | 383 | 0
"""
WSGI config for cbe project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "cbe.settings")
application = get_wsgi_application()
vgteam/toil-vg | src/toil_vg/iostore.py | Python | apache-2.0 | 36,706 | 0.009562
"""
IOStore class originated here
https://github.com/BD2KGenomics/hgvm-graph-bakeoff-evaluations/blob/master/scripts/toillib.py
and was then here:
https://github.com/cmarkello/toil-lib/blob/master/src/toil_lib/toillib.py
In a perfect world, this would be deprecated and replaced with Toil's stores.
Actually did this here:
https://github.com/glennhickey/toil-vg/tree/issues/110-fix-iostore
But couldn't get Toil's multipart S3 uploader working on large files. Also,
the toil jobStore interface is a little less clean for our use.
So for now keep as part of toil-vg where it works. Could also consider merging
into the upstream toil-lib
https://github.com/BD2KGenomics/toil-lib
"""
import sys, os, os.path, json, collections, logging, logging.handlers
import struct, socket, threading, tarfile, shutil
import tempfile
import functools
import random
import time
import dateutil
import traceback
import stat
from toil.realtimeLogger import RealtimeLogger
import datetime
# Need stuff for Amazon s3
try:
import boto3
import botocore
have_s3 = True
except ImportError:
have_s3 = False
pass
# We need some stuff in order to have Azure
try:
import azure
# Make sure to get the 0.11 BlobService, in case the new azure storage
# module is also installed.
from azure.storage.blob import BlobService
import toil.jobStores.azureJobStore
have_azure = True
except ImportError:
have_azure = False
pass
def robust_makedirs(directory):
"""
Make a directory when other nodes may be trying to do the same on a shared
filesystem.
"""
if not os.path.exists(directory):
try:
# Make it if it doesn't exist
os.makedirs(directory)
except OSError:
# If you can't make it, maybe someone else did?
pass
# Make sure it exists and is a directory
assert(os.path.exists(directory) and os.path.isdir(directory))
def write_global_directory(file_store, path, cleanup=False, tee=None, compress=True):
"""
Write the given directory into the file store, and return an ID that can be
used to retrieve it. Writes the files in the directory and subdirectories
into a tar file in the file store.
Does not preserve the name or permissions of the given directory (only of
its contents).
If cleanup is true, directory will be deleted from the file store when this
job and its follow-ons finish.
If tee is passed, a tar.gz of the directory contents will be written to that
filename. The file thus created must not be modified after this function is
called.
"""
write_stream_mode = "w"
if compress:
write_stream_mode = "w|gz"
if tee is not None:
with open(tee, "w") as file_handle:
# We have a stream, so start taring into it
with tarfile.open(fileobj=file_handle, mode=write_stream_mode) as tar:
# Open it for streaming-only write (no seeking)
# We can't just add the root directory, since then we wouldn't be
# able to extract it later with an arbitrary name.
for file_name in os.listdir(path):
                    # Add each file in the directory to the tar, with a
                    # relative path
tar.add(os.path.join(path, file_name), arcname=file_name)
# Save the file on disk to the file store.
return file_store.writeGlobalFile(tee)
    else:
with file_store.writeGlobalFileStream(cleanup=cleanup) as (file_handle,
file_id):
# We have a stream, so start taring into it
# TODO: don't duplicate this code.
with tarfile.open(fileobj=file_handle, mode=write_stream_mode) as tar:
# Open it for streaming-only write (no seeking)
# We can't just add the root directory, since then we wouldn't be
# able to extract it later with an arbitrary name.
for file_name in os.listdir(path):
# Add each file in the directory to the tar, with a relative
# path
tar.add(os.path.join(path, file_name), arcname=file_name)
# Spit back the ID to use to retrieve it
return file_id
def read_global_directory(file_store, directory_id, path):
"""
Reads a directory with the given tar file id from the global file store and
recreates it at the given path.
The given path, if it exists, must be a directory.
Do not use to extract untrusted directories, since they could sneakily plant
files anywhere on the filesystem.
"""
# Make the path
robust_makedirs(path)
with file_store.readGlobalFileStream(directory_id) as file_handle:
# We need to pull files out of this tar stream
with tarfile.open(fileobj=file_handle, mode="r|*") as tar:
# Open it for streaming-only read (no seeking)
# We need to extract the whole thing into that new directory
tar.extractall(path)
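# Illustrative round trip (paths hypothetical):
#   directory_id = write_global_directory(file_store, '/tmp/outputs')
#   read_global_directory(file_store, directory_id, '/tmp/outputs_restored')
# recreates the directory contents, though not the original name or permissions.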
class IOStore(object):
"""
A class that lets you get your input files and save your output files
to/from a local filesystem, Amazon S3, or Microsoft Azure storage
transparently.
This is the abstract base class; other classes inherit from this and fill in
the methods.
"""
def __init__(self):
"""
Make a new IOStore
"""
raise NotImplementedError()
def read_input_file(self, input_path, local_path):
"""
Read an input file from wherever the input comes from and send it to the
given path.
If the file at local_path already exists, it is overwritten.
If the file at local_path already exists and is a directory, behavior is
undefined.
"""
raise NotImplementedError()
def list_input_directory(self, input_path, recursive=False,
with_times=False):
"""
Yields each of the subdirectories and files in the given input path.
If recursive is false, yields files and directories in the given
directory. If recursive is true, yields all files contained within the
current directory, recursively, but does not yield folders.
If with_times is True, yields (name, modification time) pairs instead of
just names, with modification times represented as datetime objects in
the GMT timezone. Modification times may be None on objects that do not
support them.
Gives relative file/directory names.
"""
raise NotImplementedError()
def write_output_file(self, local_path, output_path):
"""
Save the given local file to the given output path. No output directory
needs to exist already.
If the output path already exists, it is overwritten.
If the output path already exists and is a directory, behavior is
undefined.
"""
raise NotImplementedError()
def exists(self, path):
"""
Returns true if the given input or output file exists in the store
already.
"""
raise NotImplementedError()
def get_mtime(self, path):
"""
        Returns the modification time of the given file if it exists, or None
otherwise.
"""
raise NotImplementedError()
def get_size(self, path):
"""
Returns the size in bytes of the given file if it exists, or None
otherwise.
"""
raise NotImplementedError()
@staticmethod
def absolute(store_string):
"""
        Convert a relative path IOStore string to an absolute path one. Leaves
        strings that aren't FileIOStore specifications alone.
kjchalup/dtit | fcit/fcit.py | Python | mit | 7,474 | 0.001338
""" A fast conditional independence test.
This implementation uses the joblib library to parallelize test
statistic computation over all available cores. By default, num_perm=8
(instead of num_perm=10 in the non-parallel version) as 8 cores is a
common number on current architectures.
Reference:
Chalupka, Krzysztof and Perona, Pietro and Eberhardt, Frederick, 2017.
"""
import os
import time
import joblib
import numpy as np
from scipy.stats import ttest_1samp
from sklearn.tree import DecisionTreeRegressor
from sklearn.model_selection import GridSearchCV
from sklearn.model_selection import RandomizedSearchCV
from sklearn.model_selection import ShuffleSplit
from sklearn.random_projection import GaussianRandomProjection
from sklearn.preprocessing import StandardScaler
from sklearn.metrics import mean_squared_error as mse
def interleave(x, z, seed=None):
""" Interleave x and z dimension-wise.
Args:
x (n_samples, x_dim) array.
z (n_samples, z_dim) array.
Returns
An array of shape (n_samples, x_dim + z_dim) in which
the columns of x and z are interleaved at random.
"""
state = np.random.get_state()
np.random.seed(seed or int(time.time()))
total_ids = np.random.permutation(x.shape[1]+z.shape[1])
np.random.set_state(state)
out = np.zeros([x.shape[0], x.shape[1] + z.shape[1]])
out[:, total_ids[:x.shape[1]]] = x
out[:, total_ids[x.shape[1]:]] = z
return out
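# Shape sketch (sizes illustrative): for x of shape (100, 2) and z of shape
# (100, 3), interleave(x, z) returns a (100, 5) array whose columns are the
# columns of x and z mixed in a random (but seedable) order.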
def cv_besttree(x, y, z, cv_grid, logdim, verbose, prop_test):
""" Choose the best decision tree hyperparameters by
cross-validation. The hyperparameter to optimize is min_samples_split
(see sklearn's DecisionTreeRegressor).
Args:
x (n_samples, x_dim): Input data array.
y (n_samples, y_dim): Output data array.
        z (n_samples, z_dim): Optional auxiliary input data.
cv_grid (list of floats): List of hyperparameter values to try.
logdim (bool): If True, set max_features to 'log2'.
verbose (bool): If True, print out extra info.
prop_test (float): Proportion of validation data to use.
Returns:
DecisionTreeRegressor with the best hyperparameter setting.
"""
xz_dim = x.shape[1] + z.shape[1]
max_features='log2' if (logdim and xz_dim > 10) else None
if cv_grid is None:
min_samples_split = 2
elif len(cv_grid) == 1:
min_samples_split = cv_grid[0]
else:
clf = DecisionTreeRegressor(max_features=max_features)
splitter = ShuffleSplit(n_splits=3, test_size=prop_test)
cv = GridSearchCV(estimator=clf, cv=splitter,
param_grid={'min_samples_split': cv_grid}, n_jobs=-1)
cv.fit(interleave(x, z), y)
min_samples_split = cv.best_params_['min_samples_split']
if verbose:
print('min_samples_split: {}.'.format(min_samples_split))
clf = DecisionTreeRegressor(max_features=max_features,
min_samples_split=min_samples_split)
return clf
def obtain_error(data_and_i):
"""
A function used for multithreaded computation of the fcit test statistic.
data['x']: First variable.
data['y']: Second variable.
data['z']: Conditioning variable.
data['data_permutation']: Permuted indices of the data.
data['perm_ids']: Permutation for the bootstrap.
data['n_test']: Number of test points.
data['clf']: Decision tree regressor.
"""
data, i = data_and_i
x = data['x']
y = data['y']
z = data['z']
if data['reshuffle']:
perm_ids = np.random.permutation(x.shape[0])
else:
perm_ids = np.arange(x.shape[0])
data_permutation = data['data_permutation'][i]
n_test = data['n_test']
clf = data['clf']
x_z = interleave(x[perm_ids], z, seed=i)
clf.fit(x_z[data_permutation][n_test:], y[data_permutation][n_test:])
return mse(y[data_permutation][:n_test],
clf.predict(x_z[data_permutation][:n_test]))
def test(x, y, z=None, num_perm=8, prop_test=.1,
discrete=(False, False), plot_return=False, verbose=False,
logdim=False, cv_grid=[2, 8, 64, 512, 1e-2, .2, .4], **kwargs):
""" Fast conditional independence test, based on decision-tree regression.
See Chalupka, Perona, Eberhardt 2017 [arXiv link coming].
Args:
x (n_samples, x_dim): First variable.
y (n_samples, y_dim): Second variable.
z (n_samples, z_dim): Conditioning variable. If z==None (default),
then performs an unconditional independence test.
num_perm: Number of data permutations to estimate
the p-value from marginal stats.
prop_test (int): Proportion of data to evaluate test stat on.
discrete (bool, bool): Whether x or y are discrete.
plot_return (bool): If True, return statistics useful for plotting.
verbose (bool): Print out progress messages (or not).
logdim (bool): If True, set max_features='log2' in the decision tree.
        cv_grid (list): min_samples_split values to cross-validate when training
the decision tree regressor.
Returns:
p (float): The p-value for the null hypothesis
that x is independent of y.
"""
# Compute test set size.
n_samples = x.shape[0]
n_test = int(n_samples * prop_test)
if z is None:
z = np.empty([n_samples, 0])
if discrete[0] and not discrete[1]:
# If x xor y is discrete, use the continuous variable as input.
x, y = y, x
elif x.shape[1] < y.shape[1]:
# Otherwise, predict the variable with fewer dimensions.
x, y = y, x
# Normalize y to make the decision tree stopping criterion meaningful.
y = StandardScaler().fit_transform(y)
# Set up storage for true data and permuted data MSEs.
d0_stats = np.zeros(num_perm)
d1_stats = np.zeros(num_perm)
data_permutations = [
np.random.permutation(n_samples) for i in range(num_perm)]
# Compute mses for y = f(x, z), varying train-test splits.
clf = cv_besttree(x, y, z, cv_grid, logdim, verbose, prop_test=prop_test)
datadict = {
'x': x,
'y': y,
'z': z,
'data_permutation': data_permutations,
'n_test': n_test,
'reshuffle': False,
'clf': clf,
}
d1_stats = np.array(joblib.Parallel(n_jobs=-1, max_nbytes=100e6)(
joblib.delayed(obtain_error)((datadict, i)) for i in range(num_perm)))
# Compute mses for y = f(x, reshuffle(z)), varying train-test splits.
if z.shape[1] == 0:
x_indep_y = x[np.random.permutation(n_samples)]
else:
x_indep_y = np.empty([x.shape[0], 0])
clf = cv_besttree(x_indep_y, y, z, cv_grid, logdim,
verbose, prop_test=prop_test)
datadict['reshuffle'] = True
datadict['x'] = x_indep_y
d0_stats = np.array(joblib.Parallel(n_jobs=-1, max_nbytes=100e6)(
joblib.delayed(obtain_error)((datadict, i)) for i in range(num_perm)))
if verbose:
np.set_printoptions(precision=3)
print('D0 statistics: {}'.format(d0_stats))
print('D1 statistics: {}\n'.format(d1_stats))
# Compute the p-value (one-tailed t-test
# that mean of mse ratios equals 1).
t, p_value = ttest_1samp(d0_stats / d1_stats, 1)
if t < 0:
p_value = 1 - p_value / 2
else:
p_value = p_value / 2
if plot_return:
return (p_value, d0_stats, d1_stats)
else:
return p_value
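# Minimal usage sketch (synthetic data, illustrative only):
#   import numpy as np
#   x = np.random.randn(500, 1)
#   y = x + .1 * np.random.randn(500, 1)
#   test(x, y)       # expect a small p-value: x and y are dependent
#   test(x, y, z=x)  # expect a large p-value: independent given z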
xiangke/pycopia | mibs/pycopia/mibs/HOST_RESOURCES_MIB_OID.py | Python | lgpl-2.1 | 5,000 | 0.0164
# python
# This file is generated by a program (mib2py).
import HOST_RESOURCES_MIB
OIDMAP = {
'1.3.6.1.2.1.25': HOST_RESOURCES_MIB.host,
'1.3.6.1.2.1.25.1': HOST_RESOURCES_MIB.hrSystem,
'1.3.6.1.2.1.25.2': HOST_RESOURCES_MIB.hrStorage,
'1.3.6.1.2.1.25.2.1': HOST_RESOURCES_MIB.hrStorageTypes,
'1.3.6.1.2.1.25.3': HOST_RESOURCES_MIB.hrDevice,
'1.3.6.1.2.1.25.3.1': HOST_RESOURCES_MIB.hrDeviceTypes,
'1.3.6.1.2.1.25.3.9': HOST_RESOURCES_MIB.hrFSTypes,
'1.3.6.1.2.1.25.4': HOST_RESOURCES_MIB.hrSWRun,
'1.3.6.1.2.1.25.5': HOST_RESOURCES_MIB.hrSWRunPerf,
'1.3.6.1.2.1.25.6': HOST_RESOURCES_MIB.hrSWInstalled,
'1.3.6.1.2.1.25.7': HOST_RESOURCES_MIB.hrMIBAdminInfo,
'1.3.6.1.2.1.25.7.1': HOST_RESOURCES_MIB.hostResourcesMibModule,
'1.3.6.1.2.1.25.7.2': HOST_RESOURCES_MIB.hrMIBCompliances,
'1.3.6.1.2.1.25.7.3': HOST_RESOURCES_MIB.hrMIBGroups,
'1.3.6.1.2.1.25.1.1': HOST_RESOURCES_MIB.hrSystemUptime,
'1.3.6.1.2.1.25.1.2': HOST_RESOURCES_MIB.hrSystemDate,
'1.3.6.1.2.1.25.1.3': HOST_RESOURCES_MIB.hrSystemInitialLoadDevice,
'1.3.6.1.2.1.25.1.4': HOST_RESOURCES_MIB.hrSystemInitialLoadParameters,
'1.3.6.1.2.1.25.1.5': HOST_RESOURCES_MIB.hrSystemNumUsers,
'1.3.6.1.2.1.25.1.6': HOST_RESOURCES_MIB.hrSystemProcesses,
'1.3.6.1.2.1.25.1.7': HOST_RESOURCES_MIB.hrSystemMaxProcesses,
'1.3.6.1.2.1.25.2.2': HOST_RESOURCES_MIB.hrMemorySize,
'1.3.6.1.2.1.25.4.1': HOST_RESOURCES_MIB.hrSWOSIndex,
'1.3.6.1.2.1.25.6.1': HOST_RESOURCES_MIB.hrSWInstalledLastChange,
'1.3.6.1.2.1.25.6.2': HOST_RESOURCES_MIB.hrSWInstalledLastUpdateTime,
'1.3.6.1.2.1.25.2.3.1.1': HOST_RESOURCES_MIB.hrStorageIndex,
'1.3.6.1.2.1.25.2.3.1.2': HOST_RESOURCES_MIB.hrStorageType,
'1.3.6.1.2.1.25.2.3.1.3': HOST_RESOURCES_MIB.hrStorageDescr,
'1.3.6.1.2.1.25.2.3.1.4': HOST_RESOURCES_MIB.hrStorageAllocationUnits,
'1.3.6.1.2.1.25.2.3.1.5': HOST_RESOURCES_MIB.hrStorageSize,
'1.3.6.1.2.1.25.2.3.1.6': HOST_RESOURCES_MIB.hrStorageUsed,
'1.3.6.1.2.1.25.2.3.1.7': HOST_RESOURCES_MIB.hrStorageAllocationFailures,
'1.3.6.1.2.1.25.3.2.1.1': HOST_RESOURCES_MIB.hrDeviceIndex,
'1.3.6.1.2.1.25.3.2.1.2': HOST_RESOURCES_MIB.hrDeviceType,
'1.3.6.1.2.1.25.3.2.1.3': HOST_RESOURCES_MIB.hrDeviceDescr,
'1.3.6.1.2.1.25.3.2.1.4': HOST_RESOURCES_MIB.hrDeviceID,
'1.3.6.1.2.1.25.3.2.1.5': HOST_RESOURCES_MIB.hrDeviceStatus,
'1.3.6.1.2.1.25.3.2.1.6': HOST_RESOURCES_MIB.hrDeviceErrors,
'1.3.6.1.2.1.25.3.3.1.1': HOST_RESOURCES_MIB.hrProcessorFrwID,
'1.3.6.1.2.1.25.3.3.1.2': HOST_RESOURCES_MIB.hrProcessorLoad,
'1.3.6.1.2.1.25.3.4.1.1': HOST_RESOURCES_MIB.hrNetworkIfIndex,
'1.3.6.1.2.1.25.3.5.1.1': HOST_RESOURCES_MIB.hrPrinterStatus,
'1.3.6.1.2.1.25.3.5.1.2': HOST_RESOURCES_MIB.hrPrinterDetectedErrorState,
'1.3.6.1.2.1.25.3.6.1.1': HOST_RESOURCES_MIB.hrDiskStorageAccess,
'1.3.6.1.2.1.25.3.6.1.2': HOST_RESOURCES_MIB.hrDiskStorageMedia,
'1.3.6.1.2.1.25.3.6.1.3': HOST_RESOURCES_MIB.hrDiskStorageRemoveble,
'1.3.6.1.2.1.25.3.6.1.4': HOST_RESOURCES_MIB.hrDiskStorageCapacity,
'1.3.6.1.2.1.25.3.7.1.1': HOST_RESOURCES_MIB.hrPartitionIndex,
'1.3.6.1.2.1.25.3.7.1.2': HOST_RESOURCES_MIB.hrPartitionLabel,
'1.3.6.1.2.1.25.3.7.1.3': HOST_RESOURCES_MIB.hrPartitionID,
'1.3.6.1.2.1.25.3.7.1.4': HOST_RESOURCES_MIB.hrPartitionSize,
'1.3.6.1.2.1.25.3.7.1.5': HOST_RESOURCES_MIB.hrPartitionFSIndex,
'1.3.6.1.2.1.25.3.8.1.1': HOST_RESOURCES_MIB.hrFSIndex,
'1.3.6.1.2.1.25.3.8.1.2': HOST_RESOURCES_MIB.hrFSMountPoint,
'1.3.6.1.2.1.25.3.8.1.3': HOST_RESOURCES_MIB.hrFSRemoteMountPoint,
'1.3.6.1.2.1.25.3.8.1.4': HOST_RESOURCES_MIB.hrFSType,
'1.3.6.1.2.1.25.3.8.1.5': HOST_RESOURCES_MIB.hrFSAccess,
'1.3.6.1.2.1.25.3.8.1.6': HOST_RESOURCES_MIB.hrFSBootable,
'1.3.6.1.2.1.25.3.8.1.7': HOST_RESOURCES_MIB.hrFSStorageIndex,
'1.3.6.1.2.1.25.3.8.1.8': HOST_RESOURCES_MIB.hrFSLastFullBackupDate,
'1.3.6.1.2.1.25.3.8.1.9': HOST_RESOURCES_MIB.hrFSLastPartialBackupDate,
'1.3.6.1.2.1.25.4.2.1.1': HOST_RESOURCES_MIB.hrSWRunIndex,
'1.3.6.1.2.1.25.4.2.1.2': HOST_RESOURCES_MIB.hrSWRunName,
'1.3.6.1.2.1.25.4.2.1.3': HOST_RESOURCES_MIB.hrSWRunID,
'1.3.6.1.2.1.25.4.2.1.4': HOST_RESOURCES_MIB.hrSWRunPath,
'1.3.6.1.2.1.25.4.2.1.5': HOST_RESOURCES_MIB.hrSWRunParameters,
'1.3.6.1.2.1.25.4.2.1.6': HOST_RESOURCES_MIB.hrSWRunType,
'1.3.6.1.2.1.25.4.2.1.7': HOST_RESOURCES_MIB.hrSWRunStatus,
'1.3.6.1.2.1.25.5.1.1.1': HOST_RESOURCES_MIB.hrSWRunPerfCPU,
'1.3.6.1.2.1.25.5.1.1.2': HOST_RESOURCES_MIB.hrSWRunPerfMem,
'1.3.6.1.2.1.25.6.3.1.1': HOST_RESOURCES_MIB.hrSWInstalledIndex,
'1.3.6.1.2.1.25.6.3.1.2': HOST_RESOURCES_MIB.hrSWInstalledName,
'1.3.6.1.2.1.25.6.3.1.3': HOST_RESOURCES_MIB.hrSWInstalledID,
'1.3.6.1.2.1.25.6.3.1.4': HOST_RESOURCES_MIB.hrSWInstalledType,
'1.3.6.1.2.1.25.6.3.1.5': HOST_RESOURCES_MIB.hrSWInstalledDate,
'1.3.6.1.2.1.25.7.3.1': HOST_RESOURCES_MIB.hrSystemGroup,
'1.3.6.1.2.1.25.7.3.2': HOST_RESOURCES_MIB.hrStorageGroup,
'1.3.6.1.2.1.25.7.3.3': HOST_RESOURCES_MIB.hrDeviceGroup,
'1.3.6.1.2.1.25.7.3.4': HOST_RESOURCES_MIB.hrSWRunGroup,
'1.3.6.1.2.1.25.7.3.5': HOST_RESOURCES_MIB.hrSWRunPerfGroup,
'1.3.6.1.2.1.25.7.3.6': HOST_RESOURCES_MIB.hrSWInstalledGroup,
}
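# Example lookup (illustrative): resolving a numeric OID to its MIB object.
#   OIDMAP['1.3.6.1.2.1.25.1.1']  # -> HOST_RESOURCES_MIB.hrSystemUptime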
AMOboxTV/AMOBox.LegoBuild | plugin.video.titan/resources/lib/resolvers/ishared.py | Python | gpl-2.0 | 1,793 | 0.016732
# -*- coding: utf-8 -*-
'''
Genesis Add-on
Copyright (C) 2015 lambda
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
    GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,urllib
from resources.lib.libraries import client
from resources.lib.libraries import jsunpack
def resolve(url):
try:
headers = '|%s' % urllib.urlencode({'User-Agent': client.agent(), 'Referer': url})
url = url.replace('/video/', '/embed/')
result = client.request(url)
unpacked = ''
packed = result.split('\n')
for i in packed:
try: unpacked += jsunpack.unpack(i)
except: pass
result += unpacked
result = re.sub('\s\s+', ' ', result)
var = re.compile('var\s(.+?)\s*=\s*\'(.+?)\'').findall(result)
for i in range(100):
for v in var: result = result.replace("' %s '" % v[0], v[1]).replace("'%s'" % v[0], v[1])
url = re.compile('sources\s*:\s*\[.+?file\s*:\s*(.+?)\s*\,').findall(result)[0]
var = re.compile('var\s+%s\s*=\s*\'(.+?)\'' % url).findall(result)
if len(var) > 0: url = var[0].strip()
url += headers
if url.startswith('http'): return url
except:
return
zephyrplugins/zephyr | zephyr.plugin.jython/jython2.5.2rc3/Lib/test/test_list_jy.py | Python | epl-1.0 | 3,309 | 0.007555
import unittest
import random
import threading
import time
from test import test_support
if test_support.is_jython:
from java.util import ArrayList
from java.lang import String
class ListTestCase(unittest.TestCase):
def test_recursive_list_slices(self):
x = [1,2,3,4,5]
x[1:] = x
self.assertEquals(x, [1, 1, 2, 3, 4, 5],
"Recursive assignment to list slices failed")
    def test_subclass_richcmp(self):
# http://bugs.jython.org/issue1115
class Foo(list):
def __init__(self, dotstring):
list.__init__(self, map(int, dotstring.split(".")))
bar1 = Foo('1.2.3')
bar2 = Foo('1.2.4')
self.assert_(bar1 < bar2)
self.assert_(bar1 <= bar2)
self.assert_(bar2 > bar1)
self.assert_(bar2 >= bar1)
def test_setget_override(self):
if not test_support.is_jython:
return
# http://bugs.jython.org/issue600790
class GoofyListMapThing(ArrayList):
def __init__(self):
self.silly = "Nothing"
def __setitem__(self, key, element):
self.silly = "spam"
def __getitem__(self, key):
self.silly = "eggs"
glmt = GoofyListMapThing()
glmt['my-key'] = String('el1')
self.assertEquals(glmt.silly, "spam")
glmt['my-key']
self.assertEquals(glmt.silly, "eggs")
def test_tuple_equality(self):
self.assertEqual([(1,), [1]].count([1]), 1) # http://bugs.jython.org/issue1317
class ThreadSafetyTestCase(unittest.TestCase):
def run_threads(self, f, num=10):
threads = []
for i in xrange(num):
t = threading.Thread(target=f)
t.start()
threads.append(t)
timeout = 10. # be especially generous
for t in threads:
t.join(timeout)
timeout = 0.
for t in threads:
self.assertFalse(t.isAlive())
def test_append_remove(self):
# derived from Itamar Shtull-Trauring's test for issue 521701
lst = []
def tester():
ct = threading.currentThread()
for i in range(1000):
lst.append(ct)
time.sleep(0.0001)
lst.remove(ct)
self.run_threads(tester)
self.assertEqual(lst, [])
def test_sort(self):
lst = []
def tester():
ct = threading.currentThread()
for i in range(1000):
lst.append(ct)
lst.sort()
lst.remove(ct)
time.sleep(0.0001)
self.run_threads(tester)
self.assertEqual(lst, [])
def test_count_reverse(self):
lst = [0,1,2,3,4,5,6,7,8,9,10,0]
def tester():
ct = threading.currentThread()
for i in range(1000):
self.assertEqual(lst[0], 0)
if random.random() > 0.5:
time.sleep(0.0001)
lst.reverse()
self.assertEqual(lst.count(0), 2)
self.assert_(lst[1] in (1,10))
self.run_threads(tester)
def test_main():
test_support.run_unittest(ListTestCase, ThreadSafetyTestCase)
if __name__ == "__main__":
test_main()
dhermes/google-cloud-python | spanner/google/cloud/spanner_v1/client.py | Python | apache-2.0 | 11,355 | 0.000264
# Copyright 2016 Google LLC All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Parent client for calling the Cloud Spanner API.
This is the base from which all interactions with the API occur.
In the hierarchy of API concepts
* a :class:`~google.cloud.spanner_v1.client.Client` owns an
:class:`~google.cloud.spanner_v1.instance.Instance`
* a :class:`~google.cloud.spanner_v1.instance.Instance` owns a
:class:`~google.cloud.spanner_v1.database.Database`
"""
from google.api_core.gapic_v1 import client_info
# pylint: disable=line-too-long
from google.cloud.spanner_admin_database_v1.gapic.database_admin_client import ( # noqa
DatabaseAdminClient,
)
from google.cloud.spanner_admin_instance_v1.gapic.instance_admin_client import ( # noqa
InstanceAdminClient,
)
# pylint: enable=line-too-long
from google.cloud._http import DEFAULT_USER_AGENT
from google.cloud.client import ClientWithProject
from google.cloud.spanner_v1 import __version__
from google.cloud.spanner_v1._helpers import _metadata_with_prefix
from google.cloud.spanner_v1.instance import DEFAULT_NODE_COUNT
from google.cloud.spanner_v1.instance import Instance
_CLIENT_INFO = client_info.ClientInfo(client_library_version=__version__)
SPANNER_ADMIN_SCOPE = "https://www.googleapis.com/auth/spanner.admin"
class InstanceConfig(object):
"""Named configurations for Spanner instances.
:type name: str
:param name: ID of the instance configuration
:type display_name: str
:param display_name: Name of the instance configuration
"""
def __init__(self, name, display_name):
self.name = name
self.display_name = display_name
@classmethod
def from_pb(cls, config_pb):
"""Construct an instance from the equvalent protobuf.
:type config_pb:
:class:`~google.spanner.v1.spanner_instance_admin_pb2.InstanceConfig`
:param config_pb: the protobuf to parse
:rtype: :class:`InstanceConfig`
:returns: an instance of this class
"""
return cls(config_pb.name, config_pb.display_name)
class Client(ClientWithProject):
"""Client for interacting with Cloud Spanner API.
.. note::
Since the Cloud Spanner API requires the gRPC transport, no
``_http`` argument is accepted by this class.
:type project: :class:`str` or :func:`unicode <unicode>`
:param project: (Optional) The ID of the project which owns the
instances, tables and data. If not provided, will
attempt to determine from the environment.
:type credentials:
:class:`OAuth2Credentials <oauth2client.client.OAuth2Credentials>` or
:data:`NoneType <types.NoneType>`
:param credentials: (Optional) The OAuth2 Credentials to use for this
client. If not provided, defaults to the Google
Application Default Credentials.
:type user_agent: str
:param user_agent: (Optional) The user agent to be used with API request.
Defaults to :const:`DEFAULT_USER_AGENT`.
:raises: :class:`ValueError <exceptions.ValueError>` if both ``read_only``
and ``admin`` are :data:`True`
"""
_instance_admin_api = None
_database_admin_api = None
_SET_PROJECT = True # Used by from_service_account_json()
SCOPE = (SPANNER_ADMIN_SCOPE,)
"""The scopes required for Google Cloud Spanner."""
def __init__(self, project=None, credentials=None, user_agent=DEFAULT_USER_AGENT):
# NOTE: This API has no use for the _http argument, but sending it
# will have no impact since the _http() @property only lazily
# creates a working HTTP object.
super(Client, self).__init__(
project=project, credentials=credentials, _http=None
)
self.user_agent = user_agent
@property
def credentials(self):
"""Getter for client's credentials.
:rtype:
:class:`OAuth2Credentials <oauth2client.client.OAuth2Credentials>`
:returns: The credentials stored on the client.
"""
return self._credentials
@property
def project_name(self):
"""Project name to be used with Spanner APIs.
.. note::
This property will not change if ``project`` does not, but the
return value is not cached.
The project name is of the form
``"projects/{project}"``
:rtype: str
:returns: The project name to be used with the Cloud Spanner Admin
API RPC service.
"""
return "projects/" + self.project
@property
def instance_admin_api(self):
"""Helper for session-related API calls."""
if self._instance_admin_api is None:
self._instance_admin_api = InstanceAdminClient(
credentials=self.credentials, client_info=_CLIENT_INFO
)
return self._instance_admin_api
@property
def database_admin_api(self):
"""Helper for session-related API calls."""
if self._database_admin_api is None:
self._database_admin_api = DatabaseAdminClient(
credentials=self.credentials, client_info=_CLIENT_INFO
)
return self._database_admin_api
def copy(self):
"""Make a copy of this client.
Copies the local data stored as simple types but does not copy the
current state of any open connections with the Cloud Bigtable API.
:rtype: :class:`.Client`
:returns: A copy of the current client.
"""
return self.__class__(
project=self.project,
credentials=self._credentials,
user_agent=self.user_agent,
)
def list_instance_configs(self, page_size=None, page_token=None):
"""List available instance configurations for the client's project.
.. _RPC docs: https://cloud.google.com/spanner/docs/reference/rpc/\
google.spanner.admin.instance.v1#google.spanner.admin.\
instance.v1.InstanceAdmin.ListInstanceConfigs
See `RPC docs`_.
:type page_size: int
:param page_size: (Optional) Maximum number of results to return.
:type page_token: str
:param page_token: (Optional) Token for fetching next page of results.
:rtype: :class:`~google.api_core.page_iterator.Iterator`
:returns:
Iterator of
:class:`~google.cloud.spanner_v1.instance.InstanceConfig`
resources within the client's project.
"""
metadata = _metadata_with_prefix(self.project_name)
path = "projects/%s" % (self.project,)
page_iter = self.instance_admin_api.list_instance_configs(
path, page_size=page_size, metadata=metadata
)
page_iter.next_page_token = page_token
page_iter.item_to_value = _item_to_instance_config
return page_iter
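    # Illustrative usage sketch (not part of the original source; the project
    # id is a placeholder):
    #
    #     client = Client(project="my-project")
    #     for config in client.list_instance_configs(page_size=10):
    #         print(config.name, config.display_name)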
def instance(
self,
instance_id,
configuration_name=None,
display_name=None,
node_count=DEFAULT_NODE_COUNT,
):
"""Factory to create a instance associated with this client.
:type instance_id: str
:param instance_id: The ID of the instance.
:type configuration_name: string
:param configuration_name:
(Optional) Name of the instance configuration used to set up the
instance's cluster, in the form:
``projects/<project>/instanceConfigs/<config>``.
**Required** for instances which do not yet exist.
:type display_name: str
|
denniskline/garage-door
|
alarm_door_open.py
|
Python
|
apache-2.0
| 2,822
| 0.006378
|
#!/usr/bin/python3
import os
import time
import logging
import getopt
import sys
from gdmod import ApplicationConfiguration
from gdmod import Database
from gdmod import DoorState
from gdmod import Sms
# ************************************************************************
# Schedule to run whenever you would like to be alerted and for whatever interval :
#
# Run every 15 minutes between the hours of 10pm and 5am
# sudo crontab -e (sudo is needed to access GPIO)
# */15 0-5,22,23 * * * <BASE_DIR>/gd/garage-door/control.sh start alarm_door_open
# ************************************************************************
def main():
# Check command options to see if a custom configuration directory was supplied
configDir = os.path.abspath(get_config_directory(sys.argv[1:], './conf'))
if not os.path.isdir(configDir):
raise ValueError('No such configuration directory exists: {}'.format(configDir))
# Read in the configurations
config = ApplicationConfiguration(configDir, ['door.ini', 'account-settings.ini'])
# Setup logger
logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
filename=config.get('door.alarm.open.log.file.directory') + '/' + config.get('door.alarm.open.log.file.name'), level=logging.INFO)
# Instantiate all the required modules
db = Database(config.get('app.database.file'))
doorState = DoorState()
sms = Sms(db, config.get('sms.account.id'), config.get('sms.account.token'), config.get('sms.account.phone.number'))
logging.info("Starting Open Door Alarm Check")
# Run the validation check with 5 retries if there is a failure
for x in range(0, 5):
try:
logging.info('Executing check')
if not doorState.is_door_closed():
                logging.warning('Door is not closed. Initiating Alarm')
alarm(sms, find_user_sms_numbers(config))
return
except:
            logging.error('Failed on attempt {} executing door open alarm'.format(x), exc_info=True)
time.sleep(300) # Wait a nice long time before retrying again
pass
logging.info("Completed Reporting")
def alarm(sms, phoneNumbers):
message = "It is late and the garage door is still open. Please close."
for phoneNumber in phoneNumbers:
sms.send(phoneNumber, message)
return message
def find_user_sms_numbers(config):
return [x.strip() for x in config.get('sms.door.command.allowed.phonenumbers').split(',')]
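# Illustrative only (numbers are placeholders, not from the original config):
# with sms.door.command.allowed.phonenumbers = "+15550001111, +15550002222",
# find_user_sms_numbers(config) returns ['+15550001111', '+15550002222'].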
def get_config_directory(args, default):
options, remainder = getopt.getopt(args, 'c:', ['configdirectory=',])
for opt, arg in options:
if opt in ('-c', '--configdirectory'):
return arg
return default
if __name__ == "__main__":
main()
|
IT-PM-OpenAdaptronik/Webapp
|
apps/projects/serializer.py
|
Python
|
mit
| 2,086
| 0.011026
|
from apps.projects.models import Experiment, Project, Datarow, Value
def project_serialize(project_id):
#get all experiments from that project into a nested list of dictionaries to post to the selected webservice
    experiment_objects = list(Experiment.objects.filter(project=project_id))
    experiments_list = []
    for experiment_object in experiment_objects:
datarow_objects = list(Datarow.objects.filter(experiment=experiment_object.id))
datarow_objects_list = []
for datarow_object in datarow_objects:
data_objects_list = list(Value.objects.filter(datarow=datarow_object.id).values_list('value', flat=True))
#convert decimal values to string to make them serializable for json
data_objects_list = list(map(lambda x:str(x),data_objects_list))
datarow_attributes = {
'name' : datarow_object.name,
'unit' : datarow_object.unit,
'description' : datarow_object.description,
'function_type' : datarow_object.function_type,
'response_node' : datarow_object.response_node,
                'response_name' : datarow_object.response_name,
'response_dir' : datarow_object.response_dir,
'reference_node' : datarow_object.reference_node,
'reference_name' : datarow_object.reference_name,
'reference_dir' : datarow_object.reference_dir,
'data_format' : datarow_object.data_format,
'data_type' : datarow_object.data_type,
'measuring_instrument' : datarow_object.measuring_instrument,
'data': data_objects_list
}
datarow_objects_list.append(datarow_attributes)
experiment_attributes = {
'name': experiment_object.name,
'description': experiment_object.description,
'datarows': datarow_objects_list
}
        experiments_list.append(experiment_attributes)
    return experiments_list
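# Illustrative shape of the returned structure (field values are placeholders):
#
#     [{'name': 'experiment 1',
#       'description': '...',
#       'datarows': [{'name': 'accel_x', 'unit': 'm/s^2', ...,
#                     'data': ['0.1', '0.2']}]}]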
|
ChrisCummins/intel-gpu-tools
|
tools/quick_dump/reg_access.py
|
Python
|
mit
| 473
| 0.042283
|
#!/usr/bin/env python3
import chipset
def read(reg):
reg = int(reg, 16)
val = chipset.intel_register_read(reg)
return val
def init():
    pci_dev = chipset.intel_get_pci_device()
ret = chipset.intel_register_access_init(pci_dev, 0)
if ret != 0:
print("Register access init failed");
return False
return True
if __name__ == "__main__":
import sys
    if init() == False:
sys.exit()
reg = sys.argv[1]
print(hex(read(reg)))
chipset.intel_register_access_fini()
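# Illustrative invocation (the register offset is a placeholder, not from this
# file); register access typically needs root:
#
#     sudo ./reg_access.py 0x70030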
|
Laharah/calibre-access
|
calibre_access/__init__.py
|
Python
|
mit
| 386
| 0.005181
|
from .calibre_access import (print_record, calibre_downloads, calibre_searches,
all_records, download_coro, search_coro, download_database,
locate_logs, get_database)
__all__ = ('print_record', 'calibre_downloads', 'calibre_searches', 'all_records',
           'download_coro', 'search_coro', 'download_database', 'locate_logs',
           'get_database')
|
aipescience/daiquiri-admin
|
daiquiri/machine.py
|
Python
|
apache-2.0
| 4,577
| 0.000874
|
import os
import pwd
import spwd
import grp
import subprocess
class Machine():
def __init__(self, dryrun=False, default_gid=2000, uid_range=[2000, 3000]):
self.dryrun = dryrun
self.default_gid = default_gid
self.uid_range = uid_range
def call(self, cmd):
if self.dryrun:
print cmd
else:
subprocess.call(cmd, shell=True)
def mkdir(self, path):
if self.dryrun:
print 'os.mkdir(\'%s\')' % path
else:
os.mkdir(path)
def chown(self, path, uid, gid):
u = int(uid)
g = int(gid)
if self.dryrun:
print 'os.chown(\'%s\', %i, %i)' % (path, u, g)
else:
os.chown(path, u, g)
def get_new_uid(self):
uid = self.uid_range[0]
for system_user in sorted(pwd.getpwall(), key=lambda k: k.pw_uid):
if system_user.pw_uid <= self.uid_range[1] and system_user.pw_uid > uid:
uid = system_user.pw_uid
return uid + 1
def get_full_name(self, user):
return user['details']['firstname'] + ' ' + user['details']['lastname']
def create_user(self, user, password):
# get the username
username = user['username']
# check if the user exists already
try:
pwd.getpwnam(username)
            raise Exception('User %s already exists.' % username)
except KeyError:
pass
# get the uid for the new user
try:
uid = int(user['details']['UID'])
except KeyError:
uid = self.get_new_uid()
# check if the uid is not already there
try:
pwd.getpwuid(uid)
raise Exception('UID %s already exists.' % uid)
except KeyError:
pass
# get the gid for the new user
try:
gid = int(user['details']['GID'])
except KeyError:
gid = self.default_gid
# check if the gid exists
try:
grp.getgrgid(gid)
except KeyError:
raise Exception('GID %s does not exist.' % gid)
# get the fullname
fullname = self.get_full_name(user)
# create new user on the machine
print 'creating user', username
self.call("useradd -m -s /bin/bash -u %i -g %i -p '%s' -c '%s' %s" % (uid, gid, password, fullname, username))
return uid, gid
def update_user(self, user, password):
# get the username
username = user['username']
# check if the user exists, if not return silently
try:
system_user = pwd.getpwnam(username)
system_password = spwd.getspnam(username)
except KeyError:
return
# enable the user if he or she was disabled
if (system_password.sp_pwd.startswith('!')):
print 'unlocking user', username
self.call("usermod -U %s" % username)
# fetch proper password
system_password = spwd.getspnam(username)
# a flag if uid or gid have changed
uid_gid_changed = False
# check full name
fullname = self.get_full_name(user)
if (fullname != system_user.pw_gecos.decode('utf-8')):
print 'updating fullname (i.e. comment) for', username
self.call(u'usermod -c \'%s\' %s' % (fullname, username))
# check uid
if (int(user['details']['UID']) != system_user.pw_uid):
print 'updating uid for', username
self.call("usermod -u '%s' %s" % (user['details']['UID'], username))
uid_gid_changed = True
# check gid
if (int(user['details']['GID']) != system_user.pw_gid):
print 'updating gid for', username
self.call("usermod -g '%s' %s" % (user['details']['GID'], username))
uid_gid_changed = True
# check password
if (password != system_password.sp_pwd):
print 'updating password for', username
self.call("usermod -p '%s' %s" % (password, username))
return uid_gid_changed
def disable_user(self, username):
# check if the user exists, if not return silently
try:
system_password = spwd.getspnam(username)
except KeyError:
return
        # check if the user is already locked, if yes return silently
if (system_password.sp_pwd.startswith('!')):
return
# lock the user
print 'locking user', username
self.call("usermod -L %s" % username)
|
projeto-si-lansab/si-lansab
|
ARDrone/libARDrone.py
|
Python
|
gpl-2.0
| 36,206
| 0.016434
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
python library for the AR.Drone 1.0 (1.11.5) and 2.0 (2.2.9).
parts of code from Bastian Venthur, Jean-Baptiste Passot, Florian Lacrampe.
tested with Python 2.7.3 and AR.Drone vanilla firmware 1.11.5.
"""
# < imports >--------------------------------------------------------------------------------------
import logging
import multiprocessing
import sys
import threading
import time
import arATCmds
import arNetwork
import arIPCThread
import arDefs  # assumption: module providing the ARDRONE_* animation constants used below
# < global variables >-----------------------------------------------------------------------------
# logging level
w_logLvl = logging.ERROR
# < class ARDrone >--------------------------------------------------------------------------------
class ARDrone ( object ):
"""
ARDrone class.
    instantiate this class to control AR.Drone and receive decoded video and navdata.
"""
# ---------------------------------------------------------------------------------------------
# ARDrone::__init__
# ---------------------------------------------------------------------------------------------
def __init__ ( self ):
# sherlock logger
# l_log = logging.getLogger ( "ARDrone::__init__" )
# l_log.setLevel ( w_logLvl )
# l_log.debug ( ">>" )
self.seq_nr = 1
self.timer_t = 0.2
self.com_watchdog_timer = threading.Timer ( self.timer_t, self.commwdg )
self.lock = threading.Lock ()
        self.speed = 0.2
        # initialized here so the first "hover" in apply_command() has a value to test
        self.last_command_is_hovering = False
self.at ( arATCmds.at_config, "general:navdata_demo", "TRUE" )
self.vid_pipe, vid_pipe_other = multiprocessing.Pipe ()
self.nav_pipe, nav_pipe_other = multiprocessing.Pipe ()
self.com_pipe, com_pipe_other = multiprocessing.Pipe ()
self.network_process = arNetwork.ARDroneNetworkProcess ( nav_pipe_other, vid_pipe_other, com_pipe_other )
self.network_process.start ()
self.ipc_thread = arIPCThread.IPCThread ( self )
self.ipc_thread.start ()
self.image = None
self.navdata = {}
self.time = 0
# sherlock logger
# l_log.debug ( "<<" )
# ---------------------------------------------------------------------------------------------
# ARDrone::apply_command
# ---------------------------------------------------------------------------------------------
def apply_command ( self, f_command ):
# sherlock logger
# l_log = logging.getLogger ( "ARDrone::apply_command" )
# l_log.setLevel ( w_logLvl )
# l_log.debug ( ">>" )
las_available_commands = [ "emergency", "hover", "land",
"move_backward", "move_down", "move_forward",
"move_left", "move_right", "move_up",
"takeoff", "turn_left", "turn_right", ]
        # validate command
if ( f_command not in las_available_commands ):
# sherlock logger
# l_log.error ( "Command %s not recognized !" % f_command )
# sherlock logger
# l_log.debug ( "<< (E01)" )
return
if ( "hover" != f_command ):
self.last_command_is_hovering = False
if ( "emergency" == f_command ):
self.reset ()
elif (( "hover" == f_command ) and ( not self.last_command_is_hovering )):
self.hover ()
self.last_command_is_hovering = True
elif ( "land" == f_command ):
self.land ()
self.last_command_is_hovering = True
elif ( "move_backward" == f_command ):
self.move_backward ()
elif ( "move_forward" == f_command ):
self.move_forward ()
elif ( "move_down" == f_command ):
self.move_down ()
elif ( "move_up" == f_command ):
self.move_up ()
elif ( "move_left" == f_command ):
self.move_left ()
elif ( "move_right" == f_command ):
self.move_right ()
elif ( "takeoff" == f_command ):
self.takeoff ()
self.last_command_is_hovering = True
elif ( "turn_left" == f_command ):
self.turn_left ()
elif ( "turn_right" == f_command ):
self.turn_right ()
# sherlock logger
# l_log.debug ( "<<" )
# ---------------------------------------------------------------------------------------------
# ARDrone::at
# ---------------------------------------------------------------------------------------------
def at ( self, f_cmd, *args, **kwargs ):
"""
wrapper for the low level at commands.
this method takes care that the sequence number is increased after each at command and the
watchdog timer is started to make sure the drone receives a command at least every second.
"""
# sherlock logger
# l_log = logging.getLogger ( "ARDrone::at" )
# l_log.setLevel ( w_logLvl )
# l_log.debug ( ">>" )
self.lock.acquire ()
self.com_watchdog_timer.cancel ()
f_cmd ( self.seq_nr, *args, **kwargs )
self.seq_nr += 1
self.com_watchdog_timer = threading.Timer ( self.timer_t, self.commwdg )
self.com_watchdog_timer.start ()
self.lock.release ()
# sherlock logger
# l_log.debug ( "<<" )
# ---------------------------------------------------------------------------------------------
# ARDrone::commwdg
# ---------------------------------------------------------------------------------------------
def commwdg ( self ):
"""
        communication watchdog signal.
        this needs to be sent regularly to keep the communication with the drone alive.
"""
# sherlock logger
# l_log = logging.getLogger ( "ARDrone::commwdg" )
# l_log.setLevel ( w_logLvl )
# l_log.debug ( ">>" )
self.at ( arATCmds.at_comwdg )
# sherlock logger
# l_log.debug ( "<<" )
# ---------------------------------------------------------------------------------------------
# ARDrone::event_boom
# ---------------------------------------------------------------------------------------------
def event_boom ( self ):
"""
boom event
"""
# sherlock logger
# l_log = logging.getLogger ( "ARDrone::event_boom" )
# l_log.setLevel ( w_logLvl )
# l_log.debug ( ">>" )
# animation to play
li_anim = arDefs.ARDRONE_LED_ANIMATION_DOUBLE_MISSILE
        # frequency in Hz of the animation
lf_freq = 2.
# total duration in seconds of the animation
lf_secs = 4
# play LED animation
self.at ( arATCmds.at_led, li_anim, lf_freq, lf_secs )
# animation to play
li_anim = arDefs.ARDRONE_ANIMATION_THETA_30_DEG
# total duration in seconds of the animation
lf_secs = 1000
# play motion animation
self.at ( arATCmds.at_anim, li_anim, lf_secs )
# sherlock logger
# l_log.debug ( "<<" )
# ---------------------------------------------------------------------------------------------
# ARDrone::event_thetamixed
# ---------------------------------------------------------------------------------------------
def event_thetamixed ( self ):
"""
make the drone execute thetamixed !
"""
# sherlock logger
# l_log = logging.getLogger ( "ARDrone::event_theta
|
mixed" )
# l_log.setLevel ( w_logLvl )
# l_log.debug ( ">>" )
# animation to play
        li_anim = arDefs.ARDRONE_LED_ANIMATION_DOUBLE_MISSILE
        # frequency in Hz of the animation
lf_freq = 2.
# total duration in seconds of the animation
lf_secs = 4
# play LED animation
self.at ( arATCmds.at_led, li_anim, lf_freq, lf_secs )
# animation to play
li_anim = arDefs.ARDRONE_ANIMATION_THETA_MIXED
# total duration in seconds of the animation
lf_secs = 5000
# play motio
|
altendky/canmatrix
|
src/canmatrix/cli/convert.py
|
Python
|
bsd-2-clause
| 10,309
| 0.006208
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2013, Eduard Broecker
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that
# the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this list of conditions and the
# following disclaimer.
# Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
# DAMAGE.
from __future__ import absolute_import, division, print_function
import logging
import sys
import click
import canmatrix.convert
import canmatrix.formats  # needed by get_formats() below
import canmatrix.log
logger = logging.getLogger(__name__)
def get_formats():
input = ""
output = ""
for suppFormat, features in canmatrix.formats.supportedFormats.items():
if 'load' in features:
input += suppFormat + "\n"
if 'dump' in features:
output += suppFormat + "\n"
return (input, output)
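# Illustrative only: get_formats() returns a (readable, writable) pair of
# newline-separated format names used to build the --help text below, e.g.
# (format names are placeholders):
#
#     ("arxml\ndbc\ndbf\nsym\n", "dbc\nxlsx\n")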
@click.command()
# global switches
@click.option('-v', '--verbose', 'verbosity', count=True, default=1)
@click.option('-s', '--silent/--no-silent', is_flag=True, default=False, help="don't print status messages to stdout. (only errors)")
@click.option('-f', '--force_output', help="enforce output format, ignoring output file extension (e.g., -f csv).\nSupported formats for writing:\n" + get_formats()[1])
@click.option('-i', '--input_format', 'import_type', help="give hint for input format\nSupported formats for reading:\n" + get_formats()[0])
@click.option('--ignoreEncodingErrors/--no-ignoreEncodingErrors', 'ignoreEncodingErrors', default=False, help="ignore character encoding errors during export (dbc,dbf,sym)")
# manipulation and filter switches
@click.option('--deleteObsoleteDefines/--no-deleteObsoleteDefines', 'deleteObsoleteDefines', default=False, help="delete defines from all ECUs, frames and Signals\nExample --deleteObsoleteDefines")
@click.option('--deleteEcu', 'deleteEcu', help="delete Ecu from databases. (comma separated list)\nSyntax: --deleteEcu=myEcu,mySecondEcu")
@click.option('--renameEcu', 'renameEcu', help="rename Ecu from databases. (comma separated list)\nSyntax: --renameEcu=myOldEcu:myNewEcu,mySecondEcu:mySecondNewEcu")
@click.option('--deleteSignal', 'deleteSignal', help="delete Signal from databases. (comma separated list)\nSyntax: --deleteSignal=mySignal1,mySecondSignal")
@click.option('--renameSignal', 'renameSignal', help="rename Signal from databases. (comma separated list)\nSyntax: --renameSignal=myOldSignal:myNewSignal,mySecondSignal:mySecondNewSignal")
@click.option('--deleteZeroSignals/--no-deleteZeroSignals', 'deleteZeroSignals', default=False, help="delete zero length signals (signals with 0 bit length) from matrix\ndefault False")
@click.option('--deleteSignalAttributes', 'deleteSignalAttributes', help="delete attributes from all signals\nExample --deleteSignalAttributes GenMsgSomeVar,CycleTime")
@click.option('--deleteFrame', 'deleteFrame', help="delete Frame from databases. (comma separated list)\nSyntax: --deleteFrame=myFrame1,mySecondFrame")
@click.option('--renameFrame', 'renameFrame', help="rename Frame from databases. (comma separated list)\nSyntax: --renameFrame=myOldFrame:myNewFrame,mySecondFrame:mySecondNewFrame")
@click.option('--addFrameReceiver', 'addFrameReceiver', help="add receiver Ecu to frame(s) (comma separated list)\nSyntax: --addFrameReceiver=framename:myNewEcu,mySecondEcu:myNEWEcu")
@click.option('--changeFrameId', 'changeFrameId', help="change frame.id in database\nSyntax: --changeFrameId=oldId:newId")
@click.option('--setFrameFd', 'setFrameFd', help="set Frame from database to canfd. (comma separated list)\nSyntax: --setFrameFd=myFrame1,mySecondFrame")
@click.option('--unsetFrameFd', 'unsetFrameFd', help="set Frame from database to normal (not FD). (comma separated list)\nSyntax: --unsetFrameFd=myFrame1,mySecondFrame")
@click.option('--recalcDLC', 'recalcDLC', help="recalculate dlc; max: use maximum of stored and calculated dlc; force: force new calculated dlc")
@click.option('--skipLongDlc', 'skipLongDlc', help="skip all Frames with dlc bigger than given threshold")
@click.option('--cutLongFrames', 'cutLongFrames', help="cut all signals out of Frames with dlc bigger than given threshold")
@click.option('--deleteFrameAttributes', 'deleteFrameAttributes', help="delete attributes from all frames\nExample --deleteFrameAttributes GenMsgSomeVar,CycleTime")
@click.option('--ecus', help="Copy only given ECUs (comma separated list) to target matrix; suffix 'rx' or 'tx' for selection: Example: --ecus FirstEcu:rx,SecondEcu:tx,ThirdEcu")
@click.option('--frames', help="Copy only given Frames (comma separated list) to target matrix")
@click.option('--signals', help="Copy only given Signals (comma separated list) to target matrix just as 'free' signals without containing frame")
@click.option('--merge', help="merge additional can databases.\nSyntax: --merge filename[:ecu=SOMEECU][:frame=FRAME1][:frame=FRAME2],filename2")
# arxml switches
@click.option('--arxmlIgnoreClusterInfo/--no-arxmlIgnoreClusterInfo', 'arxmlIgnoreClusterInfo', default=False, help="Ignore any can cluster info from arxml; Import all frames in one matrix\ndefault False")
@click.option('--arxmlUseXpath/--no-arxmlUseXpath', 'arxmlUseXpath', default=False, help="Use experimental Xpath-Implementation for resolving AR-Paths; \ndefault False")
@click.option('--arxmlExportVersion', 'arVersion', default="3.2.3", help="Set output AUTOSAR version\ncurrently only 3.2.3 and 4.1.0 are supported\ndefault 3.2.3")
# dbc switches
@click.option('--dbcImportEncoding', 'dbcImportEncoding', default="iso-8859-1", help="Import charset of dbc (relevant for units), maybe utf-8\ndefault iso-8859-1")
@click.option('--dbcImportCommentEncoding', 'dbcImportCommentEncoding', default="iso-8859-1", help="Import charset of Comments in dbc\ndefault iso-8859-1")
@click.option('--dbcExportEncoding', 'dbcExportEncoding', default="iso-8859-1", help="Export charset of dbc (relevant for units), maybe utf-8\ndefault iso-8859-1")
@click.option('--dbcExportCommentEncoding', 'dbcExportCommentEncoding', default="iso-8859-1", help="Export charset of comments in dbc\ndefault iso-8859-1")
@click.option('--dbcUniqueSignalNames/--no-dbcUniqueSignalNames', 'dbcUniqueSignalNames', default=True, help="Check if signal names are unique per frame")
# dbf switches
@click.option('--dbfImportEncoding', 'dbfImportEncoding', default="iso-8859-1", help="Import charset of dbf, maybe utf-8\ndefault iso-8859-1")
@click.option('--dbfExportEncoding', 'dbfExportEncoding', default="iso-8859-1", help="Export charset of dbf, maybe utf-8\ndefault iso-8859-1")
# sym switches
@click.option('--symImportEncoding', 'symImportEncoding', default="iso-8859-1", help="Import charset of sym format, maybe utf-8\ndefault iso-8859-1")
@click.option('--symExportEncoding', 'symExportEncoding', default="iso-8859-1", help="Export charset of sym format, maybe utf-8\ndefault iso-8859-1")
# xls/csv switches
@click.option('--xlsMotorolaBitFormat', 'xlsMotorolaBitFormat', default="msbreverse", help="Excel format for startbit of motorola codescharset signals\nValid values: msb, lsb, msbreverse\n default msbreverse")
@click.option('--additionalFrameAttributes', 'additionalFrameAttributes', default = "", help="ap
|
apple/swift
|
utils/pass-pipeline/src/pass_pipeline_library.py
|
Python
|
apache-2.0
| 2,942
| 0
|
import pass_pipeline as ppipe
import passes as p
def simplifycfg_silcombine_passlist():
return ppipe.PassList([
p.SimplifyCFG,
p.SILCombine,
p.SimplifyCFG,
])
def highlevel_loopopt_passlist():
return ppipe.PassList([
p.LowerAggregateInstrs,
p.SILCombine,
p.SROA,
p.Mem2Reg,
p.DCE,
p.SILCombine,
simplifycfg_silcombine_passlist(),
p.LoopRotate,
p.DCE,
p.CSE,
p.SILCombine,
p.SimplifyCFG,
p.ABCOpt,
p.DCE,
p.COWArrayOpts,
p.DCE,
p.SwiftArrayOpts,
])
def lowlevel_loopopt_passlist():
return ppipe.PassList([
p.LICM,
p.DCE,
p.CSE,
p.SILCombine,
p.SimplifyCFG,
])
def inliner_for_optlevel(optlevel):
if optlevel == 'high':
return p.EarlyInliner
elif optlevel == 'mid':
return p.PerfInliner
elif optlevel == 'low':
return p.LateInliner
else:
raise RuntimeError('Unknown opt level')
def ssapass_passlist(optlevel):
return ppipe.PassList([
simplifycfg_silcombine_passlist(),
p.AllocBoxToStack,
p.CopyForwarding,
p.LowerAggregateInstrs,
p.SILCombine,
p.SROA,
p.Mem2Reg,
        p.PerformanceConstantPropagation,
p.DCE,
p.CSE,
p.SILCombine,
simplifycfg_silcombine_passlist(),
p.GlobalLoadStoreOpts,
# Need to add proper argument here
p.CodeMotion,
p.GlobalARCOpts,
p.SpeculativeDevirtualizer,
p.SILLinker,
inliner_for_optlevel(optlevel),
p.SimplifyCFG,
p.CodeMotion,
p.GlobalARCOpts,
])
def lower_passlist():
return ppipe.PassList([
p.DeadFunctionElimination,
p.DeadObjectElimination,
p.GlobalOpt,
p.CapturePropagation,
p.ClosureSpecializer,
p.SpeculativeDevirtualizer,
p.FunctionSignatureOpts,
])
def normal_passpipelines():
result = []
x = ppipe.PassPipeline('HighLevel', {'name': 'run_n_times', 'count': 2})
x.add_pass(ssapass_passlist('high'))
result.append(x)
x = ppipe.PassPipeline('EarlyLoopOpt', {'name': 'run_n_times', 'count': 1})
x.add_pass(highlevel_loopopt_passlist())
result.append(x)
x = ppipe.PassPipeline('MidLevelOpt', {'name': 'run_n_times', 'count': 2})
x.add_pass(ssapass_passlist('mid'))
result.append(x)
x = ppipe.PassPipeline('Lower', {'name': 'run_to_fixed_point'})
x.add_pass(lower_passlist())
result.append(x)
x = ppipe.PassPipeline('LowLevel', {'name': 'run_n_times', 'count': 1})
x.add_pass(ssapass_passlist('low'))
result.append(x)
x = ppipe.PassPipeline('LateLoopOpt', {'name': 'run_n_times', 'count': 1})
x.add_pass([lowlevel_loopopt_passlist(), p.DeadFunctionElimination])
result.append(x)
return result
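# Illustrative only (consumption API assumed, not shown in this file): the
# list built above is a sequence of PassPipeline objects a driver can iterate:
#
#     for pipeline in normal_passpipelines():
#         print(pipeline)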
|
wheeler-microfluidics/microdrop
|
microdrop/core_plugins/command_plugin/microdrop_plugin.py
|
Python
|
bsd-3-clause
| 3,275
| 0.000305
|
import logging
from logging_helpers import _L
from pygtkhelpers.gthreads import gtk_threadsafe
import threading
import zmq
from .plugin import CommandZmqPlugin
from ...app_context import get_hub_uri
from ...plugin_helpers import hub_execute
from ...plugin_manager import (PluginGlobals, SingletonPlugin, IPlugin,
implements)
logger = logging.getLogger(__name__)
PluginGlobals.push_env('microdrop')
class CommandPlugin(SingletonPlugin):
"""
This class is automatically registered with the PluginManager.
"""
implements(IPlugin)
plugin_name = 'microdrop.command_plugin'
def __init__(self):
self.name = self.plugin_name
self.plugin = None
self.stopped = threading.Event()
def on_plugin_enable(self):
"""
Handler called once the plugin instance is enabled.
Note: if you inherit your plugin from AppDataController and don't
implement this handler, by default, it will automatically load all
        app options from the config file. If you decide to override the
default handler, you should call:
AppDataController.on_plugin_enable(self)
to retain this functionality.
.. versionchanged:: 2.11.2
            Launch background thread to monitor plugin ZeroMQ command socket.
Use :func:`gtk_threadsafe` decorator to wrap thread-related code
            to ensure GTK/GDK are initialized properly for a threaded
application.
"""
self.cleanup()
zmq_ready = threading.Event()
def _check_command_socket(wait_duration_s):
'''
Process each incoming message on the ZeroMQ plugin command socket.
Stop listening if :attr:`stopped` event is set.
'''
self.stopped.clear()
self.plugin = CommandZmqPlugin(self, self.name, get_hub_uri())
# Initialize sockets.
self.plugin.reset()
zmq_ready.set()
while not self.stopped.wait(wait_duration_s):
try:
msg_frames = (self.plugin.command_socket
.recv_multipart(zmq.NOBLOCK))
except zmq.Again:
pass
else:
self.plugin.on_command_recv(msg_frames)
thread = threading.Thread(target=_check_command_socket,
args=(0.01, ))
thread.daemon = True
thread.start()
zmq_ready.wait()
def cleanup(self):
self.stopped.set()
if self.plugin is not None:
self.plugin = None
def on_plugin_enabled(self, *args, **kwargs):
# A plugin was enabled. Call `get_commands()` on self to trigger
# refresh of commands in case the enabled plugin registered a command.
hub_execute(self.name, 'get_commands')
_L().info('refreshed registered commands.')
def on_plugin_disable(self):
"""
Handler called once the plugin instance is disabled.
"""
self.cleanup()
def on_app_exit(self):
"""
Handler called just before the MicroDrop application exits.
"""
self.cleanup()
PluginGlobals.pop_env()
|
google-research/disentanglement_lib
|
disentanglement_lib/config/abstract_reasoning_study_v1/stage1/sweep.py
|
Python
|
apache-2.0
| 5,267
| 0.010442
|
# coding=utf-8
# Copyright 2018 The DisentanglementLib Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Hyperparameter sweeps and configs for stage 1 of "abstract_reasoning_study".
Are Disentangled Representations Helpful for Abstract Visual Reasoning?
Sjoerd van Steenkiste, Francesco Locatello, Juergen Schmidhuber, Olivier Bachem.
NeurIPS, 2019.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from disentanglement_lib.config import study
from disentanglement_lib.utils import resources
import disentanglement_lib.utils.hyperparams as h
from six.moves import range
def get_datasets():
"""Returns all the data sets."""
return h.sweep(
"dataset.name",
h.categorical(["shapes3d", "abstract_dsprites"]))
def get_num_latent(sweep):
return h.sweep("encoder.num_latent", h.discrete(sweep))
def get_seeds(num):
"""Returns random seeds."""
return h.sweep("model.random_seed", h.categorical(list(range(num))))
def get_default_models():
"""Our default set of models (6 model * 6 hyperparameters=36 models)."""
# BetaVAE config.
model_name = h.fixed("model.name", "beta_vae")
model_fn = h.fixed("model.model", "@vae()")
betas = h.sweep("vae.beta", h.discrete([1., 2., 4., 6., 8., 16.]))
config_beta_vae = h.zipit([model_name, betas, model_fn])
# AnnealedVAE config.
model_name = h.fixed("model.name", "annealed_vae")
model_fn = h.fixed("model.model", "@annealed_vae()")
iteration_threshold = h.fixed("annealed_vae.iteration_threshold", 100000)
c = h.sweep("annealed_vae.c_max", h.discrete([5., 10., 25., 50., 75., 100.]))
gamma = h.fixed("annealed_vae.gamma", 1000)
config_annealed_beta_vae = h.zipit(
[model_name, c, iteration_threshold, gamma, model_fn])
# FactorVAE config.
model_name = h.fixed("model.name", "factor_vae")
model_fn = h.fixed("model.model", "@factor_vae()")
discr_fn = h.fixed("discriminator.discriminator_fn", "@fc_discriminator")
gammas = h.sweep("factor_vae.gamma",
h.discrete([10., 20., 30., 40., 50., 100.]))
config_factor_vae = h.zipit([model_name, gammas, model_fn, discr_fn])
# DIP-VAE-I config.
model_name = h.fixed("model.name", "dip_vae_i")
model_fn = h.fixed("model.model", "@dip_vae()")
lambda_od = h.sweep("dip_vae.lambda_od",
h.discrete([1., 2., 5., 10., 20., 50.]))
lambda_d_factor = h.fixed("dip_vae.lambda_d_factor", 10.)
dip_type = h.fixed("dip_vae.dip_type", "i")
config_dip_vae_i = h.zipit(
[model_name, model_fn, lambda_od, lambda_d_factor, dip_type])
# DIP-VAE-II config.
model_name = h.fixed("model.name", "dip_vae_ii")
model_fn = h.fixed("model.model", "@dip_vae()")
lambda_od = h.sweep("dip_vae.lambda_od",
h.discrete([1., 2., 5., 10., 20., 50.]))
lambda_d_factor = h.fixed("dip_vae.lambda_d_factor", 1.)
dip_type = h.fixed("dip_vae.dip_type", "ii")
config_dip_vae_ii = h.zipit(
[model_name, model_fn, lambda_od, lambda_d_factor, dip_type])
# BetaTCVAE config.
model_name = h.fixed("model.name", "beta_tc_vae")
model_fn = h.fixed("model.model", "@beta_tc_vae()")
betas = h.sweep("beta_tc_vae.beta", h.discrete([1., 2., 4., 6., 8., 10.]))
  config_beta_tc_vae = h.zipit([model_name, model_fn, betas])
all_models = h.chainit([
config_beta_vae, config_factor_vae, config_dip_vae_i, config_dip_vae_ii,
config_beta_tc_vae, config_annealed_beta_vae
])
return all_models
def get_config():
"""Returns the hyperparameter configs for different experiments."""
arch_enc = h.fixed("encoder.encoder_fn",
|
"@conv_encoder", length=1)
arch_dec = h.fixed("decoder.decoder_fn", "@deconv_decoder", length=1)
architecture = h.zipit([arch_enc, arch_dec])
return h.product([
get_datasets(),
architecture,
get_default_models(),
get_seeds(5),
])
class AbstractReasoningStudyV1(study.Study):
"""Defines the study for the paper."""
def get_model_config(self, model_num=0):
"""Returns model bindings and config file."""
config = get_config()[model_num]
model_bindings = h.to_bindings(config)
model_config_file = resources.get_file(
"config/abstract_reasoning_study_v1/stage1/model_configs/shared.gin")
return model_bindings, model_config_file
def get_postprocess_config_files(self):
"""Returns postprocessing config files."""
return list(
resources.get_files_in_folder(
"config/abstract_reasoning_study_v1/stage1/postprocess_configs/"))
def get_eval_config_files(self):
"""Returns evaluation config files."""
return list(
resources.get_files_in_folder(
"config/abstract_reasoning_study_v1/stage1/metric_configs/"))
|
BertrandBordage/django-tree
|
run_benchmark.py
|
Python
|
bsd-3-clause
| 233
| 0
|
#!/usr/bin/env python
import os
import django
if __name__ == '__main__':
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'benchmark.settings')
django.setup()
    from benchmark.base import Benchmark
Benchmark().run()
|
Nodoka/Bioquality
|
graphing/tdwg_scatter.py
|
Python
|
mit
| 1,350
| 0.002963
|
#!/usr/local/bin/ipython -i
"""
A scatter graph of grid count vs grid area.
"""
import numpy as np
import matplotlib.pyplot as plt
# extract data from csv
file_name = "../data/tdwgsp_filtered.csv"
# columns (filtered):
# 1 - star_infs
# 2 - tdwgtotals
# 3 - tdwgareas
star_infs = np.genfromtxt(file_name, delimiter=',', dtype=None, skip_header=1, usecols=1)
tdwg_count = np.genfromtxt(file_name, delimiter=',', dtype=None, skip_header=1, usecols=2)
tdwg_area = np.genfromtxt(file_name, delimiter=',', dtype=None, skip_header=1, usecols=3)
# remove "" from the text string
stars = [star[1:-1] for star in star_infs]
colours = map(lambda star_colour: 'k' if star_colour == 'BK' else 'y' if star_colour == 'GD' else 'b' if star_colour == 'BU' else 'g' if star_colour == 'GN' else 'w', stars)
fig = plt.figure()
ax = fig.add_subplot(111)
# grid_land/10000 to rescale the range
ax.scatter(tdwg_count, tdwg_area, c=colours, alpha=0.5)
ax.set_xlim(0, 250.1)
ax.set_ylim(0, 12000.1)
# uncomment to manually set ticks
# xtix = np.arange(0, 380000.1, 100000)
# ytix = np.arange(0, 1000.1, 200)
# ax.xaxis.set_ticks(xtix)
# ax.yaxis.set_ticks(ytix)
ax.set_xlabel('Number of TDWG Level 3 Code', fontsize=18)
ax.set_ylabel('Summed Area of TDWG Level 3 Code', fontsize=18)
ax.set_title('Species Geographic Range Size', fontsize=22)
ax.grid(True)
plt.show()
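# An equivalent, arguably clearer colour mapping (a suggested sketch, not part
# of the original script; behaviour matches the lambda chain above):
#
#     colour_map = {'BK': 'k', 'GD': 'y', 'BU': 'b', 'GN': 'g'}
#     colours = [colour_map.get(star, 'w') for star in stars]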
|
gigitux/lollypop
|
src/pop_next.py
|
Python
|
gpl-3.0
| 2,854
| 0
|
# Copyright (c) 2014-2015 Cedric Bellegarde <cedric.bellegarde@adishatz.org>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from gi.repository import Gtk
from lollypop.define import Lp, ArtSize, Shuffle
class NextPopover(Gtk.Popover):
"""
Popover with next track
"""
def __init__(self):
"""
Init popover
"""
Gtk.Popover.__init__(self)
self.set_modal(False)
self.get_style_context().add_class('osd-popover')
builder = Gtk.Builder()
builder.add_from_resource('/org/gnome/Lollypop/NextPopover.ui')
builder.connect_signals(self)
self.add(builder.get_object('widget'))
self._title_label = builder.get_object('title')
self._artist_label = builder.get_object('artist')
self._cover = builder.get_object('cover')
        self._skip_btn = builder.get_object('skip_btn')
        # initialize so do_hide() is safe even if do_show() was never called
        self._signal_id = None
def update(self, player=None):
"""
Update widget with current track
"""
if Lp.player.is_party() or\
Lp.settings.get_enum('shuffle') == Shuffle.TRACKS:
self._skip_btn.show()
self._artist_label.set_text(Lp.player.next_track.artist)
self._title_label.set_text(Lp.player.next_track.title)
art = Lp.art.get_album(Lp.player.next_track.album_id,
ArtSize.MEDIUM)
if art is not None:
            self._cover.set_from_surface(art)
del art
self._cover.set_tooltip_text(Lp.player.next_track.album.name)
self._cover.show()
else:
self._cover.hide()
def do_show(self):
"""
Connect signal
"""
self._signal_id = Lp.player.connect('queue-changed', self.update)
Gtk.Popover.do_show(self)
def do_hide(self):
"""
Disconnect signal
"""
|
if self._signal_id is not None:
Lp.player.disconnect(self._signal_id)
Gtk.Popover.do_hide(self)
#######################
# PRIVATE #
#######################
def _on_skip_btn_clicked(self, btn):
"""
Skip next track
@param btn as Gtk.Button
"""
Lp.player.set_next()
Lp.player.emit('queue-changed')
|
ytec/instaforex-web
|
app/pages/admin.py
|
Python
|
gpl-3.0
| 201
| 0
|
from django.contrib import admin
from cms.admin.pageadmin import PageAdmin
from cms.models import Page
from .models import page, Sub_Pages
admin.site.register(page)
admin.site.register(Sub_Pages)
|
TylerTemp/tomorrow
|
lib/db/jolla.py
|
Python
|
gpl-3.0
| 11,853
| 0.000169
|
import pymongo
import logging
import time
# import sys
# import os
# sys.path.insert(0, os.path.normpath(os.path.join(__file__, '..', '..', '..')))
# from lib.db.base import Base
from .base import Base
logger = logging.getLogger('db.jolla')
client = pymongo.MongoClient()
db = client['jolla']
# TODO: support more sites (maybe)
class User(Base):
collection = db.user
logger = logging.getLogger('jolla.db.user')
TOMORROW = 'https://tomorrow.comes.today'
DEACTIVE = 0
NORMAL = 1
ADMIN = 2
ROOT = 3
_default = {
'_id': None,
'source': None, # source site
'uid': None, # source site uid
'email': None,
'home': None,
'name': None,
'photo': None,
'type': NORMAL,
'zh': {}, # intro, donate
'en': {}
}
def __init__(self, _id=None, lang='zh'):
super(User, self).__init__()
self.__dict__['lang'] = lang
if _id is not None:
result = self.collection.find_one({'_id': _id})
if result is not None:
self.update(result)
def __str__(self):
return str(self.name)
def __getattr__(self, item):
if item == 'lang':
return self.__dict__['lang']
attrs = self.__dict__['__info__']
if item in ('intro', 'donate'):
target = attrs.get(self.lang, {})
return target.get(item, None)
return super(User, self).__getattr__(item)
def __setattr__(self, item, value):
if item == 'lang':
self.__dict__['lang'] = value
return
attrs = self.__dict__['__info__']
if item in ('intro', 'donate'):
target = attrs.setdefault(self.lang, {})
target[item] = value
return
return super(User, self).__setattr__(item, value)
def _before_save(self):
attrs = self.__dict__['__info__']
attrs.pop('token', None)
attrs.pop('expire_at', None)
return super(User, self)._before_save()
@classmethod
def by_source_id(cls, source, uid):
result = cls.collection.find_one(
{'source': source, 'uid': uid})
self = cls()
if result:
self.update(result)
else:
self.source = source
self.uid = uid
return self
@classmethod
def all(cls):
return cls.collection.find({})
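# Illustrative only (field values are placeholders): looking up, or lazily
# creating, a user keyed by its OAuth source; persisting it assumes a save()
# helper on the shared Base class, which is not shown in this file:
#
#     u = User.by_source_id('github', 12345)
#     u.name = 'alice'
#     u.save()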
class Article(Base):
collection = db.article
logger = logging.getLogger('jolla.db.article')
AWAIT = 0
ACCEPTED = 1
EJECTED = 2
_default = {
'_id': None,
'slug': None,
'status': AWAIT,
'author': None, # _id in User
'title': None,
'description': None,
'content': None,
'create_time': None,
'edit_time': None,
'cover': None,
'banner': None,
'tag': [],
'source': {}, # link, title, author
'en': {}, # title, description, content
}
def __init__(self, slug=None, lang='zh'):
super(Article, self).__init__()
if slug:
result = self.collection.find_one({'slug': slug})
if result is None:
self.slug = slug
else:
self.update(result)
self.lang = lang
def __getattr__(self, item):
if item == 'lang':
return self.__dict__.get('lang', 'zh')
default = self._default
attrs = self.__dict__['__info__']
if item in ('title', 'content', 'description') and self.lang == 'en':
en = self.en
if en:
return en[item]
else:
return None
# if item == 'slug' and attrs.get('slug', None) is None:
# link = self.source.get('link', None)
# if link is None:
# return None
# slug = link.split('/')[-1]
# if (slug.endswith('.html') or
# slug.endswith('.htm') or
# slug.endswith('.asp')):
# slug = ''.join(slug.split('.')[:-1])
# return slug
if item not in attrs and item in default:
default_val = default[item]
if default_val == {}:
attrs[item] = default_val = {} # re-bind to a new dict
elif default_val == []:
attrs[item] = default_val = []
return default_val
return super(Article, self).__getattr__(item)
def __setattr__(self, key, value):
if key == 'lang':
self.__dict__[key] = value
return
attrs = self.__dict__['__info__']
if self.lang == 'en' and key in ('title', 'content', 'description'):
target = attrs.setdefault('en', {})
target[key] = value
return
return super(Article, self).__setattr__(key, value)
def get(self, attr):
assert attr in ('title', 'description', 'content')
lang = self.lang
other_target = self.__info__ if lang == 'en' else self.en
result = getattr(self, attr, None)
if not result:
result = other_target.get(attr, None)
return result
def lang_fit(self):
lang = self.lang
if lang == 'en':
return lang in self.__dict__['__info__']
return lang == 'zh'
def other_lang(self):
lang = self.lang
if lang == 'en' and self.__info__.get('title', None):
return 'zh'
elif lang == 'zh' and self.__info__.get('en', None):
return 'en'
return None
def _before_save(self):
if self.create_time is None:
self.create_time = time.time()
self.edit_time = time.time()
return super(Article, self)._before_save()
@classmethod
def by_user_link(cls, uid, link):
result = cls.collection.find_one({'author': uid, 'source.link': link})
ins = cls()
if result:
ins.update(result)
else:
ins.author = uid
ins.source['link'] = link
return ins
@classmethod
def by_user(cls, uid):
        return cls.collection.find({'author': uid}).sort(
(
                ('create_time', pymongo.DESCENDING),
)
)
@classmethod
def all(cls, offset=0, limit=None):
result = cls.collection.find({}).sort(
(
('create_time', pymongo.DESCENDING),
)
)
if limit is None:
return result[offset:]
return result[offset:offset + limit]
@classmethod
def all_shown(cls, offset=0, limit=None):
result = cls.collection.find({'status': cls.ACCEPTED}).sort(
(
('create_time', pymongo.DESCENDING),
)
)
if limit is None:
return result[offset:]
return result[offset:offset + limit]
@classmethod
def eject_except(cls, link, not_id, status=EJECTED):
collect = cls.collection
return collect.update_many(
{'_id': {'$ne': not_id}, 'source.link': link},
{'$set': {'status': status}}
)
class Author(Base):
collection = db.author
logger = logging.getLogger('jolla.db.author')
_default = {
'_id': None,
'name': None,
'photo': None,
'intro': None,
}
def __init__(self, name=None):
super(Author, self).__init__()
if name is not None:
result = self.collection.find_one({'name': name})
if result is None:
self.name = name
else:
self.update(result)
def __str__(self):
return str(self.name)
@classmethod
def all(cls):
return cls.collection.find({})
class Source(Base):
collection = db.source
logger = logging.getLogger('jolla.db.source')
_default = {
'_id': None,
'link': None, # required. Other attrs are only for suggestion
'title': None,
'author': None,
'banner': None,
'cover': None,
'tag': [],
'create_time': None,
|
flypy/flypy
|
flypy/tests/test_control_flow.py
|
Python
|
bsd-2-clause
| 2,894
| 0.001037
|
# -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import
import unittest
from flypy import jit
class TestControlFlow(unittest.TestCase):
def test_loop_carried_dep_promotion(self):
@jit
def f(n):
sum = 0
for i in range(n):
sum += float(i)
return sum
self.assertEqual(f(10), 45.0)
def test_nested_rectangular(self):
@jit
def f(n):
sum = 0
for i in range(n):
for j in range(n):
for k in range(n):
sum += i * j
return sum
        self.assertEqual(f(3), f.py_func(3))
def test_for_continue(self):
@jit
def f(n):
sum = 0
for i in range(n):
if i > n - 4:
continue
sum += i
return sum
        self.assertEqual(f(10), f.py_func(10))
def test_for_break(self):
@jit
def f(n):
sum = 0
for i in range(n):
if i > n - 4:
break
sum += i
return sum
self.assertEqual(f(10), f.py_func(10))
def test_while_continue(self):
@jit
def f(n):
i = sum = 0
while i < n:
i += 1
if i > n - 4:
continue
sum += i
return sum
self.assertEqual(f(10), f.py_func(10))
def test_while_break(self):
@jit
def f(n):
i = sum = 0
while i < n:
if i > n - 4:
break
sum += i
i += 1
return sum
self.assertEqual(f(10), f.py_func(10))
def test_moderately_complicated(self):
@jit
def f(n):
i = 0
sum = 0
for i in range(n):
if i % 4 > 2:
while i > 0:
sum += i
i -= 1
return sum
self.assertEqual(f(10), f.py_func(10))
def test_complicated(self):
@jit
def f(n):
sum = 0
for i in range(n):
if i % 4 > 2:
while i > 0:
for j in range(n):
i -= 1
for k in range(n):
while k != 0:
sum += i * j
break
else:
continue
break
return sum
self.assertEqual(f(3), f.py_func(3))
if __name__ == '__main__':
#TestControlFlow('test_reduction').debug()
unittest.main(verbosity=3)
|
RyanChinSang/LeagueLatency
|
History/Raw/v2.2a Stable/LL.py
|
Python
|
gpl-3.0
| 21,253
| 0.00494
|
import os
import sys
import math
import errno
import subprocess
import tkMessageBox
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from PIL import Image
from matplotlib import style
from datetime import datetime
from matplotlib.widgets import RadioButtons, Button
# Update the version of the program here:
version = "2.2a"
# 'state' is used to keep track of whether the graph has been paused or not
state = 0
# Global arrays that keep the data for plotting the graphs
ltimes = []
wtimes = []
btimes = []
lpings = []
wpings = []
bpings = []
avg_lis = []
top = []
bot = []
# Global variables
sd = 0
avg = 0
num_to = 0 # number of timeout errors
num_un = 0 # number of unreachable errors
sum_ping = 0
min_ping = float('+inf')
max_ping = float('-inf')
count_na = 0
sum_ping_na = 0
sum_sq_dif_na = 0
min_ping_na = float('+inf')
max_ping_na = float('-inf')
count_lan = 0
sum_ping_lan = 0
sum_sq_dif_lan = 0
min_ping_lan = float('+inf')
max_ping_lan = float('-inf')
start = datetime.now()
sq_dif_ar = []
servers = {"NA": "104.160.131.3", "LAN": "104.160.136.3"}
# matplotlib related variable initialization
style.use('seaborn-darkgrid')
fig = plt.figure(figsize=(16, 9))
ax1 = fig.add_subplot(1, 1, 1)
pp_img = Image.open(os.path.dirname(__file__) + '/static/buttons/pp_button.png')
dec_img = Image.open(os.path.dirname(__file__) + '/static/buttons/dec.png')
inc_img = Image.open(os.path.dirname(__file__) + '/static/buttons/inc.png')
null_img = Image.open(os.path.dirname(__file__) + '/static/buttons/null.png')
stgd_img = Image.open(os.path.dirname(__file__) + '/static/buttons/stgd.png')
stwr_img = Image.open(os.path.dirname(__file__) + '/static/buttons/stwr.png')
stbd_img = Image.open(os.path.dirname(__file__) + '/static/buttons/stbd.png')
unstgd_img = Image.open(os.path.dirname(__file__) + '/static/buttons/unstgd.png')
unstwr_img = Image.open(os.path.dirname(__file__) + '/static/buttons/unstwr.png')
unstbd_img = Image.open(os.path.dirname(__file__) + '/static/buttons/unstbd.png')
unstlgd_img = Image.open(os.path.dirname(__file__) + '/static/buttons/unstlgd.png')
unstlwr_img = Image.open(os.path.dirname(__file__) + '/static/buttons/unstlwr.png')
unstlbd_img = Image.open(os.path.dirname(__file__) + '/static/buttons/unstlbd.png')
vunstgd_img = Image.open(os.path.dirname(__file__) + '/static/buttons/vunstgd.png')
vunstwr_img = Image.open(os.path.dirname(__file__) + '/static/buttons/vunstwr.png')
vunstbd_img = Image.open(os.path.dirname(__file__) + '/static/buttons/vunstbd.png')
pp_img.thumbnail((64, 64), Image.ANTIALIAS)
dec_img.thumbnail((16, 16), Image.ANTIALIAS)
inc_img.thumbnail((16, 16), Image.ANTIALIAS)
stgd_img.thumbnail((16, 16), Image.ANTIALIAS)
stwr_img.thumbnail((16, 16), Image.ANTIALIAS)
stbd_img.thumbnail((16, 16), Image.ANTIALIAS)
unstgd_img.thumbnail((16, 16), Image.ANTIALIAS)
unstwr_img.thumbnail((16, 16), Image.ANTIALIAS)
unstbd_img.thumbnail((16, 16), Image.ANTIALIAS)
unstlgd_img.thumbnail((16, 16), Image.ANTIALIAS)
unstlwr_img.thumbnail((16, 16), Image.ANTIALIAS)
unstlbd_img.thumbnail((16, 16), Image.ANTIALIAS)
vunstgd_img.thumbnail((16, 16), Image.ANTIALIAS)
vunstwr_img.thumbnail((16, 16), Image.ANTIALIAS)
vunstbd_img.thumbnail((16, 16), Image.ANTIALIAS)
icon_manager = mpl.pyplot.get_current_fig_manager()
icon_manager.window.wm_iconbitmap(os.path.dirname(__file__) + '/static/icons/icon.ico')
rax = plt.axes([0.881, 0.535, 0.089, 0.089], aspect='equal', frameon=True, axisbg='white')
radio = RadioButtons(rax, servers.keys())
radio_value = radio.value_selected
class ButtonHandler(object):
"""
Class created to handle button functionality via .on_clicked()
"""
ind = 0
def quit(self, event):
self.ind += 1
close_handler(event)
plt.draw()
def pause(self, event):
global state
self.ind -= 1
state += 1
plt.draw()
def make_databox(vpos, hpos, alpha, fc, ec):
"""
Creates a box of all equal dimensions to hold the text data at the side of the graph - uniformity!
vpos: vertical position float
hpos: horizontal position float
alpha: strength of the colour float
colour: colour of the box string
"""
return ax1.text(vpos, hpos, '______________.', transform=ax1.transAxes, alpha=0,
bbox={'alpha': alpha,
'pad': 5,
"fc": fc,
"ec": ec,
"lw": 2})
def close_handler(event):
"""
Safely shutdown all processes of this program whenever the window is closed by user.
"""
sys.exit()
def spperr_handler(err):
"""
Sub-process ping error handler
Handles common 'errors' we can expect from Window's ping.exe, which is accessed through a subprocess.
'errors' refer to unsuccessful pings.
"""
err_dict = {'Destination host unreachable': 'The destination was unreachable!\nPlease check your internet '
'connection and press Retry.',
'Request timed out': 'The destination took too long to respond!\nPlease check your internet connection '
'and press Retry.'
}
try:
if tkMessageBox.askretrycancel(err, err_dict[err]):
upd_data()
else:
sys.exit()
# This should never occur - this handles errors not in the err_dict (the expected errors).
# Could be useful if a very powerful err_handler was coded, where every line is passed through here.
except KeyError:
if tkMessageBox.showerror('Unknown Error', 'The condition under which this error occurred was unexpected!'):
sys.exit()
def set_savdir(sav_dir='Screenshots'):
"""
Configures the default mpl save directory for screenshots.
Checks if there is a folder named 'Screenshots' in root folder.
If there is no folder there named 'Screenshots', it creates the directory.
"""
if not os.path.isdir(os.path.join(os.path.dirname(__file__), sav_dir).replace('\\', '/')):
try:
os.makedirs(os.path.join(os.path.dirname(__file__), sav_dir).replace('\\', '/'))
except OSError as exc:
if not (exc.errno == errno.EEXIST and os.path.isdir(os.path.join(os.path.dirname(__file__),
sav_dir).replace('\\', '/'))):
raise
# Now that the directory for 'Screenshots' surely exists, set it as default directory.
mpl.rcParams["savefig.directory"] = os.path.join(os.path.dirname(__file__), sav_dir).replace('\\', '/')
def draw_ping(vpos, hpos, ping, up_bound, lo_bound, stdv, vpos_tb, hpos_tb, a_yellow, a_green, a_red):
"""
A powerful function that performs:
1- The specification of the databox which holds the ping data:
a. Inner (face) colour represents the ping range
b. Outer (edge) colour represents the ping state (spiked, below lo_bound etc.)
2- Drawing the circle that summarizes the state of the ping
vpos: the vertical position of the button it draws the ping circle in
hpos: the horizontal position of the button it draws the ping circle in
ping: the value of the current ping
used in data analysis and is a key factor to decide the state of the ping
up_bound: represents the ping + standard deviation
lo_bound: represents the ping - standard deviation
stdv: the standard deviation calculated in upd_data(), passed from animate(i)
vpos_tb: the vertical position of the databox which holds the ping data
hpos_tb: the horizontal position of the databox which holds the ping data
a_yellow: the strength of the databox colour for yellow
a_green: the strength of the databox colour for green
a_red: the strength of the databox colour for red
"""
global avg
# Ping is 'good'
if 0 <= ping <= 199:
# Ping is very unstable - has very large and frequent spikes
if stdv * 2 >= 0.3 * avg:
make_databox(vpos=vpos_tb, hpos=hpos_tb, alpha=a_green, fc="green", ec="red")
return
|
ntymtsiv/tempest
|
tempest/services/identity/v3/json/policy_client.py
|
Python
|
apache-2.0
| 2,378
| 0
|
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
from tempest.common.rest_client import RestClient
from tempest import config
CONF = config.CONF
class PolicyClientJSON(RestClient):
def __init__(self, auth_provider):
super(PolicyClientJSON, self).__init__(auth_provider)
self.service = CONF.identity.catalog_type
self.endpoint_url = 'adminURL'
self.api_version = "v3"
def create_policy(self, blob, type):
"""Creates a Policy."""
post_body = {
"blob": blob,
"type": type
}
post_body = json.dumps({'policy': post_body})
resp, body = self.post('policies', post_body, self.headers)
body = json.loads(body)
return resp, body['policy']
def list_policies(self):
"""Lists the policies."""
resp, body = self.get('policies')
body = json.loads(body)
return resp, body['policies']
def get_policy(self, policy_id):
"""Lists out the given policy."""
url = 'policies/%s' % policy_id
resp, body = self.get(url)
body = json.loads(body)
return resp, body['policy']
def update_policy(self, policy_id, **kwargs):
"""Updates a policy."""
resp, body = self.get_policy(policy_id)
type = kwargs.get('type')
post_body = {
'type': type
}
post_body = json.dumps({'policy': post_body})
url = 'policies/%s' % policy_id
resp, body = self.patch(url, post_body,
self.headers)
body = json.loads(body)
return resp, body['policy']
def delete_policy(self, policy_id):
"""Deletes the policy."""
url = "policies/%s" % policy_id
return self.delete(url)
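# Usage sketch (auth_provider and the returned fields are illustrative only):
# client = PolicyClientJSON(auth_provider)
# resp, policy = client.create_policy(blob='{"rule": "allow"}',
#                                     type='application/json')
# resp, policies = client.list_policies()
# resp, same_policy = client.get_policy(policy['id'])
# client.delete_policy(policy['id'])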
|
haxsaw/actuator
|
src/actuator/provisioners/example_resources.py
|
Python
|
mit
| 2,612
| 0.004977
|
#
# Copyright (c) 2014 Tom Carroll
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
'''
Example classes mainly used in testing.
'''
from actuator.infra import Provisionable
from actuator.modeling import ContextExpr
class ProvisionableWithFixer(Provisionable):
def _fix_arguments(self, provisioner=None):
for k, v in self.__dict__.items():
setattr(self, k, self._get_arg_value(v))
class Server(ProvisionableWithFixer):
def __init__(self, name, **kwargs):
super(Server, self).__init__(name)
self.provisionedName = None
object.__getattribute__(self, "__dict__").update(kwargs)
self.kwargs = kwargs
def get_init_args(self):
return ((self.name,), self.kwargs)
class Database(ProvisionableWithFixer):
def __init__(self, name, **kwargs):
super(Database, self).__init__(name)
self.provisionedName = None
self.port = None
self.adminUser = None
self.adminPassword = None
        object.__getattribute__(self, "__dict__").update(kwargs)
self.kwargs = kwargs
def get_init_args(self):
return ((self.name,), self.kwargs)
class Queue(ProvisionableWithFixer):
def __init__(self, name, **kwargs):
super(Queue, self).__init__(name)
self.provisionedName = None
        self.qmanager = None
self.host = None
self.port = None
object.__getattribute__(self, "__dict__").update(kwargs)
self.kwargs = kwargs
def get_init_args(self):
return((self.name,), self.kwargs)
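# Usage sketch: the extra keyword arguments land directly on the instance and
# are re-emitted by get_init_args() (values below are illustrative).
# s = Server("app-server", mem="8GB", cpus=2)
# db = Database("app-db", port=5432)
# q = Queue("app-queue", host="mq01", port=5672)
# print(s.get_init_args())   # -> (("app-server",), {"mem": "8GB", "cpus": 2})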
|
wonkoderverstaendige/PyFL593FL
|
PyFL593FL/ui/__init__.py
|
Python
|
mit
| 117
| 0.017094
|
# -*- coding: utf-8 -*-
"""
Created on 05 Apr 2014 3:30 AM
@author: <'Ronny Eichler'> ronny.eichler@gmail.com
UI
"""
|
jfinkels/networkx
|
examples/drawing/circular_tree.py
|
Python
|
bsd-3-clause
| 639
| 0.001565
|
import networkx as nx
import matplotlib.pyplot as plt
try:
import pygraphviz
from networkx.drawing.nx_agraph import graphviz_layout
except ImportError:
try:
import pydot
        from networkx.drawing.nx_pydot import graphviz_layout
except ImportError:
raise ImportError("This example needs Graphviz and either "
"P
|
yGraphviz or pydot")
G = nx.balanced_tree(3, 5)
pos = graphviz_layout(G, prog='twopi', args='')
plt.figure(figsize=(8, 8))
nx.draw(G, pos, node_size=20, alpha=0.5, node_color="blue", with_labels=False)
plt.axis('equal')
plt.savefig('circular_tree.png')
plt.show()
|
threema-ch/threema-msgapi-sdk-python
|
threema/gateway/bin/callback_server.py
|
Python
|
mit
| 4,446
| 0.0009
|
"""
The command line interface for the Threema Gateway Callback Server.
"""
import asyncio
import functools
import click
import logbook
import logbook.more
from threema.gateway import __version__ as _version
from threema.gateway import (
Connection,
util,
)
from threema.gateway.e2e import AbstractCallback
from threema.gateway.key import Key
_logging_handler = None
_logging_levels = {
1: logbook.CRITICAL,
2: logbook.ERROR,
3: logbook.WARNING,
4: logbook.NOTICE,
5: logbook.INFO,
6: logbook.DEBUG,
7: logbook.TRACE,
}
class Callback(AbstractCallback):
@asyncio.coroutine
def receive_message(self, message):
click.echo('Got message ({}): {}'.format(repr(message), message))
def aio_serve(close_func):
loop = asyncio.get_event_loop()
def decorator(func):
func = asyncio.coroutine(func)
def wrapper(*args, **kwargs):
# Start
click.echo('Starting')
open_result = loop.run_until_complete(func(*args, **kwargs))
click.echo('Started')
try:
loop.run_forever()
except KeyboardInterrupt:
pass
click.echo('Closing')
close_result = loop.run_until_complete(close_func(open_result))
loop.close()
click.echo('Closed')
return open_result, close_result
return functools.update_wrapper(wrapper, func)
return decorator
@click.group()
@click.option('-v', '--verbosity', type=click.IntRange(0, len(_logging_levels)),
default=0, help="Logging verbosity.")
@click.option('-c', '--colored', is_flag=True, help='Colourise logging output.')
@click.pass_context
def cli(ctx, verbosity, colored):
"""
Command Line Interface. Use --help for details.
"""
if verbosity > 0:
# Enable logging
util.enable_logging(level=_logging_levels[verbosity])
# Get handler class
if colored:
handler_class = logbook.more.ColorizedStderrHandler
else:
handler_class = logbook.StderrHandler
# Set up logging handler
handler = handler_class(level=_logging_levels[verbosity])
handler.push_application()
global _logging_handler
_logging_handler = handler
# Create context object
ctx.obj = {}
@cli.command(short_help='Show version information.', help="""
Show the current version of the Threema Gateway Callback Server.
""")
def version():
click.echo('Version: {}'.format(_version))
@asyncio.coroutine
def close_server(server_and_callback):
server, callback = server_and_callback
server.close()
yield from server.wait_closed()
yield from callback.close()
@cli.command(short_help='Start the callback server.', help="""
Start the Threema Gateway Callback Server.
FROM is the API identity and SECRET is the API secret.
CERT represents the path to a file in PEM format containing the SSL
certificate of the server.""")
@click.argument('identity')
@click.argument('secret')
@click.argument('private_key')
@click.argument('cert', type=click.Path(exists=True))
@click.option('-k', '--keyfile', type=click.Path(exists=True), help="""
Path to a file that contains the private key. Will be read from
CERTFILE if not present.""")
@click.option('-h', '--host', help='Bind to a specific host.')
@click.option('-p', '--port', default=443, help='Listen on a specific port.')
@aio_serve(close_server)
def serve(**arguments):
# Get arguments
identity = arguments['identity']
secret = arguments['secret']
private_key = util.read_key_or_key_file(arguments['private_key'], Key.Type.private)
certfile = arguments['cert']
keyfile = arguments.get('keyfile')
host = arguments.get('host')
port = arguments['port']
# Create connection and callback instances
connection = Connection(identity=identity, secret=secret, key=private_key)
callback = Callback(connection)
# Create server
coroutine = callback.create_server(certfile, keyfile=keyfile, host=host, port=port)
server = yield from coroutine
return server, callback
def main():
try:
cli()
    except Exception as exc:
click.echo('An error occurred:', err=True)
click.echo(exc, err=True)
raise
finally:
if _logging_handler is not None:
            _logging_handler.pop_application()
if __name__ == '__main__':
main()
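# Invocation sketch (identity, secret, key and certificate are placeholders):
#   python callback_server.py -v 5 -c serve '*MYGWID*' mysecret \
#       privatekey.txt cert.pem --port 8443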
|
pwittchen/learn-python-the-hard-way
|
exercises/exercise36.py
|
Python
|
mit
| 105
| 0
|
# Exercise 36: Designing and debugging
# No code
# Read: http://learnpythonthehardway.org/book/ex36.html
|
power12317/weblate
|
weblate/trans/migrations/0011_add_file_format.py
|
Python
|
gpl-3.0
| 13,518
| 0.007843
|
# -*- coding: utf-8 -*-
#
# Copyright © 2012 - 2013 Michal Čihař <michal@cihar.com>
#
# This file is part of Weblate <http://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'SubProject.file_format'
db.add_column('trans_subproject', 'file_format',
self.gf('django.db.models.fields.CharField')(default='auto', max_length=50),
keep_default=False)
def backwards(self, orm):
# Deleting field 'SubProject.file_format'
db.delete_column('trans_subproject', 'file_format')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'lang.language': {
'Meta': {'ordering': "['name']", 'object_name': 'Language'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'nplurals': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'pluralequation': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'})
},
'trans.change': {
'Meta': {'ordering': "['-timestamp']", 'object_name': 'Change'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'unit': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['trans.Unit']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'trans.check': {
'Meta': {'object_name': 'Check'},
'check': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'checksum': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '40', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ignore': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lang.Language']"}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['trans.Project']"})
},
'trans.dictionary': {
'Meta': {'ordering': "['source']", 'object_name': 'Dictionary'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lang.Language']"}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['trans.Project']"}),
'source': ('django.db.models.fields.CharField', [], {'max_length': '200', 'db_index': 'True'}),
'target': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'trans.indexupdate': {
'Meta': {'object_name': 'IndexUpdate'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'unit': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['trans.Unit']"})
},
'trans.project': {
'Meta': {'ordering': "['name']", 'object_name': 'Project'},
'commit_message': ('django.db.models.fields.CharField', [], {'default': "'Translated using Weblate.'", 'max_length': '200'}),
'committer_email': ('django.db.models.fields.EmailField', [], {'default': "'noreply@weblate.org'", 'max_length': '75'}),
'committer_name': ('django.db.models.fields.CharField', [], {'default': "'Weblate'", 'max_length': '200'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'instructions': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'mail': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'merge_style': ('django.db.models.fields.CharField', [], {'default': "'merge'", 'max_length': '10'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
|
Emma926/paradnn
|
test.py
|
Python
|
apache-2.0
| 4,858
| 0.011116
|
''' A self-contained test file.
@author Emma Wang
'''
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow.contrib.tpu.python.tpu import tpu_config
from tensorflow.contrib.tpu.python.tpu import tpu_estimator
from tensorflow.contrib.tpu.python.tpu import tpu_optimizer
import time
import os
tf.flags.DEFINE_bool("use_tpu", True, "Use TPUs or not.")
# Cloud TPU Cluster Resolvers
tf.flags.DEFINE_string(
"gcp_project", default="",
help="Project name for the Cloud TPU project. If not specified, "
"the GCE project metadata will be used.")
tf.flags.DEFINE_string(
"tpu_zone", default="",
help="GCE zone where the Cloud TPU is located in.")
tf.flags.DEFINE_string(
"tpu_name", default="",
help="Name of the Cloud TPU for Cluster Resolvers.")
tf.flags.DEFINE_integer("iterations", 10,
"Number of iterations per TPU training loop.")
tf.flags.DEFINE_integer("num_shards", 8, "Number of shards (TPU chips).")
tf.flags.DEFINE_string("model_dir", None, "Estimator model_dir")
tf.flags.DEFINE_integer("batch_size", 128,
"Global batch size for the training")
tf.flags.DEFINE_integer("train_steps", 100,
"Total number of steps.")
input_dim = [224, 224, 3]
output_dim = 1000
FLAGS = tf.flags.FLAGS
def get_input_fn(batch_size, input_dim, output_dim):
def input_fn(params):
size = [batch_size]
for i in input_dim:
size.append(i)
images = tf.random_uniform(
size, minval=-0.5, maxval=0.5, dtype=tf.float32)
labels = tf.random_uniform(
[batch_size], maxval=output_dim, dtype=tf.int32)
labels = tf.one_hot(labels, output_dim)
return images, labels
return input_fn
def model_fn(features, labels, mode, params):
output_dim = params['output_dim']
net = features
shp = net.get_shape().as_list()
flattened_shape = shp[1] * shp[2] * shp[3]
net = tf.reshape(net, [shp[0], flattened_shape])
net = tf.layers.dense(
inputs=net,
units=4,
activation=tf.nn.relu)
net = tf.layers.dropout(
inputs=net,
rate=0.5)
net = tf.layers.dense(
inputs=net,
units=output_dim,
activation=None)
loss = tf.losses.softmax_cross_entropy(
onehot_labels=labels, logits=net)
learning_rate = tf.train.exponential_decay(
0.01, tf.train.get_global_step(), 25000, 0.97)
if FLAGS.use_tpu:
optimizer = tpu_optimizer.CrossShardOptimizer(
tf.train.GradientDescentOptimizer(learning_rate=learning_rate))
else:
optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate)
train_op = optimizer.minimize(loss, global_step=tf.train.get_global_step())
param_stats = tf.profiler.profile(
      tf.get_default_graph(),
options=ProfileOptionBuilder.trainable_variables_parameter())
fl_stats = tf.profiler.profile(
tf.get_default_graph(),
      options=tf.profiler.ProfileOptionBuilder.float_operation())
return tpu_estimator.TPUEstimatorSpec(
mode=mode,
loss=loss,
train_op=train_op)
ProfileOptionBuilder = tf.profiler.ProfileOptionBuilder
def main(unused_argv):
start = time.time()
tf.logging.set_verbosity(tf.logging.INFO)
if FLAGS.use_tpu:
tf.logging.info("Using TPUs.")
else:
tf.logging.info("NOT using TPUs.")
if FLAGS.use_tpu:
        tf.logging.info('tpu name: %s', FLAGS.tpu_name)
if FLAGS.tpu_name is None:
raise RuntimeError("You must specify --tpu_name.")
else:
if '1.6.0' in tf.__version__:
tpu_cluster_resolver = (
tf.contrib.cluster_resolver.TPUClusterResolver(
tpu_names=[os.uname()[1]],
zone=FLAGS.tpu_zone,
project=FLAGS.gcp_project))
else:
tpu_cluster_resolver = (
tf.contrib.cluster_resolver.TPUClusterResolver(
os.uname()[1],
zone=FLAGS.tpu_zone,
project=FLAGS.gcp_project))
tpu_grpc_url = tpu_cluster_resolver.get_master()
else:
tpu_grpc_url = ''
run_config = tpu_config.RunConfig(
master=tpu_grpc_url,
evaluation_master=tpu_grpc_url,
model_dir=FLAGS.model_dir,
save_checkpoints_secs=None,
tpu_config=tpu_config.TPUConfig(iterations_per_loop=FLAGS.iterations, num_shards=FLAGS.num_shards),
)
estimator = tpu_estimator.TPUEstimator(
model_fn=model_fn,
params={"bs": FLAGS.batch_size, "output_dim": output_dim, "input_dim": input_dim},
use_tpu=FLAGS.use_tpu,
train_batch_size=FLAGS.batch_size,
config=run_config)
estimator.train(input_fn=get_input_fn(FLAGS.batch_size, input_dim, output_dim), max_steps=FLAGS.train_steps)
total = time.time() - start
tf.logging.info("Total time: " + str(total))
if __name__ == "__main__":
tf.app.run()
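# Run sketch (flag values illustrative; --use_tpu=False runs without a TPU):
#   python test.py --use_tpu=False --batch_size=64 --train_steps=10
#   python test.py --tpu_name=my-tpu --tpu_zone=us-central1-b --gcp_project=my-proj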
|
lipixun/newsanalyzer4w
|
newsanalyzer/utils.py
|
Python
|
gpl-3.0
| 329
| 0.006116
|
# encoding=utf8
# pylint: disable=W0611
""" The utility
Author: lipixun
Created Time : 日 2/12 14:14:50 2017
File Name: utils.py
Description:
"""
from spec import DataPath
# Import json
try:
import simplejson as json
except ImportError:
import json
# NLTK
import nltk
nltk.data.path = [ DataPath ]
|
johnskopis/naglib
|
naglib/config/command.py
|
Python
|
mit
| 342
| 0.002924
|
#!/usr/bin/env python
from base import *
""" A representation of a nagios service dependency"""
class Command(BaseObject):
TYPE = 'command'
TEMPLATE_CLASS = None
PARAMS = (
'command_name',
'command_line'
)
REQUIRED_PARAMS = PARAMS
@property
    def identity(self):
return self.command_name
|
|
ddimensia/RaceCapture_App
|
autosportlabs/racecapture/views/configuration/rcp/wireless/bluetoothconfigview.py
|
Python
|
gpl-3.0
| 3,672
| 0.004085
|
import kivy
kivy.require('1.9.1')
from kivy.app import Builder
from kivy.uix.gridlayout import GridLayout
from kivy.properties import ObjectProperty
from kivy.logger import Logger
from settingsview import SettingsView, SettingsSwitch, SettingsButton
from autosportlabs.widgets.separator import HLineSeparator
from autosportlabs.racecapture.views.util.alertview import editor_popup
from autosportlabs.racecapture.views.configuration.rcp.advancedbluetoothconfigview import AdvancedBluetoothConfigView
Builder.load_string('''
<BluetoothConfigView>
id: bluetooth
cols: 1
spacing: [0, dp(20)]
size_hint: [1, None]
height: self.minimum_height
HSeparator:
text: 'Bluetooth'
size_hint_y: None
SettingsView:
id: bt_enable
label_text: 'Bluetooth'
help_text: 'If the Bluetooth module is connected, enable it here'
SettingsView:
id: btconfig
label_text: 'Advanced configuration'
help_text: 'Change Bluetooth name and passkey. Firmware version 2.9.0 or greater required.'
''')
class BluetoothConfigView(GridLayout):
def __init__(self, config, **kwargs):
super(BluetoothConfigView, self).__init__(**kwargs)
self.config = None
self.register_event_type('on_modified')
self.config_updated(config)
self._bt_popup = None
self._bt_config_view = None
btConfig = self.ids.btconfig
btConfig.bind(on_setting=self.on_bt_configure)
btConfig.setControl(SettingsButton(text='Advanced'))
def on_bluetooth_enabled_change(self, instance, value):
if self.config:
self.config.connectivityConfig.bluetoothConfig.btEnabled = value
self.config.connectivityConfig.stale = True
self.dispatch('on_modified')
def config_updated(self, config):
        self.config = config
value = self.config.connectivityConfig.bluetoothConfig.btEnabled
bluetooth_enabled = self.ids.bt_enable
bluetooth_enabled.setControl(SettingsSwitch(active=value))
bluetooth_enabled.control.bind(active=self.on_bluetooth_enabled_change)
def on_modified(self):
pass
    def on_bt_configure(self, instance, value):
if not self._bt_popup:
content = AdvancedBluetoothConfigView(self.config.connectivityConfig)
popup = editor_popup(title="Configure Bluetooth", content=content,
answerCallback=self.on_bluetooth_popup_answer)
self._bt_popup = popup
self._bt_config_view = content
def on_bluetooth_popup_answer(self, instance, answer):
close = True
modified = False
# If the user clicked the checkbox to save, validate the view. If it's valid, close and save values to config.
# If invalid, leave it (view will show error messages)
if answer:
valid = self._bt_config_view.validate()
if valid:
bt_values = self._bt_config_view.values
if len(bt_values["name"]) > 0:
self.config.connectivityConfig.bluetoothConfig.name = bt_values["name"]
modified = True
if len(bt_values["passkey"]) > 0:
self.config.connectivityConfig.bluetoothConfig.passKey = bt_values["passkey"]
modified = True
else:
close = False
if modified:
self.config.connectivityConfig.stale = True
self.dispatch('on_modified')
if close:
self._bt_popup.dismiss()
self._bt_popup = None
self._bt_config_view = None
|
stetie/postpic
|
postpic/_compat/functions.py
|
Python
|
gpl-3.0
| 3,618
| 0.000553
|
#
# This file is part of postpic.
#
# postpic is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# postpic is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with postpic. If not, see <http://www.gnu.org/licenses/>.
#
# Stephan Kuschel 2017
# Alexander Blinne, 2017
"""
This module provides compatibility replacements of functions from external
libraries which have changed w.r.t. older versions of these libraries or
were not present in older versions of these libraries
"""
import numpy as np
import scipy as sp
import scipy.signal as sps
import collections
try:
from collections.abc import Iterable
except ImportError:
from collections import Iterable
def np_meshgrid(*args, **kwargs):
if len(args) == 0:
return tuple()
if len(args) == 1:
if kwargs.get('copy', False):
return (args[0].copy(),)
return (args[0].view(),)
return np.meshgrid(*args, **kwargs)
def np_broadcast_to(*args, **kwargs):
array, shape = args
a, b = np.broadcast_arrays(array, np.empty(shape), **kwargs)
return a
def np_moveaxis(*args, **kwargs):
a, source, destination = args
# twice a quick implementation of numpy.numeric.normalize_axis_tuple
if not isinstance(source, Iterable):
source = (source,)
if not isinstance(destination, Iterable):
destination = (destination,)
source = [s % a.ndim for s in source]
destination = [d % a.ndim for d in destination]
# the real work copied from np.moveaxis
order = [n for n in range(a.ndim) if n not in source]
for dest, src in sorted(zip(destination, source)):
order.insert(dest, src)
return np.transpose(a, order)
def sps_tukey(M, alpha=0.5, sym=True):
"""
Copied from scipy commit 870abd2f1fcc1fcf491324cdf5f78b4310c84446
and replaced some functions by their implementation
"""
if int(M) != M or M < 0:
raise ValueError('Window length M must be a non-negative integer')
if M <= 1:
return np.ones(M)
if alpha <= 0:
return np.ones(M, 'd')
elif alpha >= 1.0:
        return sps.hann(M, sym=sym)  # scipy.signal's Hann window
if not sym:
M, needs_trunc = M + 1, True
else:
M, needs_trunc = M, False
n = np.arange(0, M)
width = int(np.floor(alpha*(M-1)/2.0))
n1 = n[0:width+1]
n2 = n[width+1:M-width-1]
n3 = n[M-width-1:]
w1 = 0.5 * (1 + np.cos(np.pi * (-1 + 2.0*n1/alpha/(M-1))))
w2 = np.ones(n2.shape)
w3 = 0.5 * (1 + np.cos(np.pi * (-2.0/alpha + 1 + 2.0*n3/alpha/(M-1))))
w = np.concatenate((w1, w2, w3))
if needs_trunc:
return w[:-1]
else:
return w
ReplacementFunction = collections.namedtuple(
    'ReplacementFunction',
    ['name', 'originalmodule', 'replacement', 'lib', 'minver'])
replacements = [
ReplacementFunction('meshgrid', np, np_meshgrid, np, '1.9'),
ReplacementFunction('broadcast_to', np, np_broadcast_to, np, '1.10'),
    ReplacementFunction('moveaxis', np, np_moveaxis, np, '1.11'),
ReplacementFunction('tukey', sps, sps_tukey, sp, '0.16')
]
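# Sketch of how the table above could be consumed (the real monkey-patching
# helper lives elsewhere in postpic._compat; names here are assumptions):
# from distutils.version import LooseVersion
# for rf in replacements:
#     if LooseVersion(rf.lib.__version__) < LooseVersion(rf.minver):
#         setattr(rf.originalmodule, rf.name, rf.replacement)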
|
jazzband/site
|
jazzband/projects/commands.py
|
Python
|
mit
| 1,877
| 0.000533
|
import logging
import click
import click_log
from flask.cli import with_appcontext
from ..account import github
from . import tasks
from .models import Project
logger = logging.getLogger(__name__)
click_log.basic_config(logger)
@click.command("projects")
@click_log.simple_verbosity_option(logger)
@with_appcontext
def sync_projects():
"Syncs projects"
tasks.sync_projects()
@click.command("project_members")
@click_log.simple_verbosity_option(logger)
@with_appcontext
def sync_project_members():
"Syncs project members"
tasks.sync_project_members()
@click.command("new_upload_notifications")
@click.option("--project_id", "-p", default=None)
@click_log.simple_verbosity_option(logger)
@with_appcontext
def send_new_upload_notifications(project_id):
tasks.send_new_upload_notifications(project_id)
@click.command("project_team")
@click.argument("name")
@click_log.simple_verbosity_option(logger)
@with_appcontext
def sync_project_team(name):
"Syncs (create/populate) project team"
project = Project.query.filter(Project.name == name).first()
team_response = github.get_project_team(project.name)
if team_response.status_code == 404:
logger.info(f"Project team {name} doesn't exist yet. Creating..")
team_response = project.create_team()
elif team_response.status_code == 200:
logger.info(f"Project team {name} already exists.")
team_response.raise_for_status()
if team_response:
team_data = team_response.json()
for lead in project.lead_members.all():
logging.info(f"Adding @{lead.login} to project team {name}")
member_response = github.join_team(team_data["slug"], lead.login)
member_response.raise_for_status()
else:
        logger.error(
f"Something went wrong while syncing project team for project {name}"
)
|
joke2k/faker
|
faker/providers/job/fi_FI/__init__.py
|
Python
|
mit
| 6,120
| 0
|
from .. import Provider as BaseProvider
class Provider(BaseProvider):
# jobs parsed from a list provided by State Treasury:
# http://www.valtiokonttori.fi/download/noname/%7BF69EA5BD-C919-49FE-8D51-91434E4B030D%7D/82158
jobs = [
"Agrologi",
"Aikuiskoulutusjohtaja",
"Aineenopettaja",
"Ajojärjestelijä",
"Akatemian tutkijatohtori",
"Aktuaari",
"Alakoulujen apulaisrehtori",
"Alikersantti",
"Alkoholiasiain ylitarkastaja",
"Back office asiantuntija",
"Ballistikko",
"Bioanalyytikko",
"Brand manager",
"Budjettiassistentti",
"Business controller",
"Cc-yritysneuvoja",
"Cert-fi -yksikön päällikkö",
"Communication officer",
"Consul",
"Counsellor",
"Data-analyytikko",
"Dekaanin sihteeri",
"Dieettikeittäjä",
"Digitaalisen kokeen toteuttaja",
"Diplomi-insinööri",
"Dokumentoija",
"Dosentti",
"Eakr-koordinaattori",
"Editoija",
"Edunvalvontasihteeri",
"Egr-ohjaaja",
"Ekokampuskoordinaattori",
"Elektroniikka-asentaja",
"Elinkeinopäällikkö",
"Elokuvakonemestari",
"Elputeknikko",
"Eläinlääkintöneuvos",
"Faktori",
"Farmakologi",
"Fidipro-professori",
"Filmiteknikko",
"Financial controller",
"Floristi",
"Fysioterapeutti",
"Fyysikko",
"Gemmologi",
"Gentax-järjestelmäasiantuntija",
"Geofyysikko",
"Gis-asiantuntija",
"Gm huto pääkäyttäjä",
"Graafikko",
"Haastattelija",
"Hakukoordinaattori",
"Hallimestari",
"Hammashoitaja",
"Hankearkkitehti",
"Harjaantumisopetuksen erityisluokanopettaja",
"Havainnontarkastaja",
"Helikopterihuoltoaliupseeri",
"Henkikirjoittaja",
"Johtava kuluttajaoikeusneuvoja",
"Ict-arkkitehti",
"Ihmisoikeuskeskuksen johtaja",
"Iktyonomi",
"Ilma-aluksen päällikkö",
"Iltapäiväkerhon ohjaaja",
"Immunologi",
"Info-palvelupisteen hoitaja",
"Innoittaja",
"Jakeluvastaava",
"Jalkaväen tarkastaja",
"Jaoksen johtaja",
"Jatkokoulutettava eläinlääkäri",
"Jhs-projektipäällikkö",
"Johdon asiantuntija",
"Joukkoliikenneasiantuntija",
"Julkaisu- ja markkinointisuunnittelija",
"Junamies",
"Juontaja",
"Kaapeli-insinööri",
"Kabinettisihteeri",
"Kadettikoulun johtaja",
"Kahvila-apulainen",
"Kairaaja",
"Kalabiologi",
"Kampanjapäällikkö",
"Kanavanhoitaja",
"Kapellimestari",
"Karjamestari",
"Laadunvarmistuksen asiantuntija",
"Laboraattori",
"Laillisuusvalvontasihteeri",
"Laki- ja henkilöstöasiainjohtaja",
"Lapsiasiavaltuutettu",
"Laskennan kehittämispäällikkö",
"Lataamoinsinööri",
"Lautakuntasihteeri",
"Lavastaja",
"Maa- ja vesirakennustyöntekijä",
"Maisema-arkkitehti",
"Majakkateknikko",
"Maksatusasiantuntija",
"Malli",
"Mareografihoitaja",
"Mastoteknikko",
"Matemaatikko",
"Media- ja kulttuurikoordinaattori",
"Neuropsykologi",
"Nimikkeistöpäällikkö",
"Nosturinkuljettaja",
"Notaari",
"Nukutuslääkäri",
"Numerointisihteeri",
"Nuorempi konstaapeli",
"Näytearkistonhoitaja",
"Näönkäytön asiantuntija",
"Obduktiokoordinaattori",
"Observaattori",
"Offset-monistaja",
"Ohjaaja",
"Oikaisulautakunnan puheenjohtaja",
"Oleskelulupakäsittelijä",
"Omistajaohjausyksikön johtaja",
"Ompelija",
"Opas",
"Operaatiopäällikkö",
"Padonhoitaja",
"Paikallisjohtaja",
"Pakolaiskeskuksen johtaja",
"Palkanlaskentapäällikkö",
"Panostaja",
"Paperikonservaattori",
"Parturi-kampaaja",
"Passi- ja maahantulolupavirkailija/toimistovirkailija",
"Pataljoonan komentaja",
"Pedagogi",
"Radioasentaja",
"Rahakammion johtaja",
"Raideliikennejohtaja",
"Rajaeläinlääkäri",
"Rakennemuutosjohtaja",
"Raportoinnin asiantuntija",
"Ratainsinööri",
"Rauhanturvaaja",
"Ravintohaastattelija",
"Rehtori",
"Saamelaisarkistonhoitaja",
"Sadehavainnontekijä",
"Sairaala-apulainen",
"Saksan, englannin ja ruotsinkielen lehtori",
"Salkunhoitaja",
"Sanomakeskusaliupseeri",
"Satamapäällikkö",
"Seismologi",
"Sektorijohtaja",
"Selvittelijä",
"Taajuussuunnittelija",
"Taideamanuenssi",
"Tallentaja",
"Tanssija",
"Tapahtumakoordinaattori",
"Tarjoilija",
"Tasa-arvoneuvos",
"Tavaraliikennelupakäsittelijä",
"Team finland kasvu- ja kansainvälistymiskoordinaattori",
"Teemapäällikkö",
"Ulkoasiainneuvos",
"Ulosottojohtaja",
"Ultraäänihoitaja",
"Unix
|
-asiantuntija",
"Upseeri",
"Urakonsultti",
"Urheiluohjaaja",
"Vaaitsija",
"Vac-yhdyshenkilö",
"Vahingonkorvausasiantuntija",
"Vaihteenhoitaja",
"Vakuustoimittaja",
"Valaistusmestari",
"Vammaisasiamies",
"Vanhempi tutkijainsinööri",
"Vapaa-ajan ohjaaja",
"Varadekaani",
"Www-
|
asiantuntija",
"Yhdenvertaisuusvaltuutettu",
"Yhteinen tuntiopettaja",
"Yksikkösihteeri",
"Yleinen edunvalvoja",
"Yliaktuaari",
"Ylläpidon palvelupäällikkö",
"Yläasteen rehtori",
"Ympärintönsuojeluyksikön päällikkö",
"Yrittäjyysneuvoja",
"Yva-koordinaattori",
]
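# Usage sketch: the provider is picked up automatically for the fi_FI locale.
# from faker import Faker
# fake = Faker('fi_FI')
# fake.job()  # -> e.g. 'Agrologi'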
|
LasLabs/python-helpscout
|
helpscout/tests/test_apis_tags.py
|
Python
|
mit
| 1,513
| 0
|
# -*- coding: utf-8 -*-
# Copyright 2017-TODAY LasLabs Inc.
# License MIT (https://opensource.org/licenses/MIT).
from .api_common import ApiCommon, recorder
class TestApisTags(ApiCommon):
"""Tests the Tags API endpoint."""
def setUp(self):
super(TestApisTags, self).setUp()
self.__endpoint__ = self.api.Tags
@recorder.use_cassette()
def test_apis_tags_get(self):
"""It should not be implemented."""
with self.assertRaises(NotImplementedError):
            self.__endpoint__.get(None)
@recorder.use_cassette()
def test_apis_tags_delete(self):
"""It should not be implemented."""
        with self.assertRaises(NotImplementedError):
self.__endpoint__.delete(None)
@recorder.use_cassette()
def test_apis_tags_update(self):
"""It should not be implemented."""
with self.assertRaises(NotImplementedError):
self.__endpoint__.update(None)
@recorder.use_cassette()
def test_apis_tags_create(self):
"""It should not be implemented."""
with self.assertRaises(NotImplementedError):
self.__endpoint__.create(None)
@recorder.use_cassette()
def test_apis_tags_list(self):
"""It should list the tags in the tag."""
self._test_list()
@recorder.use_cassette()
def test_apis_tags_search(self):
"""It should not be implemented."""
with self.assertRaises(NotImplementedError):
self.__endpoint__.search([], None)
|
andres00157/Curso-de-javeriana
|
4.py
|
Python
|
apache-2.0
| 1,335
| 0.03221
|
print ("suma de los digitos de un numero")
print ("de cuantos digitos quere trabajar")
a = int(raw_input("numero de dijitos"))
if a == 2 :
print("escribe el numero")
b = int(raw_input("numero="))
c = b/10
d = b%10
print (c + d )
if a == 3 :
print("escribe el numero")
b = int(raw_input("numero="))
c = b/10
d = b%10
p = c/10
q = c%10
print (q + d + p )
if a == 4 :
print("escribe el numero")
b = int(raw_input("numero="))
c = b/10
d = b%10
p = c/10
q = c%10
u = p / 10
o = p % 10
print (q + d + u + o )
if a == 5 :
print("escribe el numero")
b = int(raw_input("numero="))
c = b/10
d = b%10
p = c/10
q = c%10
u = p / 10
o = p % 10
i = u/10
e = u%10
print (q + d + o + i + e )
if a == 6 :
print("escribe el numero")
b = int(raw_input("numero="))
c = b/10
d = b%10
p = c/10
q = c%10
u = p / 10
o = p % 10
i = u/10
e = u%10
m = i/10
n = i%10
print (q + d + o + e + m + n
|
)
if a == 7 :
print("escribe el numero")
b = int(raw_input("numero="))
c = b/10
d = b%10
p = c/10
q = c%10
u = p / 10
o = p % 10
i = u/10
e = u%10
m = i/10
n = i%10
l = m/10
j = m%10
print (q + d + o + e + n + j + l )
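# Equivalent sketch for any digit count (Python 2, like the code above):
# print sum(int(digit) for digit in str(b))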
| |
shadowmint/nwidget
|
lib/cocos2d-0.5.5/test/test_remove.py
|
Python
|
apache-2.0
| 1,052
| 0.024715
|
# This code is so you can run the samples without installing the package
import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
#
testinfo = "s, t 5.1, s, q"
tags = "CocosNode.remove"
import cocos
from cocos.director import director
from cocos.sprite import Sprite
from cocos.actions import *
import pyglet
class TestLayer(cocos.layer.Layer):
def __init__(self):
super( TestLayer, self ).__init__()
x,y = director.get_window_size()
self.sprite = Sprite('grossini.png', (x/2, y/2))
self.sprite2 = Sprite('grossini.png', (x/4, y/2))
self.add( self.sprite )
self.add( self.sprite2, name="grossini" )
def rem():
self.remove( self.sprite )
self.remove( "grossini" )
self.do( Delay(5) + CallFunc( rem ) )
def main():
director.init()
test_layer = TestLayer ()
main_scene = cocos.scene.Scene (test_layer)
director.run (main_scene)
if __name__ == '__main__':
main()
|
vallemrv/tpvB3
|
cloud/contabilidad/__init__.py
|
Python
|
apache-2.0
| 192
| 0
|
# @Author: Manuel Rodriguez <valle>
# @Date: 01-Jan-2018
# @Email:  valle.mrv@gmail.com
# @Last modified by: valle
# @Last modified time: 07-Jan-2018
# @License: Apache license version 2.0
|
Bobstin/AutomatedBrewery
|
automatedbrewery/PID.py
|
Python
|
mit
| 23,167
| 0.01869
|
import time
import numpy
#Based heavily on the Arduino PID library by Brett Beauregard
# By default, looks for an attribute called value for the input, and setting for the output
# If you want to change that, then you can change the input/outputAttributeName
# Input source must be available when PID is initialized to get the starting value
# assumes that the output device is regularly checking a pipe to get the next value to go to
class PID(object):
def __init__(self,inputSource,inputAttributeName='value'):
#creates placeholders for the PID values. These are allowed to be empty, but run() will
#check if they are filled, and fail if they are not
self.setPoint = None
self.Kp = None
self.Ki = None
self.Kd = None
self.outputPipeConn = None
self.outputMin = None
self.outputMax = None
self.output = None
self.cycleTime = None
self.semiAutoValue = None
self.inputPipeConn = None
self.tempGraphSignal = None
self.messageSignal = None
self.inputSource = inputSource
self.outputAttributeName = 'setting'
self.inputAttributeName = inputAttributeName
self.lastInput = getattr(self.inputSource,self.inputAttributeName)
if self.lastInput == None: raise ValueError('Unable to get input value. Check inputSource and inputAttributeName')
self.lastRun = time.time()*1000
self.integralWindupBlocker = False
self.integralTerm = 0
self._mode = 'Auto'
self.stop = 0
self.nextCycleStart =time.time()*1000
self.sentOffSignal = False
@property
def mode(self):
return self._mode
@mode.setter
def mode(self, value):
if (value == 'Auto') | (value == 'SemiAuto') | (value == 'Off'):
#If switching to Auto from another mode, reset the last input value
#If switching from SemiAuto to Auto, set the integral term to SemiAuto so
#the PID maintains that value
#If switching from Off to Auto, reset the integral term so it doesn't overshoot
if value == 'Auto':
self.lastInput = getattr(self.inputSource,self.inputAttributeName)
if self._mode == 'SemiAuto': self.integralTerm = self.semiAutoValue
else:
self.integralTerm = 0
self.inControllableRegion = False
self._mode = value
else:
self.printAndSendMessage('Error: mode must be Auto, SemiAuto, or Off. Not changing the mode.',"Alarm")
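    # Wiring sketch (names illustrative): hand one end of a multiprocessing
    # Pipe to the PID; the output device polls the other end for
    # ('setting', value) tuples sent by run().
    # from multiprocessing import Pipe
    # pid_conn, device_conn = Pipe()
    # pid = PID(sensor)                       # sensor.value must be readable
    # pid.outputPipeConn = pid_conn
    # pid.setPoint, pid.Kp, pid.Ki, pid.Kd = 65.0, 2.0, 0.5, 0.1
    # pid.outputMin, pid.outputMax = 0, 100
    # pid.cycleTime = 1000.0                  # milliseconds
    # pid.run()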
def checkPipe(self):
if self.inputPipeConn.poll():
data = self.inputPipeConn.recv()
if data[0] == "autoTune":
self.autoTune(data[1][0],data[1][1],data[1][2],data[1][3],data[1][4],data[1][5],data[1][6])
else:
setattr(self,data[0],data[1])
def printAndSendMessage(self,message,messageType):
print(message)
if self.messageSignal != None: self.messageSignal.emit(message,messageType)
def run(self):
#since you just started the run, resets stop to 0
self.stop = 0
#main PID loop
while self.stop == 0:
#If it is going to cycle, then calculate the time the next cycle should start. This reduces
#drift in time due to the time it takes to execute the code
if self.cycleTime != None:
self.nextCycleStart = time.time()*1000 + self.cycleTime
#prints the temperature
latestInput = getattr(self.inputSource,self.inputAttributeName)
if self.tempGraphSignal != None:
self.tempGraphSignal.emit(time.time()*1000,latestInput)
#calculates the new setting
self.calculateOutput()
#sends the output to the output pipe connection
            if self.outputPipeConn == None:
print('Error: outputPipeConn is not set')
self.stop = 1
else:
#print(self.output)
if self._mode != "Off": self.outputPipeConn.send((self.outputAttributeName,self.output))
else:
if not(self.sentOffSignal):
self.outputPipeConn.send((self.outputAttributeName,0))
#print("Sending off signal")
self.sentOffSignal = True
#checks the input pipe to see if anything needs to change
if self.inputPipeConn != None:
#print("Checking pipe")
self.checkPipe()
#waits until the next cycle
if self.cycleTime != None:
if time.time()*1000 > self.nextCycleStart:
self.printAndSendMessage('Error: I was not able to calculate the next output prior to the next cycle. Please set a longer cycle time (note that cycle time is measured in ms). Stopping the PID.',"Alarm")
self.stop = 1
else:
waittime = (self.nextCycleStart - time.time()*1000)/1000
time.sleep(waittime)
def calculateOutput(self):
#Performs checks to see if all parameters are set properly
if self.outputMin >= self.outputMax:
self.printAndSendMessage('Error: outputMin is greater than or equal to outputMax',"Alarm")
self.stop = 1
return
#if cycleTime is not set, then the PID will just run once (this is so that you can run the PID externally
#and not in its own loop). However, warn people that this is the case
if self.cycleTime == None:
self.printAndSendMessage('Warning: Cycle time is not set. Running PID a single time',"Warning")
self.stop = 1
#If mode is Off, don't do anything. If it is semiauto, then produce the set output
if self._mode == 'Off':
return
elif self._mode == 'SemiAuto':
self.sentOffSignal = False
if self.semiAutoValue == None:
self.printAndSendMessage ('Error: mode is set to SemiAuto, but semiAutoValue is not set',"Alarm")
self.stop = 1
return
else:
self.output = self.semiAutoValue
elif self._mode == 'Auto':
self.sentOffSignal = False
#checks that all parameters are set properly
if (self.Kp == None) | (self.Ki == None) | (self.Kd == None):
self.printAndSendMessage('Error: Kp, Ki, and Kd are not all set',"Alarm")
                self.stop = 1
|
gazpachoking/Flexget
|
flexget/plugins/metainfo/metainfo_movie.py
|
Python
|
mit
| 1,939
| 0.001031
|
from __future__ import unicode_literals, division, absolute_import
import logging
from builtins import * # noqa pylint: disable=unused-import, redefined-builtin
from flexget import plugin
from flexget.event import event
try:
# NOTE: Importing other plugins is discouraged!
from flexget.components.parsing.parsers import parser_common as plugin_parser_common
except ImportError:
raise plugin.DependencyError(issued_by=__name__, missing='parser_common')
log = logging.getLogger('metainfo_movie')
class MetainfoMovie(object):
"""
    Check if entry appears to be a movie, and populate movie info if so.
"""
schema = {'type': 'boolean'}
def on_task_metainfo(self, task, config):
# Don't run if we are disabled
if config is False:
return
for entry in task.entries:
# If movie parser already parsed this, don't touch it.
if entry.get('id'):
continue
self.guess_entry(entry)
@staticmethod
def guess_entry(entry):
"""
Populates movie_* fields for entries that are successfully parsed.
:param entry: Entry that's being processed
:return: True for successful parse
"""
if entry.get('movie_guessed'):
# Return true if we already parsed this
return True
parser = plugin.get('parsing', 'metainfo_movie').parse_movie(data=entry['title'])
if parser and parser.valid:
parser.name = plugin_parser_common.normalize_name(
plugin_parser_common.remove_dirt(parser.name)
)
for field, value in parser.fields.items():
if not entry.is_lazy(field) and not entry.get(field):
entry[field] = value
return True
return False
@event('plugin.register')
def register_plugin():
plugin.register(MetainfoMovie, 'metainfo_movie', api_ver=2)
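# Config sketch: the plugin is switched on per task in the YAML config, e.g.
#   tasks:
#     my-task:
#       metainfo_movie: yes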
|
OpenNingia/l5r-character-manager
|
l5rcm/models/advancements/rank.py
|
Python
|
gpl-3.0
| 3,132
| 0.014368
|
# -*- coding: utf-8 -*-
# Copyright (C) 2014 Daniele Simonetti
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
class Rank(object):
def __init__(self):
# the clan
self.clan = None
# the family
self.family = None
# the target school
self.school = None
# the insight rank
self.rank = 0
# the school rank
self.school_rank = 0
# the learned tech
self.tech = None
# is 'school' an alternate path
self.is_alternate_path = False
# the original school
self.original_school = None
# the character left an alternate path
self.left_alternate_path = False
# skills
self.skills = []
    def to_dict(self):
        out = {}
        out['clan'] = self.clan
        out['family'] = self.family
out['school'] = self.school
out['rank'] = self.rank
out['school_rank'] = self.school_rank
out['tech'] = self.tech
out['is_alternate_path'] = self.is_alternate_path
        out['original_school'] = self.original_school
out['left_alternate_path'] = self.left_alternate_path
out['skills'] = []
for s in self.skills:
out['skills'].append( s.to_dict() )
return out
class StartingSkill(object):
def __init__(self, skill_id, rank = 1, emphasis = None):
self.skill_id = skill_id
self.rank = rank
self.emphasis = emphasis
def to_dict(self):
out = {}
out['skill_id'] = self.skill_id
out['rank' ] = self.rank
out['emphasis'] = self.emphasis
return out
class CustomStartingSkill(object):
def __init__(self, options, rank = 1):
self.rank = rank
self.options = options # ( value, modifier )
#self.value = value
#self.modifier = modifier
def to_dict(self):
out = {}
out['rank' ] = self.rank
out['options' ] = self.options
return out
class CustomStartingSpells(object):
def __init__(self, element, tag, count = 1):
self.element = element
self.tag = tag
self.count = count
def to_dict(self):
out = {}
out['element'] = self.element
out['tag' ] = self.tag
out['count' ] = self.count
return out
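# Usage sketch (ids are illustrative): record one advancement and serialise it.
# r = Rank()
# r.clan, r.family, r.school = 'crab', 'hida', 'hida_bushi'
# r.rank = r.school_rank = 1
# r.skills.append(StartingSkill('kenjutsu', rank=2))
# print(r.to_dict())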
|
djbaldey/django
|
django/core/serializers/python.py
|
Python
|
bsd-3-clause
| 7,685
| 0.004294
|
"""
A Python "serializer". Doesn't do much serializing per se -- just converts to
and from basic Python data types (lists, dicts, strings, etc.). Useful as a basis for
other serializers.
"""
from __future__ import unicode_literals
from collections import OrderedDict
from django.apps import apps
from django.conf import settings
from django.core.serializers import base
from django.db import DEFAULT_DB_ALIAS, models
from django.utils import six
from django.utils.encoding import force_text, is_protected_type
class Serializer(base.Serializer):
"""
Serializes a QuerySet to basic Python objects.
"""
internal_use_only = True
def start_serialization(self):
self._current = None
self.objects = []
def end_serialization(self):
pass
def start_object(self, obj):
self._current = OrderedDict()
def end_object(self, obj):
self.objects.append(self.get_dump_object(obj))
self._current = None
def get_dump_object(self, obj):
model = obj._meta.proxy_for_model if obj._deferred else obj.__class__
data = OrderedDict([('model', force_text(model._meta))])
if not self.use_natural_primary_keys or not hasattr(obj, 'natural_key'):
data["pk"] = force_text(obj._get_pk_val(), strings_only=True)
data['fields'] = self._current
return data
def handle_field(self, obj, field):
value = field._get_val_from_obj(obj)
# Protected types (i.e., primitives like None, numbers, dates,
# and Decimals) are passed through as is. All other values are
# converted to string first.
if is_protected_type(value):
self._current[field.name] = value
else:
self._current[field.name] = field.value_to_string(obj)
    def handle_fk_field(self, obj, field):
if self.use_natural_foreign_keys and hasattr(field.remote_field.model, 'natural_key'):
related = getattr(obj, field.name)
if related:
value = related.natural_key()
else:
value = None
else:
value = getattr(obj, field.get_attname())
if not is_protected_type(value):
value = field.value_to_string(obj)
self._current[field.name] = value
def handle_m2m_field(self, obj, field):
if field.remote_field.through._meta.auto_created:
if self.use_natural_foreign_keys and hasattr(field.remote_field.model, 'natural_key'):
m2m_value = lambda value: value.natural_key()
else:
m2m_value = lambda value: force_text(value._get_pk_val(), strings_only=True)
self._current[field.name] = [m2m_value(related)
for related in getattr(obj, field.name).iterator()]
def getvalue(self):
return self.objects
def Deserializer(object_list, **options):
"""
Deserialize simple Python objects back into Django ORM instances.
It's expected that you pass the Python objects themselves (instead of a
stream or a string) to the constructor
"""
db = options.pop('using', DEFAULT_DB_ALIAS)
ignore = options.pop('ignorenonexistent', False)
for d in object_list:
        # Look up the model and start building a dict of data for it.
try:
Model = _get_model(d["model"])
except base.DeserializationError:
if ignore:
continue
else:
raise
data = {}
if 'pk' in d:
try:
data[Model._meta.pk.attname] = Model._meta.pk.to_python(d.get('pk'))
except Exception as e:
raise base.DeserializationError.WithData(e, d['model'], d.get('pk'), None)
m2m_data = {}
field_names = {f.name for f in Model._meta.get_fields()}
# Handle each field
for (field_name, field_value) in six.iteritems(d["fields"]):
if ignore and field_name not in field_names:
# skip fields no longer on model
continue
if isinstance(field_value, str):
field_value = force_text(
field_value, options.get("encoding", settings.DEFAULT_CHARSET), strings_only=True
)
field = Model._meta.get_field(field_name)
# Handle M2M relations
if field.remote_field and isinstance(field.remote_field, models.ManyToManyRel):
if hasattr(field.remote_field.model._default_manager, 'get_by_natural_key'):
def m2m_convert(value):
if hasattr(value, '__iter__') and not isinstance(value, six.text_type):
return field.remote_field.model._default_manager.db_manager(db).get_by_natural_key(*value).pk
else:
return force_text(field.remote_field.model._meta.pk.to_python(value), strings_only=True)
else:
m2m_convert = lambda v: force_text(field.remote_field.model._meta.pk.to_python(v), strings_only=True)
try:
m2m_data[field.name] = []
for pk in field_value:
m2m_data[field.name].append(m2m_convert(pk))
except Exception as e:
raise base.DeserializationError.WithData(e, d['model'], d.get('pk'), pk)
# Handle FK fields
elif field.remote_field and isinstance(field.remote_field, models.ManyToOneRel):
if field_value is not None:
try:
if hasattr(field.remote_field.model._default_manager, 'get_by_natural_key'):
if hasattr(field_value, '__iter__') and not isinstance(field_value, six.text_type):
obj = field.remote_field.model._default_manager.db_manager(db).get_by_natural_key(*field_value)
value = getattr(obj, field.remote_field.field_name)
# If this is a natural foreign key to an object that
# has a FK/O2O as the foreign key, use the FK value
if field.remote_field.model._meta.pk.remote_field:
value = value.pk
else:
value = field.remote_field.model._meta.get_field(field.remote_field.field_name).to_python(field_value)
data[field.attname] = value
else:
data[field.attname] = field.remote_field.model._meta.get_field(field.remote_field.field_name).to_python(field_value)
except Exception as e:
raise base.DeserializationError.WithData(e, d['model'], d.get('pk'), field_value)
else:
data[field.attname] = None
# Handle all other fields
else:
try:
data[field.name] = field.to_python(field_value)
except Exception as e:
raise base.DeserializationError.WithData(e, d['model'], d.get('pk'), field_value)
obj = base.build_instance(Model, data, db)
yield base.DeserializedObject(obj, m2m_data)
def _get_model(model_identifier):
"""
Helper to look up a model from an "app_label.model_name" string.
"""
try:
return apps.get_model(model_identifier)
except (LookupError, TypeError):
raise base.DeserializationError("Invalid model identifier: '%s'" % model_identifier)
|
wojnilowicz/git-cola
|
cola/models/dag.py
|
Python
|
gpl-2.0
| 8,995
| 0.000222
|
from __future__ import division, absolute_import, unicode_literals
import json
from .. import core
from .. import utils
from ..git import git
from ..observable import Observable
# put summary at the end b/c it can contain
# any number of funky characters, including the separator
logfmt = 'format:%H%x01%P%x01%d%x01%an%x01%ad%x01%ae%x01%s'
logsep = chr(0x01)
class CommitFactory(object):
root_generation = 0
commits = {}
@classmethod
def reset(cls):
cls.commits.clear()
cls.root_generation = 0
@classmethod
def new(cls, sha1=None, log_entry=None):
if not sha1 and log_entry:
sha1 = log_entry[:40]
try:
commit = cls.commits[sha1]
if log_entry and not commit.parsed:
commit.parse(log_entry)
            cls.root_generation = max(commit.generation,
                                      cls.root_generation)
except KeyError:
            commit = Commit(sha1=sha1,
log_entry=log_entry)
if not log_entry:
cls.root_generation += 1
commit.generation = max(commit.generation,
cls.root_generation)
cls.commits[sha1] = commit
return commit
class DAG(Observable):
ref_updated = 'ref_updated'
count_updated = 'count_updated'
def __init__(self, ref, count):
Observable.__init__(self)
self.ref = ref
self.count = count
self.overrides = {}
def set_ref(self, ref):
changed = ref != self.ref
if changed:
self.ref = ref
self.notify_observers(self.ref_updated)
return changed
def set_count(self, count):
changed = count != self.count
if changed:
self.count = count
self.notify_observers(self.count_updated)
return changed
def set_arguments(self, args):
if args is None:
return
if self.set_count(args.count):
self.overrides['count'] = args.count
if hasattr(args, 'args') and args.args:
ref = core.list2cmdline(args.args)
if self.set_ref(ref):
self.overrides['ref'] = ref
def overridden(self, opt):
return opt in self.overrides
def paths(self):
all_refs = utils.shell_split(self.ref)
if '--' in all_refs:
all_refs = all_refs[all_refs.index('--'):]
return [p for p in all_refs if p and core.exists(p)]
class Commit(object):
root_generation = 0
__slots__ = ('sha1',
'summary',
'parents',
'children',
'tags',
'author',
'authdate',
'email',
'generation',
'parsed')
def __init__(self, sha1=None, log_entry=None):
self.sha1 = sha1
self.summary = None
self.parents = []
self.children = []
self.tags = set()
self.email = None
self.author = None
self.authdate = None
self.parsed = False
self.generation = CommitFactory.root_generation
if log_entry:
self.parse(log_entry)
def parse(self, log_entry, sep=logsep):
self.sha1 = log_entry[:40]
after_sha1 = log_entry[41:]
details = after_sha1.split(sep, 5)
(parents, tags, author, authdate, email, summary) = details
self.summary = summary and summary or ''
self.author = author and author or ''
self.authdate = authdate or ''
self.email = email and email or ''
if parents:
generation = None
for parent_sha1 in parents.split(' '):
parent = CommitFactory.new(sha1=parent_sha1)
parent.children.append(self)
if generation is None:
generation = parent.generation+1
self.parents.append(parent)
generation = max(parent.generation+1, generation)
self.generation = generation
if tags:
for tag in tags[2:-1].split(', '):
self.add_label(tag)
self.parsed = True
return self
def add_label(self, tag):
"""Add tag/branch labels from `git log --decorate ....`"""
if tag.startswith('tag: '):
tag = tag[5:] # tag: refs/
elif tag.startswith('refs/remotes/'):
tag = tag[13:] # refs/remotes/
elif tag.startswith('refs/heads/'):
tag = tag[11:] # refs/heads/
if tag.endswith('/HEAD'):
return
# Git 2.4 Release Notes (draft)
# =============================
#
# Backward compatibility warning(s)
# ---------------------------------
#
# This release has a few changes in the user-visible output from
# Porcelain commands. These are not meant to be parsed by scripts, but
# the users still may want to be aware of the changes:
#
# * Output from "git log --decorate" (and "%d" format specifier used in
# the userformat "--format=<string>" parameter "git log" family of
# command takes) used to list "HEAD" just like other tips of branch
# names, separated with a comma in between. E.g.
#
# $ git log --decorate -1 master
# commit bdb0f6788fa5e3cacc4315e9ff318a27b2676ff4 (HEAD, master)
# ...
#
# This release updates the output slightly when HEAD refers to the tip
# of a branch whose name is also shown in the output. The above is
# shown as:
#
# $ git log --decorate -1 master
# commit bdb0f6788fa5e3cacc4315e9ff318a27b2676ff4 (HEAD -> master)
# ...
#
# C.f. http://thread.gmane.org/gmane.linux.kernel/1931234
head_arrow = 'HEAD -> '
if tag.startswith(head_arrow):
self.tags.add('HEAD')
self.tags.add(tag[len(head_arrow):])
else:
self.tags.add(tag)
def __str__(self):
return self.sha1
def data(self):
return {
'sha1': self.sha1,
'summary': self.summary,
'author': self.author,
'authdate': self.authdate,
'parents': [p.sha1 for p in self.parents],
'tags': self.tags,
}
def __repr__(self):
return json.dumps(self.data(), sort_keys=True, indent=4)
def is_fork(self):
''' Returns True if the node is a fork'''
return len(self.children) > 1
def is_merge(self):
        ''' Returns True if the node is a merge'''
return len(self.parents) > 1
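# Hedged illustration (editorial, not part of the original module): how
# add_label() normalizes `git log --decorate` refs, including the Git 2.4
# "HEAD -> branch" form discussed in the comment above.
#
#   c = Commit(sha1='0' * 40)
#   c.add_label('HEAD -> master')            # tags == {'HEAD', 'master'}
#   c.add_label('tag: v1.0')                 # 'tag: ' prefix stripped -> 'v1.0'
#   c.add_label('refs/remotes/origin/HEAD')  # '*/HEAD' labels are ignored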
class RepoReader(object):
def __init__(self, ctx, git=git):
self.ctx = ctx
self.git = git
self._proc = None
self._objects = {}
self._cmd = ['git', 'log',
'--topo-order',
'--reverse',
'--pretty='+logfmt]
self._cached = False
"""Indicates that all data has been read"""
self._idx = -1
"""Index into the cached commits"""
self._topo_list = []
"""List of commits objects in topological order"""
cached = property(lambda self: self._cached)
"""Return True when no commits remain to be read"""
def __len__(self):
return len(self._topo_list)
def reset(self):
CommitFactory.reset()
if self._proc:
self._topo_list = []
self._proc.kill()
self._proc = None
self._cached = False
def __iter__(self):
if self._cached:
return self
self.reset()
return self
def next(self):
if self._cached:
try:
self._idx += 1
return self._topo_list[self._idx]
except IndexError:
self._idx = -1
raise StopIteration
if self._proc is None:
ref_args = utils.shell_split(self.ctx.ref)
cmd = self.
|
luisgustavossdd/TBD
|
client/pygameclient/widgets/regnancyStyle.py
|
Python
|
gpl-3.0
| 3,306
| 0.003932
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import pygame
import os
fullname = os.path.join('res', 'gui')
def init(gui):
buttonsurf = pygame.image.load(os.path.join(fullname, 'button.png')).convert_alpha()
closesurf = pygame.image.load(os.path.join(fullname,
'closebutton.png')).convert_alpha()
shadesurf = pygame.image.load(os.path.join(fullname,
'shadebutton.png')).convert_alpha()
checksurf = pygame.image.load(os.path.join(fullname, 'checkbox.png')).convert_alpha()
optionsurf = pygame.image.load(os.path.join(fullname,
'optionbox.png')).convert_alpha()
combosurf = pygame.image.load(os.path.join(fullname, 'combobox.png')).convert_alpha()
gui.defaultFont = pygame.font.Font(os.path.join('res/fonts',
'font.ttf'), 19)
gui.defaultLabelStyle = {'font-color': (255, 255, 255), 'font': gui.defaultFont,
'autosize': True, "antialias": True,
'border-width': 0, 'border-color': (255,
255, 255), 'wordwrap': False}
gui.defaultButtonStyle = gui.createButtonStyle(
gui.defaultFont,
(0, 0, 0),
buttonsurf,
4,
1,
4,
4,
1,
4,
4,
1,
4,
4,
1,
4,
)
closeButtonStyle = gui.createImageButtonStyle(closesurf, 20)
shadeButtonStyle = gui.createImageButtonStyle(shadesurf, 20)
gui.defaultWindowStyle = {
'font': gui.defaultFont,
'font-color': (255, 255, 255),
'bg-color': (0, 0, 0, 150),
'shaded-bg-color': (0, 50, 100, 100),
'shaded-font-color': (255, 200, 0),
'border-width': 1,
'border-color': (150, 150, 150, 255),
'offset': (5, 5),
'close-button-style': closeButtonStyle,
'shade-button-style': shadeButtonStyle,
}
gui.defaultTextBoxStyle = {
'font': gui.defaultFont,
'font-color': (255, 255, 255),
'bg-color-normal': (55, 55, 55),
'bg-color-focus': (70, 70, 80),
'border-color-normal': (0, 0, 0),
'border-color-focus': (0, 50, 50),
'border-width': 1,
'appearence': gui.APP_3D,
"antialias": True,
'offset': (4, 4),
}
gui.defaultCheckBoxStyle = gui.createCheckBoxStyle(gui.defaultFont,
checksurf, 12, (255, 255, 255), (100, 100, 100), autosize=
True)
gui.defaultOptionBoxStyle = gui.createOptionBoxStyle(gui.defaultFont,
optionsurf, 12, (255, 255, 255), (100, 100, 100), autosize=
True)
gui.defaultListBoxStyle = {
        'font': gui.defaultFont,
        'font-color': (255, 255, 255),
        'font-color-selected': (0, 0, 0),
'bg-color': (55, 55, 55),
'bg-color-selected': (160, 180, 200),
'bg-color-over': (60, 70, 80),
'border-width': 1,
'border-color': (0, 0, 0),
'item-height': 22,
'padding': 2,
'autosize': False,
}
gui.defaultComboBoxStyle = gui.createComboBoxStyle(gui.defaultFont,
combosurf, 15, (255, 255, 255), borderwidth=1, bordercolor=(31,
52, 78), bgcolor=(55, 55, 55))
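# Hedged usage sketch (editorial; `gui` is the client's GUI framework object,
# assumed to expose createButtonStyle() etc. as used above): init() is meant
# to be called once, after pygame's display and font systems are ready, e.g.
#
#   import pygame
#   pygame.init()
#   screen = pygame.display.set_mode((800, 600))
#   init(gui)   # installs the default widget styles on the framework object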
|
lskillen/pylucene
|
test/test_PrefixFilter.py
|
Python
|
apache-2.0
| 4,217
| 0.000949
|
# ====================================================================
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ====================================================================
import sys, lucene, unittest
from PyLuceneTestCase import PyLuceneTestCase
from org.apache.lucene.document import Document, Field, StringField
from org.apache.lucene.index import Term
from org.apache.lucene.search import ConstantScoreQuery, PrefixFilter
class PrefixFilterTestCase(PyLuceneTestCase):
"""
Unit tests ported from Java Lucene
"""
def testPrefixFilter(self):
writer = self.getWriter()
categories = ["/Computers/Linux",
"/Computers/Mac/One",
"/Computers/Mac/Two",
"/Computers/Windows"]
for category in categories:
doc = Document()
doc.add(Field("category", category, StringField.TYPE_STORED))
writer.addDocument(doc)
writer.close()
# PrefixFilter combined with ConstantScoreQuery
filter = PrefixFilter(Term("category", "/Computers"))
query = ConstantScoreQuery(filter)
        searcher = self.getSearcher()
topDocs = searcher.search(query, 50)
self.assertEqual(4, topDocs.totalHits,
"All documents in /Computers category and below")
# test middle of values
filter = PrefixFilter(Term("category", "/Computers/Mac"))
query = ConstantScoreQuery(filter)
topDocs = searcher.search(query, 50)
self.assertEqual(2, topDocs.totalHits, "Two in /Computers/Mac")
# test start of values
filter = PrefixFilter(Term("category", "/Computers/Linux"))
query = ConstantScoreQuery(filter)
topDocs = searcher.search(query, 50)
self.assertEqual(1, topDocs.totalHits, "One in /Computers/Linux")
# test end of values
filter = PrefixFilter(Term("category", "/Computers/Windows"))
query = ConstantScoreQuery(filter)
topDocs = searcher.search(query, 50)
self.assertEqual(1, topDocs.totalHits, "One in /Computers/Windows")
# test non-existant
filter = PrefixFilter(Term("category", "/Computers/ObsoleteOS"))
query = ConstantScoreQuery(filter)
topDocs = searcher.search(query, 50)
self.assertEqual(0, topDocs.totalHits, "no documents")
# test non-existant, before values
filter = PrefixFilter(Term("category", "/Computers/AAA"))
query = ConstantScoreQuery(filter)
topDocs = searcher.search(query, 50)
self.assertEqual(0, topDocs.totalHits, "no documents")
# test non-existant, after values
filter = PrefixFilter(Term("category", "/Computers/ZZZ"))
query = ConstantScoreQuery(filter)
topDocs = searcher.search(query, 50)
self.assertEqual(0, topDocs.totalHits, "no documents")
# test zero-length prefix
filter = PrefixFilter(Term("category", ""))
query = ConstantScoreQuery(filter)
topDocs = searcher.search(query, 50)
self.assertEqual(4, topDocs.totalHits, "all documents")
# test non-existant field
filter = PrefixFilter(Term("nonexistantfield", "/Computers"))
query = ConstantScoreQuery(filter)
topDocs = searcher.search(query, 50)
self.assertEqual(0, topDocs.totalHits, "no documents")
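# Editorial sketch (assumes an index populated as in setUp above): outside a
# test, the same constant-score prefix-matching pattern reads as
#
#   query = ConstantScoreQuery(PrefixFilter(Term("category", "/Computers")))
#   top_docs = searcher.search(query, 50)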
if __name__ == "__main__":
lucene.initVM(vmargs=['-Djava.awt.headless=true'])
if '-loop' in sys.argv:
sys.argv.remove('-loop')
while True:
try:
unittest.main()
except:
pass
else:
unittest.main()
|
GhalebKhaled/fb-bot-test
|
bot/api/views.py
|
Python
|
apache-2.0
| 406
| 0.004926
|
import FBBot
fb_client = FBBot.FBBotClient()
class WebhookView(FBBot.FBBotWebhookView):
def handle_message(self, message, sender_id):
        if message == "logo":
fb_client.send_image(sender_id, "https://d2for33x7as0fp.cloudfront.net/static/images/53-logo.71a393299d20.png")
else:
fb_client.send_message(sender_id, "I can only repeat right now:{}".format(message))
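# Hedged wiring sketch (editorial; assumes FBBotWebhookView is a Django
# class-based view -- an assumption, since FBBot's API is not shown here):
#
#   # urls.py (illustrative)
#   from django.conf.urls import url
#   from .views import WebhookView
#   urlpatterns = [url(r'^webhook/$', WebhookView.as_view())]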
|
JiaruZhang/Five
|
main/migrations/0002_auto_20170512_1731.py
|
Python
|
apache-2.0
| 2,835
| 0.001764
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11a1 on 2017-05-12 17:31
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='ArrivalMessage',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('time', models.DateField(auto_now_add=True)),
('userID', models.BigIntegerField()),
('bookID', models.BigIntegerField()),
],
),
migrations.CreateModel(
name='Book',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('bookID', models.BigIntegerField()),
('ISBN', models.CharField(max_length=20)),
('bookName', models.CharField(max_length=20)),
('bookType', models.CharField(max_length=10)),
('price', models.IntegerField()),
],
),
migrations.CreateModel(
name='FavoredBook',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('userID', models.BigIntegerField()),
('bookID', models.BigIntegerField()),
],
),
migrations.CreateModel(
name='StudentUser',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('userID', models.BigIntegerField(unique=True)),
('userName', models.CharField(max_length=20)),
('password', models.CharField(max_length=20)),
('name', models.CharField(max_length=20)),
('mailBox', models.EmailField(max_length=254)),
('phoneNumber', models.CharField(max_length=20)),
],
),
migrations.CreateModel(
name='SubscribedBook',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('userID', models.BigIntegerField()),
('bookID', models.BigIntegerField()),
],
),
migrations.DeleteModel(
name='allBook',
),
migrations.DeleteModel(
name='favor',
),
migrations.DeleteModel(
name='message',
),
migrations.DeleteModel(
name='student_users',
),
migrations.DeleteModel(
name='subscribeBooks',
),
]
|
bellowsj/aiopogo
|
aiopogo/pogoprotos/networking/responses/add_fort_modifier_response_pb2.py
|
Python
|
mit
| 4,943
| 0.00526
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: pogoprotos/networking/responses/add_fort_modifier_response.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from pogoprotos.networking.responses import fort_details_response_pb2 as pogoprotos_dot_networking_dot_responses_dot_fort__details__response__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='pogoprotos/networking/responses/add_fort_modifier_response.proto',
package='pogoprotos.networking.responses',
syntax='proto3',
serialized_pb=_b('\n@pogoprotos/networking/responses/add_fort_modifier_response.proto\x12\x1fpogoprotos.networking.responses\x1a;pogoprotos/networking/responses/fort_details_response.proto\"\xc2\x02\n\x17\x41\x64\x64\x46ortModifierResponse\x12O\n\x06result\x18\x01 \x01(\x0e\x32?.pogoprotos.networking.responses.AddFortModifierResponse.Result\x12J\n\x0c\x66ort_details\x18\x02 \x01(\x0b\x32\x34.pogoprotos.networking.responses.FortDetailsResponse\"\x89\x01\n\x06Result\x12\x11\n\rNO_RESULT_SET\x10\x00\x12\x0b\n\x07SUCCESS\x10\x01\x12\x1d\n\x19\x46ORT_ALREADY_HAS_MODIFIER\x10\x02\x12\x10\n\x0cTOO_FAR_AWAY\x10\x03\x12\x18\n\x14NO_ITEM_IN_INVENTORY\x10\x04\x12\x14\n\x10POI_INACCESSIBLE\x10\x05\x62\x06proto3')
,
dependencies=[pogoprotos_dot_networking_dot_responses_dot_fort__details__response__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_ADDFORTMODIFIERRESPONSE_RESULT = _descriptor.EnumDescriptor(
name='Result',
full_name='pogoprotos.networking.responses.AddFortModifierResponse.Result',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='NO_RESULT_SET', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SUCCESS', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='FORT_ALREADY_HAS_MODIFIER', index=2, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='TOO_FAR_AWAY', index=3, number=3,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='NO_ITEM_IN_INVENTORY', index=4, number=4,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='POI_INACCESSIBLE', index=5, number=5,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=348,
serialized_end=485,
)
_sym_db.RegisterEnumDescriptor(_ADDFORTMODIFIERRESPONSE_RESULT)
_ADDFORTMODIFIERRESPONSE = _descriptor.Descriptor(
name='AddFortModifierResponse',
full_name='pogoprotos.networking.responses.AddFortModifierResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='result', full_name='pogoprotos.networking.responses.AddFortModifierResponse.result', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='fort_details', full_name='pogoprotos.networking.responses.AddFortModifierResponse.fort_details', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_ADDFORTMODIFIERRESPONSE_RESULT,
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=163,
serialized_end=485,
)
_ADDFORTMODIFIERRESPONSE.fields_by_name['result'].enum_type = _ADDFORTMODIFIERRESPONSE_RESULT
_ADDFORTMODIFIERRESPONSE.fields_by_name['fort_details'].message_type = pogoprotos_dot_networking_dot_responses_dot_fort__details__response__pb2._FORTDETAILSRESPONSE
_ADDFORTMODIFIERRESPONSE_RESULT.containing_type = _ADDFORTMODIFIERRESPONSE
DESCRIPTOR.message_types_by_name['AddFortModifierResponse'] = _ADDFORTMODIFIERRESPONSE
AddFortModifierResponse = _reflection.GeneratedProtocolMessageType('AddFortModifierResponse', (_message.Message,), dict(
DESCRIPTOR = _ADDFORTMODIFIERRESPONSE,
__module__ = 'pogoprotos.networking.responses.add_fort_modifier_response_pb2'
# @@protoc_insertion_point(class_scope:pogoprotos.networking.responses.AddFortModifierResponse)
))
_sym_db.RegisterMessage(AddFortModifierResponse)
# @@protoc_insertion_point(module_scope)
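# Hedged usage sketch (standard protobuf-generated message API, not specific
# to this repo):
#
#   resp = AddFortModifierResponse()
#   resp.result = AddFortModifierResponse.SUCCESS
#   wire = resp.SerializeToString()
#   assert AddFortModifierResponse.FromString(wire).result == resp.result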
|
dbxnr/redditbot
|
redditbot.py
|
Python
|
gpl-3.0
| 4,394
| 0.002048
|
#!/usr/bin/env python3
import argparse
import feedparser
import logging
import praw
import re
import requests
from config import *
from bs4 import BeautifulSoup
from html2text import html2text
logging.basicConfig(format='%(asctime)s::%(levelname)s:%(message)s',
filename='redditbot.log',
level=logging.INFO)
# Argument parser
parser = argparse.ArgumentParser()
# Specify username to use
parser.add_argument('-U', '--user', nargs=2, help="Usage: [username][password]")
# Overrides subreddit and storefront config values.
parser.add_argument('-d', '--destination', help="Usage: [subreddit][steamappid]", nargs=2)
# Verbose flag
parser.add_argument('-v', '--verbose', action='store_true')
args = parser.parse_args()
if args.user:
username = args.user[0]
password = args.user[1]
if args.destination:
subreddit = args.destination[0]
    steam_app_id = re.findall(r'(\d+)', args.destination[1])[0]  # first numeric run, i.e. the Steam app id
if args.verbose:
verbose = True
# Authentication sequence
def login():
"""Opens an authenticated connection to Reddit"""
if args.user:
r = praw.Reddit(client_id=client_id,
client_secret=client_secret,
username=username,
password=password,
                        user_agent=user_agent)
    else:
        r = praw.Reddit(client_id=client_id,
client_secret=client_secret,
refresh_token=refresh_token,
user_agent=user_agent)
logging.debug('Logged in as {}'.format(r.user.me()))
return r
# Steam news retrieval
def get_news(r, steam_app_id):
"""Parses a JSON nugget checking for new items, returns them if so"""
results = r.request(method='GET', path=steam_url.format(steam_app_id))
mr = results["appnews"]["newsitems"][0]
mr_title, mr_url, mr_contents, mr_gid = mr['title'], mr['url'], mr['contents'], mr['gid']
if check_history(mr_gid):
return [mr_title, mr_url, mr_contents, mr_gid]
def check_history(gid):
"""Checks to see if news has been posted before"""
with open(gid_path, 'r') as file:
if gid in file.read():
return False
else:
return True
# Parse data from an RSS feed
def get_rss(rss_url):
"""Parses an RSS url and returns some information"""
p = feedparser.parse(rss_url)
mr = p.entries[0]
mr_title, mr_url, mr_gid = mr.published[:-6], mr.link, mr.title
r = requests.get(mr.link)
soup = BeautifulSoup(r.text, 'lxml')
soup_text = soup.section
mr_contents = html2text(str(soup_text)) # Converts HTML to markdown
if check_history(mr_gid):
return [mr_title, mr_url, mr_contents, mr_gid]
# Submit post
def post_news(news, subreddit):
"""Formats the text into Markdown and posts it to Reddit"""
content = news[2]
content = content.replace('[*]',' * ') # Bullet point fix
content += "\n\n [Link to website]({})".format(news[1])
# TODO: [url] handling
destination = r.subreddit(subreddit)
try:
# Submit the post
destination.submit(title="[Patch Notes] " + news[0], selftext=content)
        msg = 'Wrote {} to /r/{}...'.format(news[0], subreddit)
        print(msg)
        logging.info(msg)
except praw.exceptions.APIException as e:
logging.critical(e)
print(e)
quit()
# Writes the ID to a file
with open(gid_path, 'a') as file:
file.write(news[3] + "\n")
def post_steam_items():
# TODO: Check for presence first
for subreddit, steam_app_id in steam_sources.items():
news = get_news(r, steam_app_id)
try:
post_news(news, subreddit)
except TypeError:
logging.debug('No recent Steam items')
def post_rss_items():
# TODO: Check for presence first
for subreddit, rss_url in rss_sources.items():
news = get_rss(rss_url)
try:
post_news(news, subreddit)
except TypeError:
logging.debug('No recent RSS items')
# Does the stuff
r = login()
if steam_sources:
post_steam_items()
if rss_sources:
post_rss_items()
logging.info("Script executed")
# TODO: --help messages for argparse
# TODO: Add flag for posting maintainer message
# TODO: Add ability to post specific news items (currently limited to most recent)
# TODO: Update requirements, including lxml
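# Hedged sketch of the config.py module pulled in via `from config import *`
# above (names inferred from their uses in this file; values are placeholders):
#
#   client_id = '...'
#   client_secret = '...'
#   refresh_token = '...'
#   user_agent = 'redditbot/1.0'
#   steam_url = '.../appnews/?appid={}'        # consumed by get_news()
#   gid_path = 'gids.txt'                      # history file of posted IDs
#   steam_sources = {'subreddit': 'appid'}     # used by post_steam_items()
#   rss_sources = {'subreddit': 'http://...'}  # used by post_rss_items()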
|
sandeep82945/audience-predictor
|
src/python/read_data.py
|
Python
|
cc0-1.0
| 396
| 0.037879
|
#Read data from stdin
import sys, json
def read_sentence():
lines = sys.stdin.readlines()
    #Since our input would only be having one line, parse our JSON data from that
return json.loads(lines[0])
def read_sentence1():
line = sys.stdin.readline()
    #Since our input would only be having one line, parse our JSON data from that
return line
def convert(obj):
return json.dumps(obj)
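# Hedged driver (editorial, not in the original): lets the module be run as
#   echo '{"text": "hello"}' | python read_data.py
if __name__ == '__main__':
    print(convert(read_sentence()))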
|
angadpc/Alexa-Project-
|
twilio/rest/taskrouter/v1/workspace/__init__.py
|
Python
|
mit
| 24,809
| 0.001491
|
# coding=utf-8
"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from twilio.base import deserialize
from twilio.base import values
from twilio.base.instance_context import InstanceContext
from twilio.base.instance_resource import InstanceResource
from twilio.base.list_resource import ListResource
from twilio.base.page import Page
from twilio.rest.taskrouter.v1.workspace.activity import ActivityList
from twilio.rest.taskrouter.v1.workspace.event import EventList
from twilio.rest.taskrouter.v1.workspace.task import TaskList
from twilio.rest.taskrouter.v1.workspace.task_channel import TaskChannelList
from twilio.rest.taskrouter.v1.workspace.task_queue import TaskQueueList
from twilio.rest.taskrouter.v1.workspace.worker import WorkerList
from twilio.rest.taskrouter.v1.workspace.workflow import WorkflowList
from twilio.rest.taskrouter.v1.workspace.workspace_statistics import WorkspaceStatisticsList
class WorkspaceList(ListResource):
def __init__(self, version):
"""
Initialize the WorkspaceList
:param Version version: Version that contains the resource
:returns: twilio.rest.taskrouter.v1.workspace.WorkspaceList
:rtype: twilio.rest.taskrouter.v1.workspace.WorkspaceList
"""
super(WorkspaceList, self).__init__(version)
# Path Solution
self._solution = {}
self._uri = '/Workspaces'.format(**self._solution)
def stream(self, friendly_name=values.unset, limit=None, page_size=None):
"""
Streams WorkspaceInstance records from the API as a generator stream.
This operation lazily loads records as efficiently as possible until the limit
is reached.
The results are returned as a generator, so this operation is memory efficient.
:param unicode friendly_name: The friendly_name
:param int limit: Upper limit for the number of records to return. stream()
guarantees to never return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, stream() will attempt to read the
limit with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.taskrouter.v1.workspace.WorkspaceInstance]
"""
limits = self._version.read_limits(limit, page_size)
page = self.page(
friendly_name=friendly_name,
page_size=limits['page_size'],
)
return self._version.stream(page, limits['limit'], limits['page_limit'])
def list(self, friendly_name=values.unset, limit=None, page_size=None):
"""
Lists WorkspaceInstance records from the API as a list.
        Unlike stream(), this operation is eager and will load `limit` records into
memory before returning.
:param unicode friendly_name: The friendly_name
:param int limit: Upper limit for the number of records to return. list() guarantees
never to return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, list() will attempt to read the limit
with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.taskrouter.v1.workspace.WorkspaceInstance]
"""
return list(self.stream(
friendly_name=friendly_name,
limit=limit,
page_size=page_size,
))
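    # Hedged usage sketch (editorial; assumes an authenticated twilio Client):
    #
    #   from twilio.rest import Client
    #   client = Client(account_sid, auth_token)
    #   for ws in client.taskrouter.workspaces.stream(limit=20):
    #       print(ws.friendly_name)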
def page(self, friendly_name=values.unset, page_token=values.unset,
page_number=values.unset, page_size=values.unset):
"""
Retrieve a single page of WorkspaceInstance records from the API.
Request is executed immediately
:param unicode friendly_name: The friendly_name
:param str page_token: PageToken provided by the API
:param int page_number: Page Number, this value is simply for client state
:param int page_size: Number of records to return, defaults to 50
:returns: Page of WorkspaceInstance
:rtype: twilio.rest.taskrouter.v1.workspace.WorkspacePage
"""
params = values.of({
'FriendlyName': friendly_name,
'PageToken': page_token,
'Page': page_number,
'PageSize': page_size,
})
response = self._version.page(
'GET',
self._uri,
params=params,
)
return WorkspacePage(self._version, response, self._solution)
def create(self, friendly_name, event_callback_url=values.unset,
events_filter=values.unset, multi_task_enabled=values.unset,
template=values.unset, prioritize_queue_order=values.unset):
"""
Create a new WorkspaceInstance
:param unicode friendly_name: The friendly_name
:param unicode event_callback_url: The event_callback_url
:param unicode events_filter: The events_filter
:param bool multi_task_enabled: The multi_task_enabled
:param unicode template: The template
:param WorkspaceInstance.QueueOrder prioritize_queue_order: The prioritize_queue_order
:returns: Newly created WorkspaceInstance
:rtype: twilio.rest.taskrouter.v1.workspace.WorkspaceInstance
"""
data = values.of({
'FriendlyName': friendly_name,
'EventCallbackUrl': event_callback_url,
'EventsFilter': events_filter,
'MultiTaskEnabled': multi_task_enabled,
'Template': template,
'PrioritizeQueueOrder': prioritize_queue_order,
})
payload = self._version.create(
'POST',
self._uri,
data=data,
)
return WorkspaceInstance(
self._version,
payload,
)
def get(self, sid):
"""
Constructs a WorkspaceContext
:param sid: The sid
:returns: twilio.rest.taskrouter.v1.workspace.WorkspaceContext
:rtype: twilio.rest.taskrouter.v1.workspace.WorkspaceContext
"""
return WorkspaceContext(
self._version,
sid=sid,
)
def __call__(self, sid):
"""
Constructs a WorkspaceContext
:param sid: The sid
:returns: twilio.rest.taskrouter.v1.workspace.WorkspaceContext
:rtype: twilio.rest.taskrouter.v1.workspace.WorkspaceContext
"""
return WorkspaceContext(
self._version,
sid=sid,
)
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Taskrouter.V1.WorkspaceList>'
class WorkspacePage(Page):
def __init__(self, version, response, solution):
"""
Initialize the WorkspacePage
:param Version version: Version that contains the resource
:param Response response: Response from the API
:returns: twilio.rest.taskrouter.v1.workspace.WorkspacePage
:rtype: twilio.rest.taskrouter.v1.workspace.WorkspacePage
"""
super(WorkspacePage, self).__init__(version, response)
# Path Solution
self._solution = solution
def get_instance(self, payload):
"""
Build an instance of WorkspaceInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.taskrouter.v1.workspace.WorkspaceInstance
:rtype: twilio.rest.taskrouter.v1.workspace.WorkspaceInstance
"""
return WorkspaceInstance(
self._version,
|
Pylons/kai
|
kai/controllers/comments.py
|
Python
|
bsd-3-clause
| 2,956
| 0.004736
|
import logging
from pylons import request, response, session, tmpl_context as c, url
from pylons.controllers.util import abort, redirect
from pylons.templating import render_mako_def
from kai.lib.base import BaseController, render
from kai.lib.helpers import textilize
from kai.lib.serialization import render_feed
from kai.model import Comment
log = logging.getLogger(__name__)
class CommentsController(BaseController):
def preview(self):
data = request.POST['content']
return textilize(data)
def create(self, doc_id):
if not c.user:
abort(401)
        # Ensure the doc exists
        doc = self.db.get(doc_id)
        if not doc:
abort(404)
comment = Comment(doc_id=doc_id, displayname=c.user.displayname,
email=c.user.email, human_id=c.user.id,
content=request.POST['content'])
comment.store(self.db)
return ''
def delete(self, id):
if not c.user or not c.user.in_group('admin'):
abort(401)
# Ensure doc exists
doc = self.db.get(id)
if not doc:
abort(404)
# Make sure its a comment
if not doc['type'] == 'Comment':
abort(404)
self.db.delete(doc)
return ''
def index(self, format='html'):
if format == 'html':
abort(404)
elif format in ['atom', 'rss']:
# Pull comments and grab the docs with them for their info
comments = list(Comment.by_anytime(c.db, descending=True, limit=20))
commentdata = []
for comment_doc in comments:
comment = {}
displayname = comment_doc.displayname or 'Anonymous'
comment['created'] = comment_doc.created
id = comment_doc.id
doc = c.db.get(comment_doc.doc_id)
if doc['type'] == 'Traceback':
comment['title'] = '%s: %s' % (doc['exception_type'], doc['exception_value'])
else:
comment['title'] = doc.get('title', '-- No title --')
comment['type'] = doc['type']
comment['link'] = render_mako_def(
'/widgets.mako', 'comment_link', title=comment['title'],
comment_id=comment_doc.id, doc=doc, type=doc['type'],
urlonly=True).strip()
comment['doc_id'] = comment_doc.doc_id
comment['description'] = textilize(comment_doc.content)
commentdata.append(comment)
response.content_type = 'application/atom+xml'
return render_feed(
title="PylonsHQ Comment Feed", link=url.current(qualified=True),
description="Recent PylonsHQ comments", objects=commentdata,
pub_date='created')
|
maikodaraine/EnlightenmentUbuntu
|
bindings/python/python-efl/tests/elementary/test_01_basics.py
|
Python
|
unlicense
| 652
| 0.003067
|
#!/usr/bin/env python
import unittest
from efl.eo import Eo
from efl import elementary
from efl.elementary.window import Window, ELM_WIN_BASIC
from efl.elementary.button import Button
elementary.init()
class TestElmBasics(unittest.TestCase):
def setUp(self):
self.o = Window("t", ELM_WIN_BASIC)
def tearDown(self):
self.o.delete()
def testParentGet1(self):
self.assertIsNone(self.o.parent_get())
def testParentGet2(self):
o = Button(self.o)
self.assertEqual(Eo.parent_get(o), self.o)
o.delete()
if __name__ == '__main__':
unittest.main(verbosity=2)
elementary.shutdown()
|
blacksky0000/tools
|
tumblr/dbconnect.py
|
Python
|
mit
| 463
| 0.008639
|
import pymongo
import configparser
def db():
config = configparser.RawConfigParser()
config.read('./.config')
host = config.get('tumblr', 'host')
port = config.get('tumblr', 'port')
user = config.get('tumblr', 'user')
passwd = config.get('tumblr', 'passwd')
client = pymongo.MongoClient(host, int(port))
client.admin.authenticate(user, passwd, mechanism = 'SCRAM-SHA-1', source='test')
testDB = client.test
return testDB
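# Hedged sketch of the expected ./.config file (section and keys inferred from
# the config.get() calls above; values are placeholders):
#
#   [tumblr]
#   host = localhost
#   port = 27017
#   user = someuser
#   passwd = secret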
|
ProfessorX/Config
|
.PyCharm30/system/python_stubs/-1247972723/PyQt4/QtGui/__init__/QGestureEvent.py
|
Python
|
gpl-2.0
| 2,497
| 0.007609
|
# encoding: utf-8
# module PyQt4.QtGui
# from /usr/lib/python2.7/dist-packages/PyQt4/QtGui.so
# by generator 1.135
# no doc
# imports
import PyQt4.QtCore as __PyQt4_QtCore
class QGestureEvent(__PyQt4_QtCore.QEvent):
"""
QGestureEvent(list-of-QGesture)
QGestureEvent(QGestureEvent)
"""
def accept(self, *__args): # real signature unknown; restored from __doc__ with multiple overloads
"""
QGestureEvent.accept()
QGestureEvent.accept(QGesture)
QGestureEvent.accept(Qt.GestureType)
"""
pass
def activeGestures(self): # real signature unknown; restored from __doc__
""" QGestureEvent.activeGestures() -> list-of-QGesture """
pass
def canceledGestures(self): # real signature unknown; restored from __doc__
""" QGestureEvent.canceledGestures() -> list-of-QGesture """
pass
def gesture(self, Qt_GestureType): # real signature unknown; restored from __doc__
""" QGestureEvent.gesture(Qt.GestureType) -> QGesture """
return QGesture
def gestures(self): # real signature unknown; restored from __doc__
""" QGestureEvent.gestures() -> list-of-QGesture """
pass
def ignore(self, *__args): # real signature unknown; restored from __doc__ with multiple overloads
"""
QGestureEvent.ignore()
QGestureEvent.ignore(QGesture)
QGestureEvent.ignore(Qt.GestureType)
"""
pass
def isAccepted(self, *__args): # real signature unknown; restored from __doc__ with multiple overloads
"""
QGestureEvent.isAccepted() -> bool
QGestureEvent.isAccepted(QGesture) -> bool
QGestureEvent.isAccepted(Qt.GestureType) -> bool
"""
return False
def mapToGraphicsScene(self, QPointF): # real signature unknown; restored from __doc__
""" QGestureEvent.mapToGraphicsScene(QPointF) -> QPointF """
pass
def setAccepted(self, *__args): # real signature unknown; restored from __doc__ with multiple overloads
"""
QGestureEvent.setAccepted(bool)
QGestureEvent.setAccepted(QGesture, bool)
QGestureEvent.setAccepted(Qt.GestureType, bool)
"""
pass
def widget(self): # real signature unknown; restored from __doc__
""" QGestureEvent.widget() -> QWidget """
return QWidget
def __init__(self, *__args): # real signature unknown; restored from __doc__ with multiple overloads
pass
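# Hedged usage sketch (standard Qt gesture flow, editorial -- not from this
# generated stub): a widget opts in with grabGesture() and then receives
# QGestureEvent through its event() handler, e.g.
#
#   class GestureWidget(QWidget):
#       def __init__(self):
#           QWidget.__init__(self)
#           self.grabGesture(__PyQt4_QtCore.Qt.PinchGesture)
#       def event(self, e):
#           if e.type() == __PyQt4_QtCore.QEvent.Gesture:
#               e.accept(__PyQt4_QtCore.Qt.PinchGesture)
#               return True
#           return QWidget.event(self, e)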
|
chichaj/PyFont
|
PyFont/FontRuntime.py
|
Python
|
mit
| 5,128
| 0.00117
|
#!/usr/bin/python3
import tkinter
import PIL.Image
import PIL.ImageTk
from tkinter.ttk import Progressbar as pbar
from PyFont import Font, SVG
class TkFont():
CHARY = 200
CHARX = 50
LINEY = CHARY / 2
MAIN_COLOR = '#FFFFFF'
def set_label(self):
tmp = self.words[-1].export_png_to_str()
photoImg = PIL.Image.open(tmp)
w, h = photoImg.size
nh = self.CHARY
coef = nh / h
nw = int(w * coef)
self.tmpx[-1] = int(self.words[-1].colors[SVG.SVG.LINK_COLOR_RIGHT].x *
coef) + self.CHARX
photoImg = photoImg.resize((nw, nh))#, PIL.Image.ANTIALIAS)
pix = photoImg.load()
found = False
miny = 0
for y in range(nh):
for x in range(nw):
if pix[x, y] != (0, 0, 0, 0):
miny = y
found = True
break
if found:
break
found = False
maxy = 0
for y in range(nh):
for x in range(nw):
if pix[x, nh - y - 1] != (0, 0, 0, 0):
maxy = nh - y - 1
found = True
break
if found:
break
if found:
photoImg = photoImg.crop((0, miny, nw, maxy))
photo = PIL.ImageTk.PhotoImage(photoImg)
self.labels[-1].place_forget()
self.labels[-1] = tkinter.Label(self.win, image=photo)
self.labels[-1].config(background=self.MAIN_COLOR)
self.labels[-1].image = photo
self.labels[-1].place(x = self.x[-1], y = self.y + miny)
def __init__(self, win, font, gui):
self.win = win
self.gui = gui
self.font = font
self.string = ""
self.words = []
self.labels = []
self.y = 0
self.x = [0]
self.tmpx = [0]
def backspace(self):
if not self.string:
return
if self.string[-1] == "\n":
self.tmpx = self.tmpx[:-1]
self.x = self.x[:-1]
self.y -= self.LINEY
elif self.string[-1] == " ":
self.tmpx = self.tmpx[:-1]
self.x[-1] -= self.tmpx[-1]
else:
self.words[-1].backspace()
self.set_label()
if self.string[-2:-1] in ["\n", " ", ""]:
self.words[-1].backspace()
self.words = self.words[:-1]
self.labels[-1].place_forget()
self.labels = self.labels[:-1]
self.string = self.string[:-1]
def ret(self):
self.y += self.LINEY
self.x += [0]
self.tmpx += [0]
self.string += "\n"
def space(self):
self.x[-1] += self.tmpx[-1]
self.tmpx += [0]
self.string += " "
def handle_char(self, c):
c = c.lower()
if c == "\b":
self.backspace()
elif c == "\r":
self.ret()
elif c == " ":
self.space()
elif c in self.font.chars:
svg = self.font.chr2svg(c)
if self.string[-1:] in ["\n", " ", ""]:
self.words += [svg]
self.labels += [tkinter.Label(self.win)]
else:
self.words[-1].link_with(svg)
self.set_label()
self.string += c
def theend(self):
if self.words:
svg = self.font.generate_svg("")
word = False
for c in self.string:
if c == " ":
word = False
svg.link_with(self.font.chr2svg(" "))
elif c == "\n":
word = False
svg.newline()
elif not word:
word = True
svg.link_with(self.words[0])
self.words = self.words[1:]
# bar.value += 100 / barlen
self.gui.the_end(svg)
self.win.destroy()
def export(self):
if self.words:
svg = self.font.generate_svg("")
word = False
for c in self.string:
if c == " ":
word = False
svg.link_with(self.font.chr2svg(" "))
elif c == "\n":
word = False
svg.newline()
elif not word:
word = True
                    svg.link_with(self.words[0])
self.words = self.words[1:]
self.gui.the_end(svg)
def get_svg(self):
if self.words:
svg = self.font.generate_svg("")
word = False
for c in self.string:
if c == " ":
word = False
svg.link_with(self.font.chr2svg(" "))
elif c == "\n":
word = False
svg.newline()
elif not word:
word = True
svg.link_with(self.words[0])
self.words = self.words[1:]
return svg
return None
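# Hedged wiring sketch (editorial; `font` and `gui` come from the caller):
# feed Tk key events into handle_char(), e.g.
#
#   win = tkinter.Tk()
#   tkfont = TkFont(win, font, gui)
#   win.bind('<Key>', lambda e: tkfont.handle_char(e.char))
#   win.mainloop()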
|
Southpaw-TACTIC/TACTIC
|
3rd_party/python2/site-packages/cherrypy/test/test_httpauth.py
|
Python
|
epl-1.0
| 6,303
| 0
|
from hashlib import md5, sha1
import cherrypy
from cherrypy._cpcompat import ntob
from cherrypy.lib import httpauth
from cherrypy.test import helper
class HTTPAuthTest(helper.CPWebCase):
@staticmethod
def setup_server():
class Root:
@cherrypy.expose
def index(self):
return 'This is public.'
class DigestProtected:
@cherrypy.expose
def index(self):
return "Hello %s, you've been authorized." % (
cherrypy.request.login)
class BasicProtected:
@cherrypy.expose
def index(self):
return "Hello %s, you've been authorized." % (
cherrypy.request.login)
class BasicProtected2:
@cherrypy.expose
def index(self):
return "Hello %s, you've been authorized." % (
cherrypy.request.login)
def fetch_users():
return {'test': 'test'}
def sha_password_encrypter(password):
return sha1(ntob(password)).hexdigest()
def fetch_password(username):
return sha1(b'test').hexdigest()
conf = {
'/digest': {
'tools.digest_auth.on': True,
'tools.digest_auth.realm': 'localhost',
'tools.digest_auth.users': fetch_users
},
'/basic': {
'tools.basic_auth.on': True,
'tools.basic_auth.realm': 'localhost',
'tools.basic_auth.users': {
'test': md5(b'test').hexdigest()
}
},
'/basic2': {
'tools.basic_auth.on': True,
'tools.basic_auth.realm': 'localhost',
'tools.basic_auth.users': fetch_password,
'tools.basic_auth.encrypt': sha_password_encrypter
}
}
root = Root()
root.digest = DigestProtected()
root.basic = BasicProtected()
root.basic2 = BasicProtected2()
cherrypy.tree.mount(root, config=conf)
def testPublic(self):
self.getPage('/')
self.assertStatus('200 OK')
self.assertHeader('Content-Type', 'text/html;charset=utf-8')
self.assertBody('This is public.')
def testBasic(self):
self.getPage('/basic/')
self.assertStatus(401)
self.assertHeader('WWW-Authenticate', 'Basic realm="localhost"')
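        # base64('test:test') is 'dGVzdDp0ZXN0'; the 'dGVzdDp0ZX60' token
        # below is deliberately corrupted and must be rejected with 401.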
self.getPage('/basic/', [('Authorization', 'Basic dGVzdDp0ZX60')])
self.assertStatus(401)
self.getPage('/basic/', [('Authorization', 'Basic dGVzdDp0ZXN0')])
self.assertStatus('200 OK')
self.assertBody("Hello test, you've been authorized.")
def testBasic2(self):
self.getPage('/basic2/')
self.assertStatus(401)
self.assertHeader('WWW-Authenticate', 'Basic realm="localhost"')
self.getPage('/basic2/', [('Authorization', 'Basic dGVzdDp0ZX60')])
self.assertStatus(401)
self.getPage('/basic2/', [('Authorization', 'Basic dGVzdDp0ZXN0')])
self.assertStatus('200 OK')
self.assertBody("Hello test, you've been authorized.")
def testDigest(self):
self.getPage('/digest/')
self.assertStatus(401)
value = None
for k, v in self.headers:
if k.lower() == 'www-authenticate':
if v.startswith('Digest'):
value = v
break
if value is None:
self._handlewebError(
                'Digest authentication scheme was not found')
value = value[7:]
items = value.split(', ')
tokens = {}
for item in items:
key, value = item.split('=')
tokens[key.lower()] = value
missing_msg = '%s is missing'
bad_value_msg = "'%s' was expecting '%s' but found '%s'"
nonce = None
if 'realm' not in tokens:
self._handlewebError(missing_msg % 'realm')
elif tokens['realm'] != '"localhost"':
self._handlewebError(bad_value_msg %
('realm', '"localhost"', tokens['realm']))
if 'nonce' not in tokens:
self._handlewebError(missing_msg % 'nonce')
else:
nonce = tokens['nonce'].strip('"')
if 'algorithm' not in tokens:
self._handlewebError(missing_msg % 'algorithm')
elif tokens['algorithm'] != '"MD5"':
self._handlewebError(bad_value_msg %
('algorithm', '"MD5"', tokens['algorithm']))
if 'qop' not in tokens:
self._handlewebError(missing_msg % 'qop')
elif tokens['qop'] != '"auth"':
self._handlewebError(bad_value_msg %
('qop', '"auth"', tokens['qop']))
# Test a wrong 'realm' value
base_auth = (
'Digest '
'username="test", '
'realm="wrong realm", '
'nonce="%s", '
'uri="/digest/", '
'algorithm=MD5, '
'response="%s", '
'qop=auth, '
'nc=%s, '
'cnonce="1522e61005789929"'
)
auth = base_auth % (nonce, '', '00000001')
params = httpauth.parseAuthorization(auth)
response = httpauth._computeDigestResponse(params, 'test')
auth = base_auth % (nonce, response, '00000001')
self.getPage('/digest/', [('Authorization', auth)])
self.assertStatus(401)
# Test that must pass
base_auth = (
'Digest '
'username="test", '
'realm="localhost", '
'nonce="%s", '
'uri="/digest/", '
'algorithm=MD5, '
'response="%s", '
'qop=auth, '
'nc=%s, '
'cnonce="1522e61005789929"'
)
auth = base_auth % (nonce, '', '00000001')
params = httpauth.parseAuthorization(auth)
response = httpauth._computeDigestResponse(params, 'test')
auth = base_auth % (nonce, response, '00000001')
        self.getPage('/digest/', [('Authorization', auth)])
self.assertStatus('200 OK')
self.assertBody("Hello test, you've been authorized.")
|
RaumZeit/gdesklets-core
|
shell2/MenuBar.py
|
Python
|
gpl-2.0
| 1,254
| 0.008772
|
import gtk
# TODO: the *_menu.append() calls here cause a GTK assertion failure
# in the form of "GtkWarning: gtk_accel_label_set_accel_closure: assertion
# `gtk_accel_group_from_accel_closure (accel_closure) != NULL' failed"
# the exact reason is the create_menu_item-call, but I can't figure why
class MenuBar(gtk.MenuBar):
def __init__(self, main):
super(MenuBar, self).__init__()
        self.__main = main
ac = main.get_action_group('global')
file_menu = gtk.Menu()
file_mitem = gtk.MenuItem("_File")
file_mitem.set_submenu(file_menu)
file_menu.append(ac.get_action('quit').create_menu_item())
edit_menu = gtk.Menu()
edit_mitem = gtk.MenuItem("_Edit")
edit_mitem.set_submenu(edit_menu)
edit_menu.append(ac.get_action('update').create_menu_item())
edit_menu.append(ac.get_action('prefs').create_menu_item())
help_menu = gtk.Menu()
help_mitem = gtk.MenuItem("_Help")
help_mitem.set_submenu(help_menu)
help_menu.append(ac.get_action('about').create_menu_item())
self.append(file_mitem)
self.append(edit_mitem)
self.append(help_mitem)
|
jabesq/home-assistant
|
homeassistant/components/tradfri/switch.py
|
Python
|
apache-2.0
| 4,263
| 0
|
"""Support for IKEA Tradfri switches."""
import logging
from homeassistant.components.switch import SwitchDevice
from homeassistant.core import callback
from . import DOMAIN as TRADFRI_DOMAIN, KEY_API, KEY_GATEWAY
from .const import CONF_GATEWAY_ID
_LOGGER = logging.getLogger(__name__)
IKEA = 'IKEA of Sweden'
TRADFRI_SWITCH_MANAGER = 'Tradfri Switch Manager'
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Load Tradfri switches based on a config entry."""
gateway_id = config_entry.data[CONF_GATEWAY_ID]
api = hass.data[KEY_API][config_entry.entry_id]
gateway = hass.data[KEY_GATEWAY][config_entry.entry_id]
devices_commands = await api(gateway.get_devices())
devices = await api(devices_commands)
switches = [dev for dev in devices if dev.has_socket_control]
if switches:
async_add_entities(
TradfriSwitch(switch, api, gateway_id) for switch in switches)
class TradfriSwitch(SwitchDevice):
"""The platform class required by Home Assistant."""
def __init__(self, switch, api, gateway_id):
"""Initialize a switch."""
self._api = api
self._unique_id = "{}-{}".format(gateway_id, switch.id)
self._switch = None
self._socket_control = None
self._switch_data = None
self._name = None
self._available = True
self._gateway_id = gateway_id
self._refresh(switch)
@property
def unique_id(self):
"""Return unique ID for switch."""
return self._unique_id
@property
def device_info(self):
"""Return the device info."""
info = self._switch.device_info
return {
'identifiers': {
(TRADFRI_DOMAIN, self._switch.id)
},
'name': self._name,
'manufacturer': info.manufacturer,
'model': info.model_number,
'sw_version': info.firmware_version,
'via_device': (TRADFRI_DOMAIN, self._gateway_id),
}
async def async_added_to_hass(self):
"""Start thread when added to hass."""
self._async_start_observe()
@property
def available(self):
"""Return True if entity is available."""
return self._available
@property
def should_poll(self):
"""No polling needed for tradfri switch."""
return False
@property
def name(self):
"""Return the display name of this switch."""
return self._name
@property
def is_on(self):
"""Return true if switch is on."""
return self._switch_data.state
async def async_turn_off(self, **kwargs):
"""Instruct the switch to turn off."""
        await self._api(self._socket_control.set_state(False))
async def async_turn_on(self, **kwargs):
"""Instruct the switch to turn on."""
await self._api(self._socket_control.set_state(True))
@callback
def _async_start_observe(self, exc=None):
"""Start observation o
|
f switch."""
from pytradfri.error import PytradfriError
if exc:
self._available = False
self.async_schedule_update_ha_state()
_LOGGER.warning("Observation failed for %s", self._name,
exc_info=exc)
try:
cmd = self._switch.observe(callback=self._observe_update,
err_callback=self._async_start_observe,
duration=0)
self.hass.async_create_task(self._api(cmd))
except PytradfriError as err:
_LOGGER.warning("Observation failed, trying again", exc_info=err)
self._async_start_observe()
def _refresh(self, switch):
"""Refresh the switch data."""
self._switch = switch
# Caching of switchControl and switch object
self._available = switch.reachable
self._socket_control = switch.socket_control
self._switch_data = switch.socket_control.sockets[0]
self._name = switch.name
@callback
def _observe_update(self, tradfri_device):
"""Receive new state data for this switch."""
self._refresh(tradfri_device)
self.async_schedule_update_ha_state()
|
gaborbernat/tox
|
tests/unit/package/test_package_parallel.py
|
Python
|
mit
| 4,272
| 0.000936
|
import os
import traceback
import py
from flaky import flaky
from tox.session.commands.run import sequential
@flaky(max_runs=3)
def test_tox_parallel_build_safe(initproj, cmd, mock_venv, monkeypatch):
initproj(
"env_var_test",
filedefs={
"tox.ini": """
[tox]
envlist = py
install_cmd = python -m -c 'print("ok")' -- {opts} {packages}'
[testenv]
commands = python -c 'import sys; print(sys.version)'
""",
},
)
# we try to recreate the following situation
# t1 starts and performs build
# t2 starts, but is blocked from t1 build lock to build
# t1 gets unblocked, t2 can now enter
# t1 is artificially blocked to run test command until t2 finishes build
# (parallel build package present)
# t2 package build finishes both t1 and t2 can now finish and clean up their build packages
import threading
import tox.package
t1_build_started = threading.Event()
t1_build_blocker = threading.Event()
t2_build_started = threading.Event()
t2_build_finished = threading.Event()
invoke_result = {}
def invoke_tox_in_thread(thread_name):
try:
result = cmd("--parallel--safe-build", "-vv")
except Exception as exception:
result = exception, traceback.format_exc()
invoke_result[thread_name] = result
prev_build_package = tox.package.build_package
with monkeypatch.context() as m:
def build_package(config, session):
t1_build_started.set()
t1_build_blocker.wait()
return prev_build_package(config, session)
m.setattr(tox.package, "build_package", build_package)
prev_run_test_env = sequential.runtestenv
def run_test_env(venv, redirect=False):
t2_build_finished.wait()
return prev_run_test_env(venv, redirect)
m.setattr(sequential, "runtestenv", run_test_env)
t1 = threading.Thread(target=invoke_tox_in_thread, args=("t1",))
t1.start()
t1_build_started.wait()
with monkeypatch.context() as m:
def build_package(config, session):
            t2_build_started.set()
try:
return prev_build_package(config, session)
finally:
t2_build_finished.set()
m.setattr(tox.package, "build_package", build_package)
t2 = threading.Thread(target=invoke_tox_in_thread, args=("t2",))
t2.start()
# t2 should get blocked by t1 build lock
t2_build_started.wait(timeout=0.1)
assert not t2_build_started.is_set()
t1_build_blocker.set() # release t1 blocker -> t1 can now finish
# t1 at this point should block at run test until t2 build finishes
t2_build_started.wait()
t1.join() # wait for both t1 and t2 to finish
t2.join()
# all threads finished without error
for val in invoke_result.values():
if isinstance(val, tuple):
assert False, "{!r}\n{}".format(val[0], val[1])
err = "\n".join(
"{}=\n{}".format(k, v.err).strip() for k, v in invoke_result.items() if v.err.strip()
)
out = "\n".join(
"{}=\n{}".format(k, v.out).strip() for k, v in invoke_result.items() if v.out.strip()
)
for val in invoke_result.values():
assert not val.ret, "{}\n{}".format(err, out)
assert not err
# when the lock is hit we notify
lock_file = py.path.local().join(".tox", ".package.lock")
msg = "lock file {} present, will block until released".format(lock_file)
assert msg in out
# intermediate packages are removed at end of build
t1_package = invoke_result["t1"].session.getvenv("py").package
    t2_package = invoke_result["t2"].session.getvenv("py").package
    assert t1_package != t2_package
assert not t1_package.exists()
assert not t2_package.exists()
# the final distribution remains
dist_after = invoke_result["t1"].session.config.distdir.listdir()
assert len(dist_after) == 1
sdist = dist_after[0]
assert t1_package != sdist
# our set_os_env_var is not thread-safe so clean-up TOX_WORK_DIR
os.environ.pop("TOX_WORK_DIR", None)
|
pllim/astropy
|
astropy/coordinates/funcs.py
|
Python
|
bsd-3-clause
| 13,747
| 0.0008
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This module contains convenience functions for coordinate-related functionality.
This is generally just wrapping around the object-oriented coordinates
framework, but it is useful for some users who are used to more functional
interfaces.
"""
import warnings
from collections.abc import Sequence
import numpy as np
import erfa
from astropy import units as u
from astropy.constants import c
from astropy.io import ascii
from astropy.utils import isiterable, data
from .sky_coordinate import SkyCoord
from .builtin_frames import GCRS, PrecessedGeocentric
from .representation import SphericalRepresentation, CartesianRepresentation
from .builtin_frames.utils import get_jd12
__all__ = ['cartesian_to_spherical', 'spherical_to_cartesian', 'get_sun',
'get_constellation', 'concatenate_representations', 'concatenate']
def cartesian_to_spherical(x, y, z):
"""
Converts 3D rectangular cartesian coordinates to spherical polar
coordinates.
Note that the resulting angles are latitude/longitude or
elevation/azimuthal form. I.e., the origin is along the equator
rather than at the north pole.
.. note::
This function simply wraps functionality provided by the
`~astropy.coordinates.CartesianRepresentation` and
`~astropy.coordinates.SphericalRepresentation` classes. In general,
for both performance and readability, we suggest using these classes
directly. But for situations where a quick one-off conversion makes
sense, this function is provided.
Parameters
----------
x : scalar, array-like, or `~astropy.units.Quantity`
The first Cartesian coordinate.
y : scalar, array-like, or `~astropy.units.Quantity`
The second Cartesian coordinate.
z : scalar, array-like, or `~astropy.units.Quantity`
The third Cartesian coordinate.
Returns
-------
r : `~astropy.units.Quantity`
The radial coordinate (in the same units as the inputs).
lat : `~astropy.units.Quantity` ['angle']
The latitude in radians
lon : `~astropy.units.Quantity` ['angle']
The longitude in radians
"""
if not hasattr(x, 'unit'):
x = x * u.dimensionless_unscaled
if not hasattr(y, 'unit'):
y = y * u.dimensionless_unscaled
if not hasattr(z, 'unit'):
z = z * u.dimensionless_unscaled
cart = CartesianRepresentation(x, y, z)
sph = cart.represent_as(SphericalRepresentation)
return sph.distance, sph.lat, sph.lon
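# Hedged usage sketch (editorial, consistent with the docstring above):
#
#   >>> from astropy import units as u
#   >>> r, lat, lon = cartesian_to_spherical(1 * u.kpc, 0 * u.kpc, 0 * u.kpc)
#   >>> (r, lat.value, lon.value)   # (<Quantity 1. kpc>, 0.0, 0.0)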
def spherical_to_cartesian(r, lat, lon):
"""
Converts spherical polar coordinates to rectangular cartesian
coordinates.
Note that the input angles should be in latitude/longitude or
elevation/azimuthal form. I.e., the origin is along the equator
rather than at the north pole.
    .. note::
        This is a low-level function used internally in
        `astropy.coordinates`. It is provided for users if they really
want to use it, but it is recommended that you use the
`astropy.coordinates` coordinate systems.
Parameters
----------
r : scalar, array-like, or `~astropy.units.Quantity`
The radial coordinate (in the same units as the inputs).
lat : scalar, array-like, or `~astropy.units.Quantity` ['angle']
The latitude (in radians if array or scalar)
lon : scalar, array-like, or `~astropy.units.Quantity` ['angle']
The longitude (in radians if array or scalar)
Returns
-------
x : float or array
The first cartesian coordinate.
y : float or array
The second cartesian coordinate.
z : float or array
The third cartesian coordinate.
"""
if not hasattr(r, 'unit'):
r = r * u.dimensionless_unscaled
if not hasattr(lat, 'unit'):
lat = lat * u.radian
if not hasattr(lon, 'unit'):
lon = lon * u.radian
sph = SphericalRepresentation(distance=r, lat=lat, lon=lon)
cart = sph.represent_as(CartesianRepresentation)
return cart.x, cart.y, cart.z
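# Hedged usage sketch (editorial): the inverse round-trips the example above,
#
#   >>> x, y, z = spherical_to_cartesian(1, 0, 0)   # lat/lon in radians
#   >>> (x.value, y.value, z.value)                 # (1.0, 0.0, 0.0)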
def get_sun(time):
"""
Determines the location of the sun at a given time (or times, if the input
is an array `~astropy.time.Time` object), in geocentric coordinates.
Parameters
----------
time : `~astropy.time.Time`
The time(s) at which to compute the location of the sun.
Returns
-------
newsc : `~astropy.coordinates.SkyCoord`
The location of the sun as a `~astropy.coordinates.SkyCoord` in the
`~astropy.coordinates.GCRS` frame.
Notes
-----
The algorithm for determining the sun/earth relative position is based
on the simplified version of VSOP2000 that is part of ERFA. Compared to
JPL's ephemeris, it should be good to about 4 km (in the Sun-Earth
vector) from 1900-2100 C.E., 8 km for the 1800-2200 span, and perhaps
    250 km over the 1000-3000 C.E. span.
"""
earth_pv_helio, earth_pv_bary = erfa.epv00(*get_jd12(time, 'tdb'))
# We have to manually do aberration because we're outputting directly into
# GCRS
earth_p = earth_pv_helio['p']
earth_v = earth_pv_bary['v']
# convert barycentric velocity to units of c, but keep as array for passing in to erfa
earth_v /= c.to_value(u.au/u.d)
dsun = np.sqrt(np.sum(earth_p**2, axis=-1))
invlorentz = (1-np.sum(earth_v**2, axis=-1))**0.5
properdir = erfa.ab(earth_p/dsun.reshape(dsun.shape + (1,)),
-earth_v, dsun, invlorentz)
cartrep = CartesianRepresentation(x=-dsun*properdir[..., 0] * u.AU,
y=-dsun*properdir[..., 1] * u.AU,
z=-dsun*properdir[..., 2] * u.AU)
return SkyCoord(cartrep, frame=GCRS(obstime=time))
# global dictionary that caches repeatedly-needed info for get_constellation
_constellation_data = {}
def get_constellation(coord, short_name=False, constellation_list='iau'):
"""
Determines the constellation(s) a given coordinate object contains.
Parameters
----------
coord : coordinate-like
The object to determine the constellation of.
short_name : bool
If True, the returned names are the IAU-sanctioned abbreviated
names. Otherwise, full names for the constellations are used.
constellation_list : str
The set of constellations to use. Currently only ``'iau'`` is
supported, meaning the 88 "modern" constellations endorsed by the IAU.
Returns
-------
constellation : str or string array
If ``coords`` contains a scalar coordinate, returns the name of the
constellation. If it is an array coordinate object, it returns an array
of names.
Notes
-----
To determine which constellation a point on the sky is in, this precesses
to B1875, and then uses the Delporte boundaries of the 88 modern
constellations, as tabulated by
`Roman 1987 <http://cdsarc.u-strasbg.fr/viz-bin/Cat?VI/42>`_.
"""
if constellation_list != 'iau':
raise ValueError("only 'iau' us currently supported for constellation_list")
# read the data files and cache them if they haven't been already
if not _constellation_data:
cdata = data.get_pkg_data_contents('data/constellation_data_roman87.dat')
ctable = ascii.read(cdata, names=['ral', 'rau', 'decl', 'name'])
cnames = data.get_pkg_data_contents('data/constellation_names.dat', encoding='UTF8')
cnames_short_to_long = dict([(l[:3], l[4:])
for l in cnames.split('\n')
if not l.startswith('#')])
cnames_long = np.array([cnames_short_to_long[nm] for nm in ctable['name']])
_constellation_data['ctable'] = ctable
_constellation_data['cnames_long'] = cnames_long
else:
ctable = _constellation_data['ctable']
cnames_long = _constellation_data['cnames_long']
isscalar = coord.isscalar
# if it is geocentric, we reproduce the frame but with the 1875 equinox,
# which is where the constellations are defined
# this yi
|
tpubben/SequoiaStacking
|
parrotStacking.py
|
Python
|
mit
| 3,776
| 0.005297
|
''' This script is prepared by Tyler Pubben and is licensed under the MIT license framework.
It is free to use and distribute; however, please reference http://www.tjscientific.com or my
GIT repository at https://github.com/tpubben/SequoiaStacking/'''
import numpy as np
import cv2
import os
def align_images(in_fldr, out_fldr, moving, fixed):
MIN_MATCH_COUNT = 10
moving_im = cv2.imread(moving, 0) # image to be distorted
    fixed_im = cv2.imread(fixed, 0)  # image to be matched
# Initiate SIFT detector
sift = cv2.xfeatures2d.SIFT_create()
# find the keypoints and descriptors with SIFT
kp1, des1 = sift.detectAndCompute(moving_im, None)
kp2, des2 = sift.detectAndCompute(fixed_im, None)
# use FLANN method to match keypoints. Brute force matches not appreciably better
# and added processing time is significant.
FLANN_INDEX_KDTREE = 0
index_params = dict(algorithm=FLANN_INDEX_KDTREE, trees=5)
search_params = dict(checks=50)
flann = cv2.FlannBasedMatcher(index_params, search_params)
matches = flann.knnMatch(des1, des2, k=2)
# store all the good matches following Lowe's ratio test.
good = []
for m, n in matches:
if m.distance < 0.7 * n.distance:
good.append(m)
if len(good) > MIN_MATCH_COUNT:
src_pts = np.float32([kp1[m.queryIdx].pt for m in good]).reshape(-1, 1, 2)
dst_pts = np.float32([kp2[m.trainIdx].pt for m in good]).reshape(-1, 1, 2)
M, mask = cv2.findHomography(src_pts, dst_pts, cv2.RANSAC, 5.0)
h, w = moving_im.shape # shape of input images, needs to remain the same for output
outimg = cv2.warpPerspective(moving_im, M, (w, h))
return outimg
    else:
        print("Not enough matches found for moving image")
        return None
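# Hypothetical usage sketch (file names are placeholders, not from this script):
#     aligned = align_images('in', 'out', 'in/IMG_0001_GRE.TIF', 'in/IMG_0001_RED.TIF')
#     if aligned is not None:
#         cv2.imwrite('out/IMG_0001_aligned.jpg', aligned)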
ch1 = int(input('Which band do you want for channel 1 on output image? Green(1), Red(2), Red Edge(3) or NIR(4)'))
ch2 = int(input('Which band do you want for channel 2 on output image? Green(1), Red(2), Red Edge(3) or NIR(4)'))
ch3 = int(input('Which band do you want for channel 3 on output image? Green(1), Red(2), Red Edge(3) or NIR(4)'))
channel_order = [ch1,ch2,ch3]
output_folder = str(input('Enter path to output folder: '))
input_folder = str(input('Enter path to input folder: '))
image_list = [f for f in os.listdir(input_folder) if os.path.isfile(os.path.join(input_folder,f))]
image_tups = zip(*[image_list[i::4] for i in range(4)])
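# Illustrative: zip(*[lst[i::4] for i in range(4)]) regroups a flat, sorted list
# into consecutive 4-tuples, e.g. [a, b, c, d, e, f, g, h] -> (a, b, c, d), (e, f, g, h)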
# set the fixed image to minimize amount of translation that needs to occur
if 1 in channel_order and 2 in channel_order and 3 in channel_order:
fixed_image = 1
moving_im1 = 0
moving_im2 = 2
elif 2 in channel_order and 3 in channel_order and 4 in channel_order:
fixed_image = 2
moving_im1 = 1
moving_im2 = 3
elif 1 in channel_order and 3 in channel_order and 4 in channel_order:
fixed_image = 2
moving_im1 = 0
moving_im2 = 3
elif 1 in channel_order and 2 in channel_order and 4 in channel_order:
    fixed_image = 1
    moving_im1 = 0
    moving_im2 = 3
else:
    raise ValueError('channel selection must cover exactly three distinct bands from 1-4')
# iterate through each set of 4 images
for tup in image_tups:
band1 = align_images(input_folder, output_folder, os.path.join(input_folder, tup[moving_im1]),
os.path.join(input_folder, tup[fixed_image]))
band2 = align_images(input_folder, output_folder, os.path.join(input_folder, tup[moving_im2]),
os.path.join(input_folder, tup[fixed_image]))
band3 = cv2.imread(os.path.join(input_folder, tup[fixed_image]), 0)
merged = cv2.merge((band1, band2, band3))
cv2.imwrite(os.path.join(output_folder, tup[fixed_image][-30:-4]) + '_merged.jpg', merged)
|
hlin117/statsmodels
|
statsmodels/stats/tests/test_diagnostic.py
|
Python
|
bsd-3-clause
| 40,146
| 0.007163
|
# -*- coding: utf-8 -*-
"""Tests for Regression Diagnostics and Specification Tests
Created on Thu Feb 09 13:19:47 2012
Author: Josef Perktold
License: BSD-3
currently all tests are against R
"""
#import warnings
#warnings.simplefilter("default")
# ResourceWarning doesn't exist in python 2
#warnings.simplefilter("ignore", ResourceWarning)
import os
import numpy as np
from numpy.testing import (assert_, assert_almost_equal, assert_equal,
assert_approx_equal, assert_allclose)
from nose import SkipTest
from statsmodels.regression.linear_model import OLS, GLSAR
from statsmodels.tools.tools import add_constant
from statsmodels.datasets import macrodata
import statsmodels.stats.sandwich_covariance as sw
import statsmodels.stats.diagnostic as smsdia
import json
#import statsmodels.sandbox.stats.diagnostic as smsdia
import statsmodels.stats.outliers_influence as oi
cur_dir = os.path.abspath(os.path.dirname(__file__))
def compare_t_est(sp, sp_dict, decimal=(14, 14)):
assert_almost_equal(sp[0], sp_dict['statistic'], decimal=decimal[0])
assert_almost_equal(sp[1], sp_dict['pvalue'], decimal=decimal[1])
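# Hypothetical usage sketch: compare a (statistic, pvalue) result tuple against
# an R-derived reference dict, e.g.
#     compare_t_est((2.28, 0.024), dict(statistic=2.28, pvalue=0.024))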
def notyet_atst():
d = macrodata.load().data
realinv = d['realinv']
realgdp = d['realgdp']
realint = d['realint']
endog = realinv
exog = add_constant(np.c_[realgdp, realint])
res_ols1 = OLS(endog, exog).fit()
#growth rates
gs_l_realinv = 400 * np.diff(np.log(d['realinv']))
gs_l_realgdp = 400 * np.diff(np.log(d['realgdp']))
lint = d['realint'][:-1]
tbilrate = d['tbilrate'][:-1]
endogg = gs_l_realinv
exogg = add_constant(np.c_[gs_l_realgdp, lint])
exogg2 = add_constant(np.c_[gs_l_realgdp, tbilrate])
res_ols = OLS(endogg, exogg).fit()
res_ols2 = OLS(endogg, exogg2).fit()
#the following were done accidentally with res_ols1 in R,
#with original Greene data
params = np.array([-272.3986041341653, 0.1779455206941112,
0.2149432424658157])
cov_hac_4 = np.array([1321.569466333051, -0.2318836566017612,
37.01280466875694, -0.2318836566017614, 4.602339488102263e-05,
-0.0104687835998635, 37.012804668757, -0.0104687835998635,
21.16037144168061]).reshape(3,3, order='F')
cov_hac_10 = np.array([2027.356101193361, -0.3507514463299015,
54.81079621448568, -0.350751446329901, 6.953380432635583e-05,
-0.01268990195095196, 54.81079621448564, -0.01268990195095195,
22.92512402151113]).reshape(3,3, order='F')
#goldfeld-quandt
het_gq_greater = dict(statistic=13.20512768685082, df1=99, df2=98,
pvalue=1.246141976112324e-30, distr='f')
het_gq_less = dict(statistic=13.20512768685082, df1=99, df2=98, pvalue=1.)
het_gq_2sided = dict(statistic=13.20512768685082, df1=99, df2=98,
pvalue=1.246141976112324e-30, distr='f')
#goldfeld-quandt, fraction = 0.5
het_gq_greater_2 = dict(statistic=87.1328934692124, df1=48, df2=47,
pvalue=2.154956842194898e-33, distr='f')
gq = smsdia.het_goldfeldquandt(endog, exog, split=0.5)
compare_t_est(gq, het_gq_greater, decimal=(13, 14))
assert_equal(gq[-1], 'increasing')
harvey_collier = dict(stat=2.28042114041313, df=199,
pvalue=0.02364236161988260, distr='t')
#hc = harvtest(fm, order.by=ggdp , data = list())
harvey_collier_2 = dict(stat=0.7516918462158783, df=199,
pvalue=0.4531244858006127, distr='t')
##################################
class TestDiagnosticG(object):
def __init__(self):
d = macrodata.load().data
#growth rates
gs_l_realinv = 400 * np.diff(np.log(d['realinv']))
gs_l_realgdp = 400 * np.diff(np.log(d['realgdp']))
lint = d['realint'][:-1]
tbilrate = d['tbilrate'][:-1]
endogg = gs_l_realinv
exogg = add_constant(np.c_[gs_l_realgdp, lint])
exogg2 = add_constant(np.c_[gs_l_realgdp, tbilrate])
exogg3 = add_constant(np.c_[gs_l_realgdp])
res_ols = OLS(endogg, exogg).fit()
res_ols2 = OLS(endogg, exogg2).fit()
res_ols3 = OLS(endogg, exogg3).fit()
self.res = res_ols
self.res2 = res_ols2
self.res3 = res_ols3
self.endog = self.res.model.endog
self.exog = self.res.model.exog
def test_basic(self):
#mainly to check I got the right regression
#> mkarray(fm$coefficients, "params")
params = np.array([-9.48167277465485, 4.3742216647032,
-0.613996969478989])
assert_almost_equal(self.res.params, params, decimal=12)
def test_hac(self):
res = self.res
#> nw = NeweyWest(fm, lag = 4, prewhite = FALSE, verbose=TRUE)
#> nw2 = NeweyWest(fm, lag=10, prewhite = FALSE, verbose=TRUE)
#> mkarray(nw, "cov_hac_4")
cov_hac_4 = np.array([1.385551290884014, -0.3133096102522685,
-0.0597207976835705, -0.3133096102522685, 0.1081011690351306,
0.000389440793564336, -0.0597207976835705, 0.000389440793564339,
0.0862118527405036]).reshape(3,3, order='F')
#> mkarray(nw2, "cov_hac_10")
cov_hac_10 = np.array([1.257386180080192, -0.2871560199899846,
-0.03958300024627573, -0.2871560199899845, 0.1049107028987101,
0.0003896205316866944, -0.03958300024627578, 0.0003896205316866961,
0.0985539340694839]).reshape(3,3, order='F')
cov = sw.cov_hac_simple(res, nlags=4, use_correction=False)
bse_hac = sw.se_cov(cov)
assert_almost_equal(cov, cov_hac_4, decimal=14)
assert_almost_equal(bse_hac, np.sqrt(np.diag(cov)), decimal=14)
cov = sw.cov_hac_simple(res, nlags=10, use_correction=False)
bse_hac = sw.se_cov(cov)
assert_almost_equal(cov, cov_hac_10, decimal=14)
assert_almost_equal(bse_hac, np.sqrt(np.diag(cov)), decimal=14)
def test_het_goldfeldquandt(self):
#TODO: test options missing
#> gq = gqtest(fm, alternative='greater')
#> mkhtest_f(gq, 'het_gq_greater', 'f')
het_gq_greater = dict(statistic=0.5313259064778423,
pvalue=0.9990217851193723,
parameters=(98, 98), distr='f')
#> gq = gqtest(fm, alternative='less')
#> mkhtest_f(gq, 'het_gq_less', 'f')
het_gq_less = dict(statistic=0.5313259064778423,
pvalue=0.000978214880627621,
parameters=(98, 98), distr='f')
#> gq = gqtest(fm, alternative='two.sided')
#> mkhtest_f(gq, 'het_gq_two_sided', 'f')
het_gq_two_sided = dict(statistic=0.5313259064778423,
pvalue=0.001956429761255241,
parameters=(98, 98), distr='f')
#> gq = gqtest(fm, fraction=0.1, alternative='two.sided')
#> mkhtest_f(gq, 'het_gq_two_sided_01', 'f')
het_gq_two_sided_01 = dict(statistic=0.5006976835928314,
pvalue=0.001387126702579789,
parameters=(88, 87), distr='f')
#> gq = gqtest(fm, fraction=0.5, alternative='two.sided')
#> mkhtest_f(gq, 'het_gq_two_sided_05', 'f')
het_gq_two_sided_05 = dict(statistic=0.434815645134117,
pvalue=0.004799321242905568,
parameters=(48, 47), distr='f')
endogg, exogg = self.endog, self.exog
#tests
gq = smsdia.het_goldfeldquandt(endogg, exogg, split=0.5)
compare_t_est(gq, het_gq_greater, decimal=(14, 14))
assert_equal(gq[-1], 'increasing')
gq = smsdia.het_goldfeldquandt(endogg, exogg, split=0.5,
alternative='decreasing')
compare_t_est(gq, het_gq_less, decimal=(14, 14))
assert_equal(gq[-1], 'decreasing')
gq = smsdia.het_goldfeldquandt(endogg, exogg, split=0.5,
alternative='two-sided')
        compare_t_est(gq, het_gq_two_sided, decimal=(14, 14))
|
dzoep/khal
|
khal/ui/widgets.py
|
Python
|
mit
| 14,425
| 0.000971
|
# Copyright (c) 2013-2016 Christian Geier et al.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""A collection of (reusable) urwid widgets
Widgets that are specific to calendaring/khal should go into __init__.py or,
if they are large, into their own files
"""
from datetime import date, datetime, timedelta
import re
import urwid
class DateConversionError(Exception):
pass
def delete_last_word(text, number=1):
"""delete last `number` of words from text"""
words = re.findall(r"[\w]+|[^\w\s]", text, re.UNICODE)
for one in range(1, number + 1):
text = text.rstrip()
if text == '':
return text
text = text[:len(text) - len(words[-one])]
return text
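# Illustrative example: delete_last_word('foo bar baz', 2) returns 'foo '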
def delete_till_beginning_of_line(text):
"""delete till beginning of line"""
if text.rfind("\n") == -1:
return ''
return text[0:text.rfind("\n") + 1]
def delete_till_end_of_line(text):
"""delete till beginning of line"""
if text.find("\n") == -1:
return ''
return text[text.find("\n"):]
def goto_beginning_of_line(text):
if text.rfind("\n") == -1:
return 0
return text.rfind("\n") + 1
def goto_end_of_line(text):
if text.find("\n") == -1:
return len(text)
return text.find("\n")
class ExtendedEdit(urwid.Edit):
"""A text editing widget supporting some more editing commands"""
def keypress(self, size, key):
if key == 'ctrl w':
self._delete_word()
elif key == 'ctrl u':
self._delete_till_beginning_of_line()
elif key == 'ctrl k':
self._delete_till_end_of_line()
elif key == 'ctrl a':
self._goto_beginning_of_line()
elif key == 'ctrl e':
self._goto_end_of_line()
else:
return super(ExtendedEdit, self).keypress(size, key)
def _delete_word(self):
"""delete word before cursor"""
text = self.get_edit_text()
f_text = delete_last_word(text[:self.edit_pos])
self.set_edit_text(f_text + text[self.edit_pos:])
self.set_edit_pos(len(f_text))
def _delete_till_beginning_of_line(self):
"""delete till start of line before cursor"""
text = self.get_edit_text()
f_text = delete_till_beginning_of_line(text[:self.edit_pos])
self.set_edit_text(f_text + text[self.edit_pos:])
self.set_edit_pos(len(f_text))
def _delete_till_end_of_line(self):
"""delete till end of line before cursor"""
text = self.get_edit_text()
f_text = delete_till_end_of_line(text[self.edit_pos:])
self.set_edit_text(text[:self.edit_pos] + f_text)
def _goto_beginning_of_line(self):
text = self.get_edit_text()
self.set_edit_pos(goto_beginning_of_line(text[:self.edit_pos]))
def _goto_end_of_line(self):
text = self.get_edit_text()
self.set_edit_pos(goto_end_of_line(text[self.edit_pos:]) + self.edit_pos)
class DateTimeWidget(ExtendedEdit):
def __init__(self, dateformat, on_date_change=lambda x: None, **kwargs):
self.dateformat = dateformat
self.on_date_change = on_date_change
super().__init__(wrap='any', **kwargs)
def keypress(self, size, key):
if key == 'ctrl x':
self.decrease()
return None
elif key == 'ctrl a':
self.increase()
return None
if (
key in ['up', 'down', 'tab', 'shift tab'] or
(key in ['right'] and self.edit_pos >= len(self.edit_text)) or
(key in ['left'] and self.edit_pos == 0)):
# when leaving the current Widget we check if currently
# entered value is valid and if so pass the new value
try:
new_date = self._get_current_value()
except DateConversionError:
pass
else:
self.on_date_change(new_date)
return super(DateTimeWidget, self).keypress(size, key)
def increase(self):
"""call to increase the datefield by self.timedelta"""
self._crease(self.dtype.__add__)
def decrease(self):
"""call to decrease the datefield by self.timedelta"""
self._crease(self.dtype.__sub__)
def _crease(self, fun):
"""common implementation for `self.i
|
ncrease` and `self.decrease`"""
try:
new_date = fun(self._get_current_value(), self.timedelta)
self.on_date_change(new_date)
self.set_edit_text(new_date.strftime(self.dateformat))
except DateConversionError:
pass
def set_value(self, new_date):
"""set a new value for this widget
:type new_date: datetime.date
"""
        self.set_edit_text(new_date.strftime(self.dateformat))
class DateWidget(DateTimeWidget):
dtype = date
timedelta = timedelta(days=1)
def _get_current_value(self):
try:
new_date = datetime.strptime(self.get_edit_text(), self.dateformat).date()
except ValueError:
raise DateConversionError
else:
return new_date
class TimeWidget(DateTimeWidget):
dtype = datetime
timedelta = timedelta(minutes=15)
def _get_current_value(self):
try:
new_datetime = datetime.strptime(self.get_edit_text(), self.dateformat)
except ValueError:
raise DateConversionError
else:
return new_datetime
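# Hypothetical usage sketch (format string and dates are illustrative):
#     w = DateWidget('%Y-%m-%d', edit_text='2016-01-31')
#     w.increase()   # edit text becomes '2016-02-01' (stepped by timedelta(days=1))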
class Choice(urwid.PopUpLauncher):
def __init__(self, choices, active, decorate_func=None):
self.choices = choices
self._decorate = decorate_func or (lambda x: x)
self.active = self._original = active
def create_pop_up(self):
pop_up = ChoiceList(self)
urwid.connect_signal(pop_up, 'close',
lambda button: self.close_pop_up())
return pop_up
def get_pop_up_parameters(self):
return {'left': 0,
'top': 1,
'overlay_width': 32,
'overlay_height': len(self.choices)}
@property
def changed(self):
return self._active != self._original
@property
def active(self):
return self._active
@active.setter
def active(self, val):
self._active = val
self.button = urwid.Button(self._decorate(self._active))
urwid.PopUpLauncher.__init__(self, self.button)
urwid.connect_signal(self.button, 'click',
lambda button: self.open_pop_up())
class ChoiceList(urwid.WidgetWrap):
signals = ['close']
def __init__(self, parent):
self.parent = parent
buttons = []
for c in parent.choices:
buttons.append(
urwid.Button(parent._decorate(c),
on_press=self.set_choice, user_data=c)
)
pile = NPile(buttons, outermost=True)
fill = urwid.Filler(pile)
urwid.WidgetWrap.__init__(self, urwid.AttrMap(fill, 'popupbg'))
def set_choice(self, button, account):
self.parent.active = account
self._emit("close")
class SupportsNext(object):
|
lnhubbell/tweetTrack
|
tweetTrack/wsgi.py
|
Python
|
mit
| 651
| 0
|
"""WSGI application."""
import os
from sys import argv
from werkzeug.serving import run_simple
from werkzeug.wsgi import DispatcherMiddleware
from tweetTrack.app import app
application = DispatcherMiddleware(app)
if __name__ == '__main__':
if len(argv) < 2 or argv[1] == 'Dev':
os.environ['FLASK_CONFIG'] = 'Dev'
run_simple(
'localhost',
5000,
application,
__debug__
)
else:
os.environ['FLASK_CONFIG'] = argv[1].title()
        print(os.environ['FLASK_CONFIG'])
run_simple(
'localhost',
5000,
application,
)
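# Note: DispatcherMiddleware is normally used to mount several WSGI apps by URL
# prefix, e.g. DispatcherMiddleware(app, {'/api': api_app}); with a single
# argument it simply wraps `app` with no extra mounts.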
|
kevinconway/PyPerf
|
tests/profilers/test_runtime.py
|
Python
|
apache-2.0
| 672
| 0
|
"""Test suite for the runtime profiler."""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from pyperf.profilers import runtime
def test_runtime_gives_reasonable_results():
"""Ensure runtime is measured within some degree of reason.
The expectation is that obviously longer running code is measured as longer
running by the profiler.
"""
profiler = runtime.RuntimeProfiler()
    small, _ = profiler(setup='pass', code='for x in range(100): pass')
large, _ = profiler(setup='pass', code='for x in range(10000): pass')
    assert small < large
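    # The assertion is deliberately comparative: absolute timings vary by machine,
    # but the 100x larger loop should reliably measure as slower.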
|
niklasf/python-prompt-toolkit
|
examples/system-prompt.py
|
Python
|
bsd-3-clause
| 331
| 0.003021
|
#!/usr/bin/env python
from __future__ import unicode_literals
from prompt_toolkit import prompt
if __name__ == '__main__':
    print('If you press meta-! or esc-! at the following prompt, you can enter system commands.')
answer = prompt('Give me some input: ', enable_system_bindings=True)
print('You said: %s' % answer)
|
jorgemira/euler-py
|
p025.py
|
Python
|
apache-2.0
| 383
| 0.002611
|
'''Problem 25 from project Euler: 1000-digit Fibonacci number
https://projecteuler.net/problem=25'''
RESULT = 4782
def solve():
'''Main function'''
digits = 1000
fib1 = 1
fib2 = 1
nth = 2
top = 10 ** (digits - 1)
while fib2 < top:
        fib1, fib2 = fib2, fib1 + fib2
nth += 1
return nth
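# Illustrative check: with digits = 3 the function stops at F(12) = 144, the
# first Fibonacci number with three digits.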
if __name__ == '__main__':
    print(solve())
|
si618/pi-time
|
pi_time/laptimer/laptimer/laptimer.py
|
Python
|
gpl-3.0
| 1,522
| 0.003285
|
import pi_time
from os import path
from autobahn.twisted.wamp import ApplicationSession
from autobahn.twisted.util import sleep
from autobahn.wamp.exception import ApplicationError
from twisted.internet.defer import inlineCallbacks
from twisted.python import log
from pi_time import settings
from pi_time.api import Api
class LaptimerAppSession(ApplicationSession):
@inlineCallbacks
def onJoin(self, details):
config_dir = path.dirname(path.dirname(path.realpath(__file__)))
config_file = path.join(config_dir, 'config.json')
self.api = Api(session=self, config_file=config_file)
# Methods to publish events from laptimer node to laptimer clients
#def player_changed(msg):
# yield self.publish(settings.URI_PREFIX + 'player_changed', msg)
# Subscribe to events from laptimer node
#yield self.subscribe(player_changed,
# settings.URI_PREFIX + 'player_changed')
# Register procedures available from laptimer clients
yield self.register(self.api)
log.msg('Pi-time laptimer v{} ready'.format(pi_time.VERSION))
# Broadcast to all sensor sessions that laptimer session started
yield self.publish(settings.URI_PREFIX + 'laptimer_started',
str(details))
@inlineCallbacks
def onLeave(self, details):
# Broadcast to all sensor sessions that laptimer session stopped
yield self.publish(settings.URI_PREFIX + 'laptimer_stopped',
str(details))
|