| repo_name (string, len 5–100) | path (string, len 4–231) | language (1 class) | license (15 classes) | size (int64, 6–947k) | score (float64, 0–0.34) | prefix (string, len 0–8.16k) | middle (string, len 3–512) | suffix (string, len 0–8.17k) |
|---|---|---|---|---|---|---|---|---|
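Each row holds one Python source file split into prefix, middle, and suffix spans (a fill-in-the-middle layout, judging by the column names); concatenating the three spans restores the file. A minimal reassembly sketch, assuming a row is loaded as a dict keyed by the column names above:

def reassemble(row):
    # prefix + middle + suffix restores the original file text
    return row['prefix'] + row['middle'] + row['suffix']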
| markofu/scripts | nmap/nmap/zenmap/zenmapGUI/higwidgets/higspinner.py | Python | gpl-2.0 | 21,689 | 0.000277 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# ***********************IMPORTANT NMAP LICENSE TERMS************************
# * *
# * The Nmap Security Scanner is (C) 1996-2013 Insecure.Com LLC. Nmap is *
# * also a registered trademark of Insecure.Com LLC. This program is free *
# * software; you may redistribute and/or modify it under the terms of the *
# * GNU General Public License as published by the Free Software *
# * Foundation; Version 2 ("GPL"), BUT ONLY WITH ALL OF THE CLARIFICATIONS *
# * AND EXCEPTIONS DESCRIBED HEREIN. This guarantees your right to use, *
# * modify, and redistribute this software under certain conditions. If *
# * you wish to embed Nmap technology into proprietary software, we sell *
# * alternative licenses (contact sales@nmap.com). Dozens of software *
# * vendors already license Nmap technology such as host discovery, port *
# * scanning, OS detection, version detection, and the Nmap Scripting *
# * Engine. *
# * *
# * Note that the GPL places important restrictions on "derivative works", *
# * yet it does not provide a detailed definition of that term. To avoid *
# * misunderstandings, we interpret that term as broadly as copyright law *
# * allows. For example, we consider an application to constitute a *
# * derivative work for the purpose of this license if it does any of the *
# * following with any software or content covered by this license *
# * ("Covered Software"): *
# * *
# * o Integrates source code from Covered Software. *
# * *
# * o Reads or includes copyrighted data files, such as Nmap's nmap-os-db *
# * or nmap-service-probes. *
# * *
# * o Is designed specifically to execute Covered Software and parse the *
# * results (as opposed to typical shell or execution-menu apps, which will *
# * execute anything you tell them to). *
# * *
# * o Includes Covered Software in a proprietary executable installer. The *
# * installers produced by InstallShield are an example of this. Including *
# * Nmap with other software in compressed or archival form does not *
# * trigger this provision, provided appropriate open source decompression *
# * or de-archiving software is widely available for no charge.  For the   *
# * purposes of this license, an installer is considered to include Covered *
# * Software even if it actually retrieves a copy of Covered Software from *
# * another source during runtime (such as by downloading it from the *
# * Internet). *
# * *
# * o Links (statically or dynamically) to a library which does any of the *
# * above. *
# * *
# * o Executes a helper program, module, or script to do any of the above. *
# * *
# * This list is not exclusive, but is meant to clarify our interpretation *
# * of derived works with some common examples. Other people may interpret *
# * the plain GPL differently, so we consider this a special exception to *
# * the GPL that we apply to Covered Software. Works which meet any of *
# * these conditions must conform to all of the terms of this license, *
# * particularly including the GPL Section 3 requirements of providing *
# * source code and allowing free redistribution of the work as a whole. *
# * *
# * As another special exception to the GPL terms, Insecure.Com LLC grants *
# * permission to link the code of this program with any version of the *
# * OpenSSL library which is distributed under a license identical to that *
# * listed in the included docs/licenses/OpenSSL.txt file, and distribute *
# * linked combinations including the two. *
# * *
# * Any redistribution of Covered Software, including any derived works, *
# * must obey and carry forward all of the terms of this license, including *
# * obeying all GPL rules and restrictions. For example, source code of *
# * the whole work must be provided and free redistribution must be *
# * allowed. All GPL references to "this License", are to be treated as *
# * including the terms and conditions of this license text as well. *
# * *
# * Because this license imposes special exceptions to the GPL, Covered *
# * Work may not be combined (even as part of a larger work) with plain GPL *
# * software. The terms, conditions, and exceptions of this license must *
# * be included as well. This license is incompatible with some other open *
# * source licenses as well. In some cases we can relicense portions of *
# * Nmap or grant special permissions to use it in other open source *
# * software. Please contact fyodor@nmap.org with any such requests. *
# * Similarly, we don't incorporate incompatible open source software into *
# * Covered Software without special permission from the copyright holders. *
# * *
# * If you have any questions about the licensing restrictions on using *
# * Nmap in other works, we are happy to help.  As mentioned above, we also *
# * offer an alternative license to integrate Nmap into proprietary         *
# * applications and appliances. These contracts have been sold to dozens *
# * of software vendors, and generally include a perpetual license as well *
# * as providing for priority support and updates. They also fund the *
# * continued development of Nmap. Please email sales@nmap.com for further *
# * information. *
# * *
# * If you have received a written license agreement or contract for *
# * Covered Software stating terms other than these, you may choose to use *
# * and redistribute Covered Software under those terms instead of these. *
# * *
# * Source is provided to this software because we believe users have a *
# * right to know exactly what a program is going to do before they run it. *
# * This also allows you to audit the software for security holes (none *
# * have been found so far). *
# * *
# * Source code also allows you to port Nmap to new platforms, fix bugs, *
# * and add new features. You are highly encouraged to send your changes *
# * to the dev@nmap.org mailing list for possible incorporation into the *
# * main distribution. By sending these changes to Fyodor or one of the *
# * Insecure.Org development mailing lists, or checking them into the Nmap *
# * source code repository, it is understood (unless you specify otherwise) *
# * that you are offering the Nmap Project (Insecure.Com LLC) the *
# * unlimited, non-exclusive right to reuse, modify, and relicense the *
# * code. Nmap will
| htwenhe/DJOA | env/Lib/site-packages/openpyxl/writer/workbook.py | Python | mit | 5,683 | 0.003519 |
from __future__ import absolute_import
# Copyright (c) 2010-2017 openpyxl
"""Write the workbook global settings to the archive."""
from copy import copy
from openpyxl.utils import absolute_coordinate, quote_sheetname
from openpyxl.xml.constants import (
ARC_APP,
ARC_CORE,
ARC_WORKBOOK,
PKG_REL_NS,
CUSTOMUI_NS,
ARC_ROOT_RELS,
)
from openpyxl.xml.functions import tostring, fromstring
from openpyxl.worksheet import Worksheet
from openpyxl.chartsheet import Chartsheet
from openpyxl.packaging.relationship import Relationship, RelationshipList
from openpyxl.workbook.defined_name import DefinedName
from openpyxl.workbook.external_reference import ExternalReference
from openpyxl.workbook.parser import ChildSheet, WorkbookPackage
from openpyxl.workbook.properties import CalcProperties, WorkbookProperties
from openpyxl.workbook.views import BookView
from openpyxl.utils.datetime import CALENDAR_MAC_1904
def write_root_rels(workbook):
"""Write the relationships xml."""
rels = RelationshipList()
rel = Relationship(type="officeDocument", Target=ARC_WORKBOOK)
rels.append(rel)
rel = Relationship(Target=ARC_CORE, Type="%s/metadata/core-properties" % PKG_REL_NS)
rels.append(rel)
rel = Relationship(type="extended-properties", Target=ARC_APP)
rels.append(rel)
    if workbook.vba_archive is not None:
# See if there was a customUI relation and reuse it
xml = fromstring(workbook.vba_archive.read(ARC_ROOT_RELS))
root_rels = RelationshipList.from_tree(xml)
for rel in root_rels.find(CUSTOMUI_NS):
rels.append(rel)
return tostring(rels.to_tree())
def get_active_sheet(wb):
"""
Return the index of the active sheet.
    If the sheet set to active is hidden, return the next visible sheet or None.
"""
visible_sheets = [idx for idx, sheet in enumerate(wb._sheets) if sheet.sheet_state == "visible"]
if not visible_sheets:
raise IndexError("At least one sheet must be visible")
idx = wb._active_sheet_index
sheet = wb.active
if sheet and sheet.sheet_state == "visible":
return idx
for idx in visible_sheets[idx:]:
wb.active = idx
return idx
return None
def write_workbook(workbook):
"""Write the core workbook xml."""
wb = workbook
wb.rels = RelationshipList()
root = WorkbookPackage()
props = WorkbookProperties() # needs a mapping to the workbook for preservation
if wb.code_name is not None:
props.codeName = wb.code_name
if wb.excel_base_date == CALENDAR_MAC_1904:
props.date1904 = True
root.workbookPr = props
# workbook protection
root.workbookProtection = wb.security
# book views
active = get_active_sheet(wb)
view = BookView(activeTab=active)
root.bookViews =[view]
# worksheets
for idx, sheet in enumerate(wb._sheets, 1):
sheet_node = ChildSheet(name=sheet.title, sheetId=idx, id="rId{0}".format(idx))
rel = Relationship(type=sheet._rel_type, Target=sheet.path)
wb.rels.append(rel)
if not sheet.sheet_state == 'visible':
if len(wb._sheets) == 1:
raise ValueError("The only worksheet of a workbook cannot be hidden")
sheet_node.state = sheet.sheet_state
root.sheets.append(sheet_node)
# external references
for link in wb._external_links:
# need to match a counter with a workbook's relations
rId = len(wb.rels) + 1
rel = Relationship(type=link._rel_type, Target=link.path)
wb.rels.append(rel)
ext = ExternalReference(id=rel.id)
root.externalReferences.append(ext)
# Defined names
defined_names = copy(wb.defined_names) # don't add special defns to workbook itself.
# Defined names -> autoFilter
for idx, sheet in enumerate(wb.worksheets):
auto_filter = sheet.auto_filter.ref
if auto_filter:
name = DefinedName(name='_FilterDatabase', localSheetId=idx, hidden=True)
name.value = u"{0}!{1}".format(quote_sheetname(sheet.title),
absolute_coordinate(auto_filter)
)
defined_names.append(name)
# print titles
if sheet.print_titles:
name = DefinedName(name="Print_Titles", localSheetId=idx)
name.value = ",".join([u"{0}!{1}".format(quote_sheetname(sheet.title), r)
for r in sheet.print_titles.split(",")])
defined_names.append(name)
# print areas
if sheet.print_area:
name = DefinedName(name="Print_Area", localSheetId=idx)
name.value = ",".join([u"{0}!{1}".format(quote_sheetname(sheet.title), r)
for r in sheet.print_area])
defined_names.append(name)
root.definedNames = defined_names
root.calcPr = CalcProperties(calcId=124519, fullCalcOnLoad=True)
return tostring(root.to_tree())
def write_workbook_rels(workbook):
"""Write the workbook relationships xml."""
wb = workbook
strings = Relationship(type='sharedStrings', Target='sharedStrings.xml')
wb.rels.append(strings)
styles = Relationship(type='styles', Target='styles.xml')
wb.rels.append(styles)
theme = Relationship(type='theme', Target='theme/theme1.xml')
wb.rels.append(theme)
if workbook.vba_archive:
vba = Relationship(type='', Target='vbaProject.bin')
vba.Type ='http://schemas.microsoft.com/office/2006/relationships/vbaProject'
wb.rels.append(vba)
return tostring(wb.rels.to_tree())
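# A minimal sketch of how these writers might be combined when saving (the
# zipfile-style archive object is an assumption, not part of this module):
#   archive.writestr(ARC_ROOT_RELS, write_root_rels(wb))
#   archive.writestr(ARC_WORKBOOK, write_workbook(wb))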
| amureki/lunch-with-channels | core/utils.py | Python | mit | 2,997 | 0 |
import random
from .exceptions import ClientError
def catch_client_error(func):
"""
Decorator to catch the ClientError exception and translate it into a reply.
"""
def inner(message):
try:
return func(message)
except ClientError as e:
# If we catch a client error, tell it to send an error string
# back to the client on their reply channel
e.send_to(message.reply_channel)
return inner
def generate_name():
adjective_list = [
'Admiring',
'Adoring',
'Agitated',
'Amazing',
'Angry',
'Awesome',
'Backstabbing',
'Berserk',
'Big',
'Boring',
'Clever',
'Compassionate',
'Condescending',
'Cranky',
'Desperate',
'Determined',
'Distracted',
'Dreamy',
'Drunk',
'Ecstatic',
'Elated',
'Elegant',
'Evil',
'Fervent',
'Focused',
'Furious',
'Gigantic',
'Gloomy',
'Goofy',
'Grave',
'Happy',
'High',
'Hopeful',
'Hungry',
'Infallible',
'Jolly',
'Jovial',
'Kickass',
'Lonely',
'Loving',
'Mad',
'Modest',
'Naughty',
'Nauseous',
'Nostalgic',
'Pedantic',
'Pensive',
'Prickly',
'Reverent',
'Romantic',
'Sad',
'Serene',
'Sharp',
'Sick',
'Silly',
'Sleepy',
'Small',
'Stoic',
'Stupefied',
'Suspicious',
'Tender',
'Thirsty',
'Tiny',
'Trusting',
]
subject_list = [
'Kraven the Hunter',
'Juggernaut',
'Marvel Girl',
'Swarm',
'Black Bolt',
        'Loki Laufeyson',
'Ghost Rider',
'Professor X',
'Quicksilver',
'Kingpin',
'Doctor Octopus',
'Green Goblin',
'Red Skull',
'Colossus',
'Shadowcat',
'Cyclops',
'Havok',
'Luke Cage',
'Black Widow',
'Beast',
'The Multiple Man',
'Silver Surfer',
'Ultron',
'Captain Britain',
'Iron Man',
'The Punisher',
'Ego the Living Planet',
'Nightcrawler',
'Annihilus',
'Deadpool',
'Captain America',
'Fin Fang Foom',
'Daredevil',
'J Jonah Jameson',
'Kang the Conqueror',
'Beta Ray Bill',
'Doctor Stephen Strange',
'Wolverine',
'MODOK',
'Nick Fury',
        'Emma Frost',
'Black Panther',
'The Hulk',
'Thing',
'Galactus',
'Magneto',
'Spider-Man',
'Doctor Victor Von Doom',
]
left = random.choice(adjective_list)
right = random.choice(subject_list)
name = '{} {}'.format(left, right)
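    # e.g. name could be 'Jolly Deadpool'; both picks are uniformly random.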
return name
| xStream-Kodi/plugin.video.xstream | sites/animes-stream24_tv.py | Python | gpl-3.0 | 12,975 | 0.019352 |
# -*- coding: utf-8 -*-
from resources.lib.gui.gui import cGui
from resources.lib.gui.guiElement import cGuiElement
from resources.lib.handler.requestHandler import cRequestHandler
from resources.lib.parser import cParser
from resources.lib.handler.ParameterHandler import ParameterHandler
from resources.lib import logger
from resources.lib.config import cConfig
import re, time, xbmcgui
SITE_IDENTIFIER = 'animes-stream24_tv'
SITE_NAME = 'Animes-Stream24'
SITE_ICON = 'as24.png'
URL_MAIN = 'http://as.animes-stream24.tv/'
URL_MAIN_2 = 'http://as.anime-stream24.co/' #BACKUP URL
def load():
oGui = cGui()
params = ParameterHandler()
logger.info("Load %s" % SITE_NAME)
if showAdult():
params.setParam('entryMode', "a_z")
oGui.addFolder(cGuiElement('A BIS Z', SITE_IDENTIFIER, 'showMovies'), params)
params.setParam('entryMode', "top_animes")
oGui.addFolder(cGuiElement('Top', SITE_IDENTIFIER, 'showMovies'), params)
params.setParam('entryMode', "new")
oGui.addFolder(cGuiElement('Neuste Animes', SITE_IDENTIFIER, 'showMovies'), params)
params.setParam('entryMode', "a_z")
oGui.addFolder(cGuiElement('Suche', SITE_IDENTIFIER, 'showSearch'), params)
else:
oGui.addFolder(cGuiElement('Um Inhalte sehen zu können, muss der Adult Content aktiviert werden. \n(Klicke hier, um diese zu öffnen)', SITE_IDENTIFIER, 'getConf'), params)
oGui.setEndOfDirectory()
def showMovies(sURL = False, sGui = False, sSearchText = ""):
oGui = sGui if sGui else cGui()
if not sURL: sURL = URL_MAIN
params = ParameterHandler()
eMode = ""
if not eMode:
eMode = params.getValue('entryMode')
else:
eMode = "ERROR"
if "top_animes" in eMode:
pattern = 'class="separator".*?<a href="([^"]+)".*?' #link
pattern += '<img src="([^"]+)".*?' #img
pattern += '([^><]+)</a>' #titel
elif "a_z" in eMode:
pattern = "<option value='([^']+)'>([^><]+)</option>" #link, titel
elif "new" in eMode:
sURL = sURL + "search?updated-max=" + time.strftime("%Y-%m-%d") + "T08:48:00%2B01:00&max-results="
pattern = False
aResult = False
else:
if not sGui: oGui.showInfo('xStream', eMode)
return
if pattern:
oRequestHandler = cRequestHandler(sURL)
sHtmlContent = oRequestHandler.request()
oParser = cParser()
aResult = oParser.parse(sHtmlContent, pattern)
if not aResult[0]:
if not sGui: oGui.showInfo('xStream', 'Es wurde kein Eintrag gefunden')
return
total = len(aResult[1])
qual = "1080"
if "top_animes" in eMode:
for link, img, title in aResult[1]:
guiElement = cGuiElement(title, SITE_IDENTIFIER, 'getEpisodes')
guiElement.setThumbnail(img)
#guiElement.setDescription(plot.decode('iso-8859-1'))
guiElement.setMediaType('movie')
params.setParam('eUrl',link)
oGui.addFolder(guiElement, params, bIsFolder = True, iTotal = total)
elif "new" in eMode:
ymd_date = time.strftime("%Y-%m-%d")
params.setParam('eUrl',sURL + "11")
oGui.addFolder(cGuiElement("Zeige letzte 11 Einträge (" + ymd_date +")", SITE_IDENTIFIER, 'getEpisodes'),params)
params.setParam('eUrl',sURL + "22")
oGui.addFolder(cGuiElement("Zeige letzte 22 Einträge (" + ymd_date +")", SITE_IDENTIFIER, 'getEpisodes'),params)
params.setParam('eUrl',sURL + "44")
oGui.addFolder(cGuiElement("Zeige letzte 44 Einträge (" + ymd_date +")", SITE_IDENTIFIER, 'getEpisodes'),params)
elif "a_z" in eMode:
#sPattern = params.getValue('search_on')
sPattern = sSearchText; a = []
reg_ex = re.compile('.*' + sSearchText + '.*?', re.I)
pattern = "class='post-title entry-title'><a href='([^']+)'>" #link
pattern += "([^><]+).*?" #ep_Name
pattern += '<img.*?src="([^"]+)".*?bung:.*?/>' #Img
pattern += "(.*?)<br./>" #plot /Gen
if sPattern:
for link, title in aResult[1]:
if re.search(reg_ex,title):
guiElement = cGuiElement(title, SITE_IDENTIFIER, 'getEpisodes')
sHtml = cRequestHandler(link).request()
a = oParser.parse(sHtml, pattern)
#xbmcgui.Dialog().ok("SHOW",str(a[1][1][3])) #.encode("utf-8"))
guiElement.setThumbnail(a[1][1][2])
guiElement.setDescription(a[1][1][3])
params.setParam('eUrl',link)
oGui.addFolder(guiElement, params, bIsFolder = True, iTotal = total)
else:
for link, title in aResult[1]:
guiElement = cGuiElement(title, SITE_IDENTIFIER, 'getEpisodes')
"""
        TODO: ERROR HANDLING FOR OUT OF RANGE - OTHERWISE THE REQUEST LOADS FOREVER
        POSSIBLY SPLIT UP, OR USE SEPARATE THREADS??
----------------------------------------------------------------------
sHtml = cRequestHandler(link).request()
a = oParser.parse(sHtml, pattern)
guiElement.setThumbnail(a[1][1][2])
guiElement.setDescription(a[1][1][3].decode('iso-8859-1').encode('utf-8'))
"""
params.setParam('eUrl',link)
oGui.addFolder(guiElement, params, bIsFolder = True, iTotal = total)
oGui.setView('movies')
oGui.setEndOfDirectory()
def getEpisodes():
oGui = cGui()
oParser = cParser()
params = ParameterHandler()
eUrl = ParameterHandler().getValue('eUrl')
    eUrl = eUrl.replace(" ", "%20"); eUrl = eUrl.replace("+", "%2B") # percent-encode spaces and "+" for the URL
isMovie = True
pattern = "class='post-title entry-title'><a href='([^']+)'>" #link
pattern += "([^><]+).*?" #ep_Name
pattern += '<img.*?src="([^"]+)".*?bung:.*?/>' #Img
pattern += "(.*?)<br./>" #plot /Gen
sHtmlContent = cRequestHandler(eUrl).request()
aResult = oParser.parse(sHtmlContent, pattern)
bResult = oParser.parse(sHtmlContent, "older-link'.*?href='([^']+)'")
if not aResult[0]:
oGui.showInfo('xStream', 'Es wurde kein Eintrag gefunden')
return
total = len(aResult[1])
for link, title, img, plot in aResult[1]:
GuiElement = cGuiElement(title, SITE_IDENTIFIER, 'getHosters')
GuiElement.setMediaType('movie' if isMovie else 'tvshow')
GuiElement.setThumbnail(img)
        plot = plot.replace('<b>', '')  # str.replace returns a new string; assign it back
        GuiElement.setDescription(plot)  #.decode('iso-8859-1').encode('utf-8'))
#GuiElement.setYear(year)
params.setParam('siteUrl', link)
params.setParam('sName', title)
oGui.addFolder(GuiElement, params, False, total)
if 'entry-title' in cRequestHandler(bResult[1][0]).request():
params.setParam('eUrl', bResult[1][0])
oGui.addFolder(cGuiElement("Weitere Episoden -->", SITE_IDENTIFIER, 'getEpisodes'),params)
#logger.info('[[suhmser]] %s: ' % str(bResult[1][0]))
oGui.setView('movies')
oGui.setEndOfDirectory()
def getHosters():
oParams = ParameterHandler()
oGui = cGui()
sUrl = oParams.getValue('siteUrl')
sHtmlContent = cRequestHandler(sUrl).request()
sPattern = '<iframe.*?(?:src|SRC)="([^"]+).*?(?:\<\/if|\<\/IF)'
sPattern_bkp = '-[0-9]".?>.*?(?:src|SRC)="([^"]+)".*?'
#sPattern_alone = '#fragment.*?src|SRC="//([^"]+)".*?>(?:' #s_url
aResult = cParser().parse(sHtmlContent, sPattern)
if aResult[0]:
hosters = []
#test_link = "*.mp4"
#hosters.append({'link': test_link, 'name': 'Testing_link', 'resolveable': True})
reg_ex = re.compile('(?://|\.)?(?:[a-zA-Z0-9]+\.)?([a-zA-Z0-9-.]{0,})\..*?\/.*?\/?', re.I)
for sUrl in aResult[1]:
sName = re.search(reg_ex, sUrl).group(1)
if not sUrl.startswith('http'):
if sUrl.startswith('//'):
sUrl = 'http:%s' % sUrl
else:
sUrl = 'http://%s' % sUrl
hosters.append({'link': sUrl, 'name': sName, 'resolveable': True})
if hosters:
hosters.append('getHosterU
| luisgg/iteexe | exe/engine/parasabermasfpdidevice.py | Python | gpl-2.0 | 5,051 | 0.009114 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# ===========================================================================
# iDevice Para Saber más creado para la FPD por José Ramón Jiménez Reyes
# ===========================================================================
"""
Para Saber mas iDevice
"""
import logging
from exe.engine.idevice import Idevice
from exe.engine.translate import lateTranslate
from exe.engine.field import TextAreaField
import re
log = logging.getLogger(__name__)
# ===========================================================================
class ParasabermasfpdIdevice(Idevice):
"""
El iDevice Para saber permite al alumnado ampliar conocimientos voluntarios para su aprendizaje
"""
persistenceVersion = 7
def __init__(self, activity = "", answer = ""):
"""
Initialize
"""
Idevice.__init__(self,
x_(u"FPD - Para Saber Mas"),
x_(u"Jose Ramon Jimenez Reyes"),
x_(u"""Para saber más es un iDevice que permite al alumnado ampliar conocimientos, siendo estos voluntarios para su aprendizaje."""), u"", u"parasabermasfpd")
# self.emphasis = Idevice.SomeEmphasis
self.emphasis = "_parasabermasfpd"
self._activityInstruc = x_(u"""Introduce el texto que aparecerá en este iDevice""")
# self.systemResources += ["common.js"]
self.activityTextArea = TextAreaField(x_(u'Texto Para saber más'),
self._activityInstruc, activity)
self.activityTextArea.idevice = self
# Properties
activityInstruc = lateTranslate('activityInstruc')
def getResourcesField(self, this_resource):
"""
implement the specific resource finding mechanism for this iDevice:
"""
# be warned that before upgrading, this iDevice field could not exist:
if hasattr(self, 'activityTextArea')\
and hasattr(self.activityTextArea, 'images'):
for this_image in self.activityTextArea.images:
if hasattr(this_image, '_imageResource') \
and this_resource == this_image._imageResource:
return self.activityTextArea
return None
def getRichTextFields(self):
fields_list = []
if hasattr(self, 'activityTextArea'):
fields_list.append(self.activityTextArea)
return fields_list
def burstHTML(self, i):
# Parasabermasfpd Idevice:
title = i.find(name='span', attrs={'class' : 'iDeviceTitle' })
self.title = title.renderContents().decode('utf-8')
reflections = i.findAll(name='div', attrs={'id' : re.compile('^ta') })
# should be exactly two of these:
# 1st = field[0] == Activity
if len(reflections) >= 1:
self.activityTextArea.content_wo_resourcePaths = \
reflections[0].renderContents().decode('utf-8')
# and add the LOCAL resource paths back in:
            self.activityTextArea.content_w_resourcePaths = \
self.activityTextArea.MassageResourceDirsIntoContent( \
self.activityTextArea.content_wo_resourcePaths)
self.activityTextArea.content = \
self.activityTextArea.content_w_resourcePaths
def upgradeToVersion1(self):
"""
        Upgrades the node from version 0 to 1.
"""
log.debug(u"Upgrading iDevice")
self.icon = u"activity"
def upgradeToVersion2(self):
"""
Upgrades the node from 1 (v0.5) to 2 (v0.6).
Old packages will loose their icons, but they will load.
"""
log.debug(u"Upgrading iDevice")
# self.emphasis = Idevice.SomeEmphasis
self.emphasis = "_parasabermasfpd"
def upgradeToVersion3(self):
"""
Upgrades v0.6 to v0.7.
"""
self.lastIdevice = False
def upgradeToVersion4(self):
"""
Upgrades to exe v0.10
"""
self._upgradeIdeviceToVersion1()
self._activityInstruc = self.__dict__['activityInstruc']
def upgradeToVersion5(self):
"""
Upgrades to exe v0.10
"""
self._upgradeIdeviceToVersion1()
def upgradeToVersion6(self):
"""
Upgrades to v0.12
"""
self._upgradeIdeviceToVersion2()
# self.systemResources += ["common.js"]
def upgradeToVersion7(self):
"""
Upgrades to somewhere before version 0.25 (post-v0.24)
Taking the old unicode string fields, and converting them
into image-enabled TextAreaFields:
"""
        self.activityTextArea = TextAreaField(x_(u'Texto Para saber más'),
self._activityInstruc, self.activity)
self.activityTextArea.idevice = self
# ===========================================================================
| iosonofabio/singlet | singlet/io/h5ad/__init__.py | Python | mit | 1,069 | 0 |
# vim: fdm=indent
# author: Fabio Zanini
# date: 02/08/17
# content: Support module for parsing h5ad (AnnData) files.
# Modules
import numpy as np
import pandas as pd
from singlet.config import config
# Parser
def parse_dataset(
path,
obsm_keys=None,
):
import anndata
adata = anndata.read_h5ad(path)
samplesheet = adata.obs.copy()
# Add obsm (e.g. PCA, embeddings)
for key, array in adata.obsm.items():
if key.startswith('X_'):
newkey = key[2:]
if (obsm_keys is not None) and (newkey not in obsm_keys):
continue
            for j, col in enumerate(array.T, 1):
samplesheet[newkey+'_'+str(j)] = col
featuresheet = adata.var.copy()
count_mat = adata.X.toarray().T
counts_table = pd.DataFrame(
data=count_mat,
index=featuresheet.index,
columns=samplesheet.index,
)
return {
'counts': counts_table,
        'samplesheet': samplesheet,
'featuresheet': featuresheet,
}
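# A minimal usage sketch (hypothetical file path and obsm key):
#   d = parse_dataset('data.h5ad', obsm_keys=['pca'])
#   d['counts']        # features x samples DataFrame
#   d['samplesheet']   # per-cell metadata, plus flattened obsm columns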
| lamondlab/pyctk | tests/test_axeswidget.py | Python | apache-2.0 | 1,326 | 0.008296 |
# ===========================================================================
#
# Library: PyCTK
# Filename: test_axeswidget.py
#
# Copyright (c) 2015 Lamond Lab
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ===========================================================================
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
from PyCTK.Widgets import ctkAxesWidget
class Widget(QWidget):
def __init__(self, parent=None, **kwargs):
super().__init__(parent, **kwargs)
l=QVBoxLayout(self)
self._axesWidget=ctkAxesWidget(self)
l.addWidget(self._axesWidget)
if __name__=="__main__":
from sys import argv, exit
a=QApplication(argv)
w=Widget()
w.show()
w.raise_()
exit(a.exec_())
| dls-controls/pymalcolm | malcolm/modules/pva/controllers/pvaservercomms.py | Python | apache-2.0 | 14,734 | 0.000882 |
from typing import Any, Dict, List, Optional, Set
from annotypes import add_call_types, stringify_error
from cothread import cothread
from p4p import Value
from p4p.server import DynamicProvider, Server, ServerOperation
from p4p.server.cothread import Handler, SharedPV
from malcolm.core import (
APublished,
BlockMeta,
Controller,
Delta,
Error,
Method,
MethodModel,
Post,
ProcessPublishHook,
Put,
Response,
Return,
RLock,
Subscribe,
Unsubscribe,
method_return_unpacked,
)
from malcolm.modules import builtin
from .pvaconvert import convert_dict_to_value, convert_value_to_dict, update_path
class BlockHandler(Handler):
def __init__(self, controller: Controller, field: str = None) -> None:
self.controller = controller
# Lock to control access to self.pv
self._lock = RLock()
self.field = field
self.pv: Optional[SharedPV] = None
self.value: Value = None
self.put_paths: Set[str] = set()
def rpc(self, pv: SharedPV, op: ServerOperation) -> None:
value = op.value()
if value.getID() == "epics:nt/NTURI:1.0":
# We got an NTURI, get path from path and parameters from query
assert value.scheme == "pva", "Can only handle NTURI with scheme=pva"
prefix = self.controller.mri + "."
assert value.path.startswith(
prefix
), "NTURI path '%s' doesn't start with '%s'" % (value.path, prefix)
method = value.path[len(prefix) :]
parameters = convert_value_to_dict(value.query)
else:
# We got something else, take path from pvRequest method and our mri
# and parameters from the full value
if self.field is not None:
# We already know the method name
method = self.field
else:
# Get the path and string "value" from the put value
method = op.pvRequest().get("method")
assert method, "No 'method' in pvRequest:\n%s" % op.pvRequest()
parameters = convert_value_to_dict(value)
path = [self.controller.mri, method]
view = self.controller.block_view()[method]
assert isinstance(
view, Method
), "%s.%s is not a Method so cannot do RPC" % tuple(path)
add_wrapper = method_return_unpacked() in view.meta.tags
self.controller.log.debug(
f"{self.controller.mri}: RPC method {method} called with "
f"params {parameters}"
)
post = Post(path=path, parameters=parameters)
def handle_post_response(response: Response) -> None:
if isinstance(response, Return):
ret: Any
if add_wrapper:
# Method gave us return unpacked (bare string or other type)
# so we must wrap it in a structure to send it
ret = {"return": response.value}
else:
ret = response.value
v = convert_dict_to_value(ret)
if ret:
self.controller.log.debug(
f"{self.controller.mri}: RPC method {method} returned with "
f"value {ret}"
)
else:
self.controller.log.debug(
f"{self.controller.mri}: RPC method {method} returned"
)
op.done(v)
else:
if isinstance(response, Error):
message = stringify_error(response.message)
self.controller.log.debug(
f"{self.controller.mri}: RPC method {method} resulted in "
f"error ({message})"
)
else:
message = "BadResponse: %s" % response.to_dict()
self.controller.log.debug(
f"{self.controller.mri}: RPC method {method} got a bad "
f"response ({message})"
)
op.done(error=message)
post.set_callback(handle_post_response)
self.controller.handle_request(post).get()
def put(self, pv: SharedPV, op: ServerOperation) -> None:
path = [self.controller.mri]
# We work out what to Put by taking every field that is marked as
        # changed and walking up the tree, adding every dotted field name
# to the tree on the way up. This set will contain something like:
# {"attr.value", "attr"}
# Or for a table:
        #   {"table.value.colA", "table.value.colB", "table.value", "table"}
# Or if self.field:
# {"value"}
changed_fields_inc_parents = op.value().changedSet(parents=True, expand=False)
# Taking the intersection with all puttable paths should yield the
# thing we want to change, so value_changed would be:
# {"attr.value"} or {"table.value"} or {"value"}
value_changed = changed_fields_inc_parents.intersection(self.put_paths)
assert (
len(value_changed) == 1
), "Can only do a Put to a single field, got %s" % list(value_changed)
changed = list(value_changed)[0]
if self.field is not None:
# Only accept a Put to "value"
assert changed == "value", "Can only put to value of %s.%s, not %s" % (
self.controller.mri,
self.field,
changed,
)
path += [self.field, "value"]
op_value = op.value()
else:
# Get the path and string "value" from the put value
split = changed.split(".")
assert (
len(split) == 2 and split[1] == "value"
), "Can only put to value of %s.%s, not %s" % (
self.controller.mri,
split[0],
split[1],
)
path += list(split)
op_value = op.value()[split[0]]
value = convert_value_to_dict(op_value)["value"]
put = Put(path=path, value=value)
def handle_put_response(response: Response) -> None:
if isinstance(response, Return):
op.done()
else:
if isinstance(response, Error):
message = stringify_error(response.message)
else:
message = "BadResponse: %s" % response.to_dict()
op.done(error=message)
put.set_callback(handle_put_response)
self.controller.handle_request(put).get()
def handle(self, response: Response) -> None:
# Called from whatever thread the child block could be in, so
# must already be a good thread to take the lock
with self._lock:
if self.pv:
# onFirstConnect has been called, should be able to update it
try:
assert isinstance(response, Delta), (
"Expecting Delta response, got %s" % response
)
# We got a delta, create or update value and notify
if self.value is None:
# Open it with the value
self.controller.log.debug("About to open")
self._create_initial_value(response)
elif self.pv.isOpen():
# Update it with values
self._update_value(response)
except Exception:
self.controller.log.debug(
f"Closing pv because of error in response {response}",
exc_info=True,
)
# We got a return or error, close the connection to clients
self.pv.close()
def _create_initial_value(self, response: Delta) -> None:
# Called with the lock taken
assert response.changes, "No changes"
assert (
len(response.changes) == 1
and len(response.ch
| gotostack/neutron-lbaas | neutron_lbaas/tests/tempest/v2/api/base.py | Python | apache-2.0 | 14,152 | 0.000071 |
# Copyright 2015 Rackspace
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import time
from neutron.i18n import _, _LI
from neutron_lbaas.tests.tempest.v2.clients import health_monitors_client
from neutron_lbaas.tests.tempest.v2.clients import listeners_client
from neutron_lbaas.tests.tempest.v2.clients import load_balancers_client
from neutron_lbaas.tests.tempest.v2.clients import members_client
from neutron_lbaas.tests.tempest.v2.clients import pools_client
from tempest.api.network import base
from tempest import clients as tempest_clients
from tempest import config
from tempest import exceptions
from tempest.openstack.common import log as logging
CONF = config.CONF
# Use local tempest conf if one is available.
# This usually means we're running tests outside of devstack
if os.path.exists('./tests/tempest/etc/dev_tempest.conf'):
CONF.set_config_path('./tests/tempest/etc/dev_tempest.conf')
class BaseTestCase(base.BaseNetworkTest):
# This class picks non-admin credentials and run the tempest tests
_lbs_to_delete = []
@classmethod
def resource_setup(cls):
super(BaseTestCase, cls).resource_setup()
credentials = cls.isolated_creds.get_primary_creds()
mgr = tempest_clients.Manager(credentials=credentials)
auth_provider = mgr.get_auth_provider(credentials)
client_args = [auth_provider, 'network', 'regionOne']
cls.load_balancers_client = (
load_balancers_client.LoadBalancersClientJSON(*client_args))
cls.listeners_client = (
listeners_client.ListenersClientJSON(*client_args))
cls.pools_client = pools_client.PoolsClientJSON(*client_args)
cls.members_client = members_client.MembersClientJSON(*client_args)
cls.health_monitors_client = (
health_monitors_client.HealthMonitorsClientJSON(*client_args))
@classmethod
def resource_cleanup(cls):
for lb_id in cls._lbs_to_delete:
try:
lb = cls.load_balancers_client.get_load_balancer_status_tree(
lb_id).get('loadbalancer')
            except exceptions.NotFound:
continue
for listener in lb.get('listeners'):
for pool in listener.get('pools'):
hm = pool.get('healthmonitor')
if hm:
cls._try_delete_resource(
cls.health_monitors_client.delete_health_monitor,
pool.get('healthmonitor').get('id'))
                        cls._wait_for_load_balancer_status(lb_id)
cls._try_delete_resource(cls.pools_client.delete_pool,
pool.get('id'))
cls._wait_for_load_balancer_status(lb_id)
health_monitor = pool.get('healthmonitor')
if health_monitor:
cls._try_delete_resource(
cls.health_monitors_client.delete_health_monitor,
health_monitor.get('id'))
cls._wait_for_load_balancer_status(lb_id)
cls._try_delete_resource(cls.listeners_client.delete_listener,
listener.get('id'))
cls._wait_for_load_balancer_status(lb_id)
cls._try_delete_resource(
cls.load_balancers_client.delete_load_balancer, lb_id)
super(BaseTestCase, cls).resource_cleanup()
@classmethod
def setUpClass(cls):
cls.LOG = logging.getLogger(cls._get_full_case_name())
super(BaseTestCase, cls).setUpClass()
def setUp(cls):
cls.LOG.info(_LI('Starting: {0}').format(cls._testMethodName))
super(BaseTestCase, cls).setUp()
def tearDown(cls):
super(BaseTestCase, cls).tearDown()
cls.LOG.info(_LI('Finished: {0}\n').format(cls._testMethodName))
@classmethod
def _create_load_balancer(cls, wait=True, **lb_kwargs):
try:
lb = cls.load_balancers_client.create_load_balancer(**lb_kwargs)
if wait:
cls._wait_for_load_balancer_status(lb.get('id'))
except Exception:
raise Exception(_("Failed to create load balancer..."))
cls._lbs_to_delete.append(lb.get('id'))
return lb
@classmethod
def _create_active_load_balancer(cls, **kwargs):
lb = cls._create_load_balancer(**kwargs)
lb = cls._wait_for_load_balancer_status(lb.get('id'))
return lb
@classmethod
def _delete_load_balancer(cls, load_balancer_id, wait=True):
cls.load_balancers_client.delete_load_balancer(load_balancer_id)
if wait:
cls._wait_for_load_balancer_status(
load_balancer_id, delete=True)
@classmethod
def _update_load_balancer(cls, load_balancer_id, wait=True, **lb_kwargs):
lb = cls.load_balancers_client.update_load_balancer(
load_balancer_id, **lb_kwargs)
if wait:
cls._wait_for_load_balancer_status(
load_balancer_id)
return lb
@classmethod
def _wait_for_load_balancer_status(cls, load_balancer_id,
provisioning_status='ACTIVE',
operating_status='ONLINE',
delete=False):
interval_time = 10
timeout = 300
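        # Poll every interval_time (10) seconds, for at most timeout (300)
        # seconds, until the LB reaches both requested statuses or, on
        # delete, disappears entirely.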
end_time = time.time() + timeout
lb = {}
while time.time() < end_time:
try:
lb = cls.load_balancers_client.get_load_balancer(
load_balancer_id)
if not lb:
# loadbalancer not found
if delete:
break
else:
raise Exception(
_("loadbalancer {lb_id} not"
" found").format(
lb_id=load_balancer_id))
if (lb.get('provisioning_status') == provisioning_status and
lb.get('operating_status') == operating_status):
break
time.sleep(interval_time)
except exceptions.NotFound as e:
# if wait is for delete operation do break
if delete:
break
else:
# raise original exception
raise e
else:
raise Exception(
_("Wait for load balancer ran for {timeout} seconds and did "
"not observe {lb_id} reach {provisioning_status} "
"provisioning status and {operating_status} "
"operating status.").format(
timeout=timeout,
lb_id=load_balancer_id,
provisioning_status=provisioning_status,
operating_status=operating_status))
return lb
@classmethod
def _create_listener(cls, wait=True, **listener_kwargs):
listener = cls.listeners_client.create_listener(**listener_kwargs)
if wait:
cls._wait_for_load_balancer_status(cls.load_balancer.get('id'))
return listener
@classmethod
def _delete_listener(cls, listener_id, wait=True):
cls.listeners_client.delete_listener(listener_id)
if wait:
cls._wait_for_load_balancer_status(cls.load_balancer.get('id'))
@classmethod
def _update_listener(cls, listener_id, wait=True, **listener_kwargs):
listener = cls.listene
| kapilgarg1996/gmc | gmc/conf/global_settings.py | Python | mit | 1,053 | 0.021842 |
DATASET_DIR = '/tmp'
BRAIN_DIR = '/tmp'
GENRES = [
'blues', 'classical', 'country', 'disco', 'hiphop',
'jazz', 'metal', 'pop', 'reggae', 'rock'
]
NUM_BEATS = 10
KEEP_FRAMES = 0
TRAIN_TEST_RATIO = [7, 3]
MODE = 'nn'
PCA = False
FEATURES = ['mfcc', 'dwt', 'beat']
MFCC_EXTRA = ['delta', 'ddelta', 'energy']
DWT = ['mean', 'std', 'max', 'min']
FEATURES_LENGTH = {
'mfcc' : 160,
'dwt' : 112,
'beat' : 11
}
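# With all three feature groups above enabled, the flattened feature vector
# presumably has 160 + 112 + 11 = 283 entries.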
FRAME_LENGTH = 0.025
HOP_LENGTH = 0.005
N_MFCC = 13
W_FRAME_SCALE = 10
NN = {
'NUM_HIDDEN_LAYERS' : 2,
'HIDDEN_INPUTS' : [1024, 1024],
'RANDOM' : True,
'BATCH_SIZE' : 100,
'TRAINING_CYCLES' : 1000,
    'LEARNING_RATE' : 0.01,
'DROPOUT_PROB' : 0.6
}
CNN = {
'NUM_HIDDEN_LAYERS' : 2,
'NUM_DENSE_LAYERS' : 1,
'HIDDEN_FEATURES' : [32, 64],
'DENSE_INPUTS' : [128],
'INPUT_SHAPE' : [16, 17],
'PATCH_SIZE' : [5, 5],
'RANDOM' : False,
'STRIDES' : [1, 1, 1, 1],
'BATCH_SIZE' : 100,
    'TRAINING_CYCLES' : 1000,
'LEARNING_RATE' : 0.01,
'DROPOUT_PROB' : 0.6
}
| jdilallo/jdilallo-test | examples/adwords/v201309/campaign_management/get_all_disapproved_ads_with_awql.py | Python | apache-2.0 | 1,927 | 0.008822 |
#!/usr/bin/python
#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example gets all disapproved ads for a given campaign with AWQL.
To add an ad, run add_ads.py.
Tags: AdGroupAdService.get
"""
__author__ = ('api.kwinter@gmail.com (Kevin Winter)',
              'Joseph DiLallo')
from googleads import adwords
CAMPAIGN_ID = 'INSERT_CAMPAIGN_ID_HERE'
def main(client, campaign_id):
  # Initialize appropriate service.
ad_group_ad_service = client.GetService('AdGroupAdService', version='v201309')
# Construct query and get all ads for a given campaign.
query = ('SELECT Id, AdGroupAdDisapprovalReasons '
'WHERE CampaignId = %s AND '
'AdGroupCreativeApprovalStatus = DISAPPROVED '
'ORDER BY Id' % campaign_id)
ads = ad_group_ad_service.query(query)
  # Display results.
if 'entries' in ads:
for ad in ads['entries']:
print ('Ad with id \'%s\' was disapproved for the following reasons: '
% (ad['ad']['id']))
if ad['ad'].get('disapprovalReasons'):
for reason in ad['ad']['disapprovalReasons']:
print '\t%s' % reason
else:
print '\tReason not provided.'
else:
print 'No disapproved ads were found.'
if __name__ == '__main__':
# Initialize client object.
adwords_client = adwords.AdWordsClient.LoadFromStorage()
main(adwords_client, CAMPAIGN_ID)
| carlsonp/kaggle-TrulyNative | scikit_generate_prediction2.py | Python | gpl-3.0 | 1,784 | 0.01065 |
from __future__ import print_function
import pickle, os, sys, glob, hashlib
from sklearn.ensemble import RandomForestClassifier
import pandas as pd
test_files = set(pd.read_csv('./data/sampleSubmission_v2.csv').file.values)
train = pd.read_csv('./data/train_v2.csv')
df_full = pickle.load(open( "df_full.p", "rb"))
#no point using empty files in our training set so we remove them
print('--- Removing empty files')
filepaths = glob.glob('data/*/*.txt')
for filepath in filepaths:
if os.path.getsize(filepath) == 0:
filename = os.path.basename(filepath)
df_full = df_full[df_full.file != filename]
if filename in test_files:
print("Found empty file in submission: ", filename)
#https://www.youtube.com/watch?v=0GrciaGYzV0
print('--- Training random forest')
clf = RandomForestClassifier(n_estimators=300, n_jobs=-1, random_state=0)
train_data = df_full[df_full.sponsored.notnull()].fillna(0)
test = df_full[df_full.sponsored.isnull() & df_full.file.isin(test_files)].fillna(0)
clf.fit(train_data.drop(['file', 'sponsored'], 1), train_data.sponsored)
#normalized value between 0 and 1
feature_importances = pd.Series(clf.feature_importances_, index=train_data.drop(['file', 'sponsored'], 1).columns)
feature_importances.sort()
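# Series.sort() is the older pandas API: it sorts the values in place, ascending.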
with pd.option_context('display.max_rows', len(feature_importances), 'display.max_columns', 10):
print(feature_importances)
print('--- Create predictions and submission')
submission = test[['file']].reset_index(drop=True)
submission['sponsored'] = clf.predict_proba(test.drop(['file', 'sponsored'], 1))[:, 1]
#make sure submission has the correct number of rows
if len(submission) != 66772:
print("Error: wrong dimension! Not generating submission CSV file.")
else:
submission.to_csv('native_btb_basic_submission.csv', index=False)
| plotly/python-api | packages/python/plotly/plotly/graph_objs/scattermapbox/_textfont.py | Python | mit | 8,586 | 0.000582 |
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Textfont(_BaseTraceHierarchyType):
# class properties
# --------------------
_parent_path_str = "scattermapbox"
_path_str = "scattermapbox.textfont"
_valid_props = {"color", "family", "size"}
# color
# -----
@property
def color(self):
"""
The 'color' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
Returns
-------
str
"""
return self["color"]
@color.setter
def color(self, val):
self["color"] = val
# family
# ------
@property
def family(self):
"""
HTML font family - the typeface that will be applied by the web
browser. The web browser will only be able to apply a font if
it is available on the system which it operates. Provide
multiple font families, separated by commas, to indicate the
preference in which to apply fonts if they aren't available on
the system. The Chart Studio Cloud (at https://chart-
studio.plotly.com or on-premise) generates images on a server,
where only a select number of fonts are installed and
supported. These include "Arial", "Balto", "Courier New",
"Droid Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas
One", "Old Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
The 'family' property is a string and must be specified as:
- A non-empty string
Returns
-------
str
"""
return self["family"]
@family.setter
def family(self, val):
self["family"] = val
# size
# ----
@property
def size(self):
"""
The 'size' property is a number and may be specified as:
- An int or float in the interval [1, inf]
Returns
-------
int|float
"""
return self["size"]
@size.setter
def size(self, val):
self["size"] = val
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
color
family
HTML font family - the typeface that will be applied by
the web browser. The web browser will only be able to
apply a font if it is available on the system which it
operates. Provide multiple font families, separated by
commas, to indicate the preference in which to apply
fonts if they aren't available on the system. The Chart
Studio Cloud (at https://chart-studio.plotly.com or on-
premise) generates images on a server, where only a
select number of fonts are installed and supported.
These include "Arial", "Balto", "Courier New", "Droid
Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas
One", "Old Standard TT", "Open Sans", "Overpass", "PT
Sans Narrow", "Raleway", "Times New Roman".
size
"""
def __init__(self, arg=None, color=None, family=None, size=None, **kwargs):
"""
Construct a new Textfont object
Sets the icon text font (color=mapbox.layer.paint.text-color,
size=mapbox.layer.layout.text-size). Has an effect only when
`type` is set to "symbol".
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of
:class:`plotly.graph_objs.scattermapbox.Textfont`
color
family
HTML font family - the typeface that will be applied by
the web browser. The web browser will only be able to
apply a font if it is available on the system which it
operates. Provide multiple font families, separated by
commas, to indicate the preference in which to apply
fonts if they aren't available on the system. The Chart
            Studio Cloud (at https://chart-studio.plotly.com or on-
premise) generates images on a server, where only a
select number of fonts are installed and supported.
These include "Arial", "Balto", "Courier New", "Droid
Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas
One", "Old Standard TT", "Open Sans", "Over
|
pass", "PT
Sans Narrow", "Raleway", "Times New Roman".
size
Returns
-------
Textfont
"""
super(Textfont, self).__init__("textfont")
if "_parent" in kwargs:
self._parent = kwargs["_parent"]
return
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.scattermapbox.Textfont
constructor must be a dict or
an instance of :class:`plotly.graph_objs.scattermapbox.Textfont`"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
self._validate = kwargs.pop("_validate", True)
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("color", None)
_v = color if color is not None else _v
if _v is not None:
self["color"] = _v
_v = arg.pop("family", None)
_v = family if family is not None else _v
| PhilHarnish/forge | src/puzzle/problems/crossword/_cryptic_nodes.py | Python | mit | 437 | 0.011442 |
from typing import NamedTuple, List
from data import crossword
class Clue(str):
def __init__(self, value) -> None:
super(Clue, self).__init__(value)
self._tokens = crossword.tokenize_clue(value)
class _Node(object):
_clue: Clue
_occupied: int
    def __init__(self, clue: Clue, occupied: int) -> None:
self._clue = clue
self._occupied = occupied
class Parsed(List):
pass
# A list of nodes, initially Nulls
| AnTAVR/aai2 | src/modules/net/m_wifi.py | Python | gpl-2.0 | 517 | 0 |
import logging
from gettext import gettext as _
from .l_connector import ConnectorBase
from .l_net import ModuleStrategyBase
from .main import OptionsWIFI
logger = logging.getLogger(__name__)
# noinspection PyAbstractClass
class Connector(ConnectorBase):
opti_: OptionsWIFI
class Module(ModuleStrategyBase):
ID = 'net_wifi'
opti_: OptionsWIFI
OptionsClass = OptionsWIFI
_connector: Connector
ConnectorClass = Connector
@property
def name(self) -> str:
        return _('WIFI')
| ideaworld/FHIR_Tester | FHIR_Tester_backend/home/views.py | Python | mit | 7,138 | 0.008826 |
from django.shortcuts import render
from services.genomics_test_generator.fhir_genomics_test_gene import *
from django.views.decorators.csrf import csrf_exempt
from django.http import HttpResponse, HttpResponseRedirect
from django.template import RequestContext, loader
import json
from home.task_runner import perform_test
from home.models import task, server, resource
from home.search import search_basedon_id
from services import auth
from home.matrix import form_resource_martix, form_level_martix,form_matrix
import traceback
# Create your views here.
@csrf_exempt
def submit_task(request):
#get code, language, type
req_json = json.loads(request.body)
print req_json
code = req_json['code']
language = req_json['language']
test_type = req_json['type']
resource_list = []
if test_type == 3 or test_type == 0:
resource_state = req_json['resources']
print resource_state
for item in resource_state:
if item['checked']:
                resource_list.append(item['name'])
print resource_list
if 'chosen_server' in req_json:
#ser url and access token
try:
server_obj = server.objects.get(server_id=int(req_json['chosen_server']))
url = server_obj.server_url
access_token = server_obj.access_token
except:
traceback.print_exc()
result = {
'isSuccessful':False,
'error':"Invalid server"
}
            return HttpResponse(json.dumps(result), content_type="application/json")
else:
access_token = req_json['access_token']
url = req_json['url']
token = None
try:
token = req_json['token']
except:
pass
username = None
if token:
username = auth.extract_username(token)
print access_token
#return task id
if 'chosen_server' in req_json:
task_id = perform_test(language=language,code=code,url=url,test_type=test_type,server_id=req_json['chosen_server'], resource_list=resource_list, access_token=access_token, username=username)
else:
task_id = perform_test(language=language,code=code,url=url,test_type=test_type,server_id=None, resource_list=resource_list, access_token=access_token, username=username)
result = {
'isSuccessful':True,
'task_id':task_id
}
return HttpResponse(json.dumps(result), content_type="application/json")
@csrf_exempt
def get_resource_matrix(request):
result = form_resource_martix()
return HttpResponse(json.dumps(result), content_type="application/json")
@csrf_exempt
def get_resources(request):
resource_type = request.GET.get('type', 0)
if isinstance(resource_type,str):
try:
resource_type = int(resource_type)
except:
resource_type = 0
result = {
'isSuccessful':False,
'names':[]
}
try:
resources = resource.objects.filter(resource_type=resource_type)
for resource_obj in resources:
result['names'].append({'name':resource_obj.name,'checked':True})
result['isSuccessful'] = True
except:
pass
return HttpResponse(json.dumps(result), content_type="application/json")
@csrf_exempt
def add_new_server(request):
req_json = json.loads(request.body)
result = {
'isSuccessful': False
}
try:
server_name = req_json['name']
server_url = req_json['url']
access_token = None
if 'token' in req_json:
access_token = req_json['token']
new_server = server(server_name=server_name,server_url=server_url,access_token=access_token)
new_server.save()
result['isSuccessful'] = True
except:
pass
return HttpResponse(json.dumps(result), content_type="application/json")
@csrf_exempt
def delete_server(request):
req_json = json.loads(request.body)
result = {
'isSuccessful': False
}
try:
server_id = req_json['id']
server_obj = server.objects.get(server_id=server_id)
if server_obj.is_deletable:
server_obj.is_delete = True
server_obj.save()
result['isSuccessful'] = True
else:
result['error'] = 'No access to delete this server'
except:
result['error'] = 'problem while deleting'
return HttpResponse(json.dumps(result), content_type='application/json')
@csrf_exempt
def get_all_servers(request):
result = {
'isSuccessful' : False
}
try:
server_list = server.objects.filter(is_delete=False)
result['servers'] = []
for server_obj in server_list:
result['servers'].append({'name':server_obj.server_name,'id':server_obj.server_id,'url':server_obj.server_url, 'is_deletable':server_obj.is_deletable})
result['isSuccessful'] = True
except:
pass
return HttpResponse(json.dumps(result), content_type="application/json")
@csrf_exempt
def get_user_task_history(request):
req_json = json.loads(request.body)
try:
token = req_json['token']
except:
return {
'isSuccessful': False
}
result = {
'isSuccessful': False
}
if token:
try:
username = auth.extract_username(token)
task_obj_list = task.objects.filter(user_id=username)
task_list = []
for task_obj in task_obj_list:
task_id = task_obj.task_id
task_time = task_obj.create_time
task_list.append({
'task_id':task_id,
'time':task_time.strftime("%Y-%m-%d")
})
result['tasks'] = task_list
result['isSuccessful'] = True
except:
pass
return HttpResponse(json.dumps(result), content_type="application/json")
@csrf_exempt
def search_task(request):
req_json = json.loads(request.body)
keyword = req_json['keyword']
result = {
'isSuccessful': True
}
result['tasks'] = search_basedon_id(keyword)
return HttpResponse(json.dumps(result), content_type="application/json")
@csrf_exempt
def all_test_time(request):
req_json = json.loads(request.body)
ttype = req_json['ttype']
result = {
'isSuccessful':True
}
time_list = task.objects.filter(task_type=ttype,status="finished").order_by('-create_time').values_list('create_time', flat=True)
strtime_list = []
for time_obj in time_list:
strtime_list.append(time_obj.strftime('%Y-%m-%d %H:%M:%S'))
result['times'] = strtime_list
return HttpResponse(json.dumps(result), content_type="application/json")
@csrf_exempt
def get_certain_matrix(request):
req_json = json.loads(request.body)
ttype = str(req_json['ttype'])
result = {
'isSuccessful':True
}
ttime = None
if 'time' in req_json:
ttime = req_json['time']
print ttime,ttype
result['matrix'] = form_matrix(ttype, ttime)
return HttpResponse(json.dumps(result), content_type="application/json")
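# --- Usage sketch (added for illustration; not part of the original module) ---
# A minimal client for the JSON endpoints above. The URL paths are hypothetical,
# since the project's URLconf is not shown here; each view expects a JSON body
# and answers with a JSON object carrying an 'isSuccessful' flag.
#
# import json
# import requests
# resp = requests.post('http://localhost:8000/api/get_user_task_history/',
#                      data=json.dumps({'token': '<auth token>'}))
# body = resp.json()
# if body['isSuccessful']:
#     for t in body['tasks']:
#         print t['task_id'], t['time']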
|
apporc/cinder
|
cinder/volume/drivers/zfssa/zfssaiscsi.py
|
Python
|
apache-2.0
| 45,567
| 0.000044
|
# Copyright (c) 2014, 2015, Oracle and/or its affiliates. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
ZFS Storage Appliance Cinder Volume Driver
"""
import ast
import math
from oslo_config import cfg
from oslo_log import log
from oslo_serialization import base64
from oslo_utils import units
import six
from cinder import exception
from cinder import utils
from cinder.i18n import _, _LE, _LI, _LW
from cinder.image import image_utils
from cinder.volume import driver
from cinder.volume.drivers.san import san
from cinder.volume.drivers.zfssa import zfssarest
from cinder.volume import volume_types
import taskflow.engines
from taskflow.patterns import linear_flow as lf
from taskflow import task
CONF = cfg.CONF
LOG = log.getLogger(__name__)
ZFSSA_OPTS = [
cfg.StrOpt('zfssa_pool',
help='Storage pool name.'),
cfg.StrOpt('zfssa_project',
help='Project name.'),
cfg.StrOpt('zfssa_lun_volblocksize', default='8k',
choices=['512', '1k', '2k', '4k', '8k', '16k', '32k', '64k',
'128k'],
help='Block size.'),
cfg.BoolOpt('zfssa_lun_sparse', default=False,
help='Flag to enable sparse (thin-provisioned): True, False.'),
cfg.StrOpt('zfssa_lun_compression', default='off',
choices=['off', 'lzjb', 'gzip-2', 'gzip', 'gzip-9'],
help='Data compression.'),
cfg.StrOpt('zfssa_lun_logbias', default='latency',
choices=['latency', 'throughput'],
help='Synchronous write bias.'),
cfg.StrOpt('zfssa_initiator_group', default='',
help='iSCSI initiator group.'),
cfg.StrOpt('zfssa_initiator', default='',
help='iSCSI initiator IQNs. (comma separated)'),
cfg.StrOpt('zfssa_initiator_user', default='',
help='iSCSI initiator CHAP user (name).'),
cfg.StrOpt('zfssa_initiator_password', default='',
help='Secret of the iSCSI initiator CHAP user.', secret=True),
cfg.StrOpt('zfssa_initiator_config', default='',
help='iSCSI initiators configuration.'),
cfg.StrOpt('zfssa_target_group', default='tgt-grp',
help='iSCSI target group name.'),
cfg.StrOpt('zfssa_target_user', default='',
help='iSCSI target CHAP user (name).'),
cfg.StrOpt('zfssa_target_password', default='', secret=True,
help='Secret of the iSCSI target CHAP user.'),
cfg.StrOpt('zfssa_target_portal',
help='iSCSI target portal (Data-IP:Port, w.x.y.z:3260).'),
cfg.StrOpt('zfssa_target_interfaces',
help='Network interfaces of iSCSI targets. (comma separated)'),
cfg.IntOpt('zfssa_rest_timeout',
help='REST connection timeout. (seconds)'),
cfg.StrOpt('zfssa_replication_ip', default='',
help='IP address used for replication data. (maybe the same as '
'data ip)'),
cfg.BoolOpt('zfssa_enable_local_cache', default=True,
help='Flag to enable local caching: True, False.'),
cfg.StrOpt('zfssa_cache_project', default='os-cinder-cache',
help='Name of ZFSSA project where cache volumes are stored.')
]
CONF.register_opts(ZFSSA_OPTS)
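# Illustrative only (values are hypothetical): these options would typically be
# set in the backend section of cinder.conf together with the san.* options used
# in do_setup() below, e.g.:
#
#   [zfssa-iscsi]
#   volume_driver = cinder.volume.drivers.zfssa.zfssaiscsi.ZFSSAISCSIDriver
#   san_ip = 10.0.0.10
#   san_login = admin
#   san_password = secret
#   zfssa_pool = mypool
#   zfssa_project = cinder
#   zfssa_target_portal = 10.0.0.11:3260
#   zfssa_target_interfaces = e1000g0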
ZFSSA_LUN_SPECS = {
'zfssa:volblocksize',
'zfssa:sparse',
'zfssa:compression',
'zfssa:logbias',
}
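# Illustrative only: the scoped keys above are meant to be attached to a volume
# type as extra specs (key names from ZFSSA_LUN_SPECS; values hypothetical):
#
#   cinder type-create zfssa-gzip
#   cinder type-key zfssa-gzip set zfssa:compression=gzip zfssa:sparse=true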
def factory_zfssa():
return zfssarest.ZFSSAApi()
class ZFSSAISCSIDriver(driver.ISCSIDriver):
"""ZFSSA Cinder iSCSI volume driver.
Version history:
1.0.1:
Backend enabled volume migration.
Local cache feature.
"""
VERSION = '1.0.1'
protocol = 'iSCSI'
def __init__(self, *args, **kwargs):
super(ZFSSAISCSIDriver, self).__init__(*args, **kwargs)
self.configuration.append_config_values(ZFSSA_OPTS)
self.configuration.append_config_values(san.san_opts)
self.zfssa = None
self.tgt_zfssa = None
self._stats = None
self.tgtiqn = None
def _get_target_alias(self):
"""return target alias."""
return self.configuration.zfssa_target_group
def do_setup(self, context):
"""Setup - create multiple elements.
Project, initiators, initiatorgroup, target and targetgroup.
"""
lcfg = self.configuration
LOG.info(_LI('Connecting to host: %s.'), lcfg.san_ip)
self.zfssa = factory_zfssa()
self.tgt_zfssa = factory_zfssa()
self.zfssa.set_host(lcfg.san_ip, timeout=lcfg.zfssa_rest_timeout)
auth_str = '%s:%s' % (lcfg.san_login, lcfg.san_password)
auth_str = base64.encode_as_text(auth_str)[:-1]
self.zfssa.login(auth_str)
self.zfssa.create_project(lcfg.zfssa_pool, lcfg.zfssa_project,
compression=lcfg.zfssa_lun_compression,
logbias=lcfg.zfssa_lun_logbias)
if lcfg.zfssa_enable_local_cache:
self.zfssa.create_project(lcfg.zfssa_pool,
lcfg.zfssa_cache_project,
compression=lcfg.zfssa_lun_compression,
logbias=lcfg.zfssa_lun_logbias)
schemas = [
{'property': 'image_id',
'description': 'OpenStack image ID',
'type': 'String'},
{'property': 'updated_at',
'description': 'Most recent updated time of image',
'type': 'String'}]
self.zfssa.create_schemas(schemas)
if (lcfg.zfssa_initiator_config != ''):
initiator_config = ast.literal_eval(lcfg.zfssa_initiator_config)
for initiator_group in initiator_config:
zfssa_initiator_group = initiator_group
for zfssa_initiator in initiator_config[zfssa_initiator_group]:
self.zfssa.create_initiator(zfssa_initiator['iqn'],
zfssa_initiator_group + '-' +
zfssa_initiator['iqn'],
chapuser=
zfssa_initiator['user'],
chapsecret=
zfssa_initiator['password'])
if (zfssa_initiator_group != 'default'):
self.zfssa.add_to_initiatorgroup(
zfssa_initiator['iqn'],
zfssa_initiator_group)
else:
LOG.warning(_LW('zfssa_initiator_config not found. '
'Using deprecated configuration options.'))
if (lcfg.zfssa_initiator != '' and
(lcfg.zfssa_initiator_group == '' or
lcfg.zfssa_initiator_group == 'default')):
LOG.warning(_LW('zfssa_initiator: %(ini)s'
' wont be used on '
'zfssa_initiator_group= %(inigrp)s.'),
{'ini': lcfg.zfssa_initiator,
'inigrp': lcfg.zfssa_initiator_group})
# Setup initiator and initiator group
if (lcfg.zfssa_initiator != '' and
lcfg.zfssa_initiator_group != '' and
lcfg.zfssa_initiator_group != 'default'):
for initiator in lcfg.zfssa_initiator.split(','):
self.zfssa.create_initiator(
initiator, lcfg.zfssa_initiator_group + '-' +
initiator)
|
Yukarumya/Yukarum-Redfoxes
|
browser/themes/preprocess-tab-svgs.py
|
Python
|
mpl-2.0
| 1,392
| 0.002874
|
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import buildconfig
from mozbuild.preprocessor import preprocess
# By default, the pre-processor used for jar.mn will use "%" as a marker
# for ".css" files and "#" otherwise. This falls apart when a file using
# one marker needs to include a file with the other marker since the
# pre-processor instructions in the included file will not be
# processed. The following SVG files need to include a file which uses
# "%" as the marker
|
so we invoke the pre- processor ourselves here with
# the marker specified. The resulting SVG files will get packaged by the
# processing of the jar file in the appropriate directory.
def _do_preprocessing(output_svg, input_svg_file, additional_defines):
additional_defines.update(buildconfig.defines)
return preprocess(output=output_svg,
includes=[input_svg_file],
marker='%',
defines=additional_defines)
def tab_side_start(output_svg, input_svg_file):
return _do_preprocessing(output_svg, input_svg_file, {'TAB_SIDE': 'start'})
def tab_side_end(output_svg, input_svg_file):
return _do_preprocessing(output_svg, input_svg_file, {'TAB_SIDE': 'end'})
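# Usage sketch (hypothetical file names; requires a mozilla-central build
# environment so that buildconfig.defines is populated):
#
#   tab_side_start('tab-selected-start.svg', 'tab-selected.svg')
#   tab_side_end('tab-selected-end.svg', 'tab-selected.svg')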
|
hamogu/marxs
|
marxs/optics/multiLayerMirror.py
|
Python
|
gpl-3.0
| 7,308
| 0.004379
|
# Licensed under GPL version 3 - see LICENSE.rst
import numpy as np
from astropy.io import ascii
from .base import FlatOpticalElement, FlatStack
from ..math.utils import norm_vector, e2h, h2e
class FlatBrewsterMirror(FlatOpticalElement):
'''Flat mirror operated at the Brewster angle.
Calculation of the Fresnel coefficients can be computationally intense
and also requires knowledge of the refractive index for the appropriate material.
The ``FlatBrewsterMirror`` simplifies this for a mirror that is known to be
operated at the Brewster angle.
This mirror assumes that all photons arrive at the Brewster angle
where only s (senkrecht = direction perpendicular to plane of incidence)
polarisation is reflected.
It also assumes that all photons that are not reflected (i.e. those that
are transmitted) are lost. No transmitted photons are returned; instead, the
probability of the reflected photons is adjusted to account for this overall loss.
'''
display = {'color': (0., 1., 0.),
'shape': 'box',
'box-half': '+x',
}
def fresnel(self, photons, intersect, intersection, local):
'''The incident angle can easily be calculated from e_x and photons['dir'].
Returns
-------
refl_s, refl_p : np.array or float
Reflection probability for s and p polarized photons.
Typically, the number will depend on the incident angle and energy
of each photon and thus the return value will be a vector.
'''
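# Idealized Brewster-angle case: the s component is fully reflected and the
# p component is fully transmitted (and thus lost), independent of energy.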
return 1., 0.
def specific_process_photons(self, photons, intersect, intersection, local):
directions = norm_vector(photons['dir'].data[intersect])
# save the direction of the incoming photons as beam_dir
beam_dir = h2e(directions)
# reflect the photons (change direction) by transforming to local coordinates
pos4d_inv = np.linalg.inv(self.pos4d)
directions = directions.T
directions = np.dot(pos4d_inv, directions)
directions[0, :] *= -1
directions = np.dot(self.pos4d, directions)
new_beam_dir = directions.T
# split polarization into s and p components
# - s is s polarization (perpendicular to plane of incidence)
# - p is p polarization (in the plane of incidence)
# First, make basis vectors.
v_s = np.cross(beam_dir, self.geometry['e_x'][0:3])
v_s /= np.linalg.norm(v_s, axis=1)[:, np.newaxis]
v_p = np.cross(beam_dir, v_s)
polarization = h2e(photons['polarization'].data[intersect])
p_v_s = np.einsum('ij,ij->i', polarization, v_s)
p_v_p = np.einsum('ij,ij->i', polarization, v_p)
fresnel_refl_s, fresnel_refl_p = self.fresnel(photons, intersect, intersection, local)
# Calculate new intensity ~ (E_x)^2 + (E_y)^2
Es2 = fresnel_refl_s * p_v_s ** 2
Ep2 = fresnel_refl_p * p_v_p ** 2
# parallel transport of polarization vector
# v_s stays the same by definition
new_v_p = np.cross(h2e(new_beam_dir), v_s)
new_pol = norm_vector(-Es2[:, np.newaxis] * v_s + Ep2[:, np.newaxis] * new_v_p)
return {'dir': new_beam_dir,
'probability': Es2 + Ep2,
'polarization': e2h(new_pol, 0)}
class MultiLayerEfficiency(FlatOpticalElement):
'''The Multilayer mirror with varying layer thickness along one axis
The distance between layers (and thus best reflected energy) changes along
the local y axis.
All reflectivity data is assumed to be for a single, desired angle. There
is currently no way to enter varying reflection that depends on the angle
of incidence.
Provide reflectivity data in a file with columns:
- 'X(mm)' - position along the "changing" axis (local y axis)
- 'Peak lambda' - wavelength with maximum reflection at a given position
- 'Peak' - maximum reflection at a given position
- 'FWHM(nm)' - full width half max, measure of width of reflection Gaussian peaks
Provide polarization data in a file with columns:
- 'Photon energy' - energy of the photon in keV
- 'Polarization' - Fraction polarized in the more reflective direction, so that
randomly polarized light would have a value of 0.5.
Parameters
----------
reflFile: string
path, filename, and .txt extension for reflection data file
testedPolarization: string
path, filename, and .txt to a text file containing a table with photon energy
and fraction polarization for the light used to test the mirrors and create the
reflectivity file
'''
def __init__(self, **kwargs):
self.fileName = kwargs.pop('reflFile')
self.polFile = kwargs.pop('testedPolarization')
super(MultiLayerEfficiency, self).__init__(**kwargs)
def interp_files(self, photons, local):
# read in correct reflecting probability file, now in table format
reflectFile = ascii.read(self.fileName)
# find reflectivity adjustment due to polarization of light in reflectivity testing
polarizedFile = ascii.read(self.polFile)
tested_polarized_fraction = np.interp(photons['energy'], polarizedFile['Photon energy'] / 1000, polarizedFile['Polarization'])
# find probability of being reflected due to position
local_x = local[:, 0] / np.linalg.norm(self.geometry['v_y'])
local_coords_in_file = reflectFile['X(mm)'] / np.linalg.norm(self.geometry['v_y']) - 1
# interpolate 'Peak lambda', 'Peak' [reflectivity], and 'FWHM(nm)' to the actual photon positions
peak_wavelength = np.interp(local_x, local_coords_in_file, reflectFile['Peak lambda'])
max_refl = np.interp(local_x, local_coords_in_file, reflectFile['Peak']) / tested_polarized_fraction
spread_refl = np.interp(local_x, local_coords_in_file, reflectFile['FWHM(nm)'])
return peak_wavelength, max_refl, spread_refl
def specific_process_photons(self, photons, intersect, intersection, local):
# wavelength is in nm assuming energy is in keV
wavelength = 1.23984282 / photons['energy'].data[intersect]
peak_wavelength, max_refl, spread_refl = self.interp_files(photons[intersect], local[intersect])
# the standard deviation squared of the Gaussian reflectivity functions of each photon's wavelength
c_squared = (spread_refl ** 2) / (8. * np.log(2))
# skip the case when there is no Gaussian (this is assumed to just be the zero function)
c_is_zero = (c_squared == 0)
refl_prob = np.zeros(len(wavelength))
refl_prob[~c_is_zero] = max_refl[~c_is_zero] * np.exp(-((wavelength[~c_is_zero] - peak_wavelength[~c_is_zero]) ** 2) / (2 * c_squared[~c_is_zero]))
return {'probability': refl_prob / 100}
class MultiLayerMirror(FlatStack):
def __init__(self, **kwargs):
super(MultiLayerMirror, self).__init__(elements=[FlatBrewsterMirror, MultiLayerEfficiency],
keywords=[{}, {'reflFile': kwargs.pop('reflFile'),
'testedPolarization': kwargs.pop('testedPolarization')}],
**kwargs)
|
luac/django-argcache
|
src/argcache.py
|
Python
|
agpl-3.0
| 22,405
| 0.00308
|
""" Bulk-deletable cache objects. """
__author__ = "Individual contributors (see AUTHORS file)"
__date__ = "$DATE$"
__rev__ = "$REV$"
__license__ = "AGPL v.3"
__copyright__ = """
This file is part of ArgCache.
Copyright (c) 2015 by the individual contributors
(see AUTHORS file)
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from django.core.cache import cache
from django.dispatch import Signal
from django.db.models import signals
from django.conf import settings
from .queued import add_lazy_dependency
from .token import Token, SingleEntryToken
from .key_set import specifies_key, token_list_for
from .marinade import marinade_dish
from .registry import register_cache
from .sad_face import warn_if_loaded
from .signals import cache_deleted
__all__ = ['ArgCache']
# XXX: For now, all functions must have known arity. No *args or
# **kwargs are allowed, but optional arguments are fine. This is done to
# avoid overcomplicating everything, especially this early. If we have a
# compelling reason to extend it, this can be modified later.
# TODO: Don't store each token with the final object, hash them together, to
# avoid a ridiculous blowup in memory. This will make it about as memory
# efficient as tiered caching. May be a performance hit though... not sure.
#
# I think it's worth it though... this also has the side effect that caches
# have tokens and then can be used as handles maybe? Kind of another way to
# express 1-1 dependencies... but offloading work from set() to get(), which
# isn't so nice... Still, perhaps some relations are hard to reverse.
#
# Also, can we pretend we never get a hash collision here?
# TODO: This scheme does not allow for sets that involve things like
# everything-below-this-DataTree-node, which could be very useful for UserBit
# stuff. At least the simplest version could fixed by implementing the
# BatchableCache thing and then extending to allow multiple keys per token
# (well, actually the token will just look at lots of keys without telling
# anyone). This also will allow for depending on param signature and stuff.
# However, this does not scale well. i.e., it's O(depth) work, which may be
# fine for one token, but several? Also, if some token does two such queries at
# once (say... V + Q...). it's O(depth^2) work. The hashing thing will help
# with memory, but still... Gack! I think this is not a blocker though. Tiered
# caching couldn't do this either... this is simply something we don't know how
# to implement at all, as far as I know.
# TODO: To be really useful, this system needs to track old values of fields,
# even though we usually don't do things like reassign Event to another
# Program. This shouldn't be hard... monkey patch the models on a as-needed
# basis.
# TODO: We need to be able to notice when a bunch of things got update and
# stuff...
# FIXME: Actually, this probably doesn't handle subclassing properly at all.
# This is unfortunate.
#
# Possibilities:
#
# 1. Delay until all models are initialized (DONE)
# 2. When connecting signals, make multiple connections for every subclass
# and... do something smart about superclasses??
# - I don't want to need a query just to check if this guy is an instance
# of the class I'm interested in.
# - Also, even if something of the right instance changed, what if I don't
# care about any of the shared fields?
#
# 1. Connect to every model and, in the handler, check whether we care about
# this one.
#
# 1. Mike Price's common base class thing gives everything a unique name AND
# can enforce a type value in there
# FIXME: Need a "nothing" key_set object that's distinct from None... purpose
# is to cancel things nicely in functions like
# "Program.objects.get_if_exists_otherwise"
# TODO: Refactor key_set things from is_wildcard to is_exact or something, so
# that we can extend this to more complex queries in the future. Perhaps each
# token has a can_handle() thing that sees if it can handle each query. This
# will likely depend on an asynchronous-cacher thing.
# TODO: Somehow collapse these duplicate reports... delay signals? Keep track
# of when we last set? problem... multiple processes... I suppose we could use
# a "IPC" mechanism of the cache. Sigh.
# TODO: Depend on external factors (a version cookie on each function when
# needed) and the param signature
# TODO: Properly handle things like staticmethod and classmethod (?)
# FIXME: How to handle things like... ids don't change, but want to be careful
# with the objects themselves.
# TODO: Probably need to allow functions to return like... lists of dicts or
# something, but I'm not very happy with that... probably best to go for the
# more general tokens thing, which more-or-less depends on async caching
class ArgCache(object):
""" Implements a cache that allows for selectively dropping bits of itself. """
CACHE_NONE = {} # we could use a garbage string for this, but it's impossible to collide with the id of a dict.
def __init__(self, name, params, cache=cache, timeout_seconds=None, *args, **kwargs):
super(ArgCache, self).__init__(*args, **kwargs)
if isinstance(params, list):
params = tuple(params)
self.name = name
self.params = params
self.cache = cache
self.timeout_seconds = timeout_seconds
self.tokens = []
self.token_dict = {}
self.locked = False
# Mostly used to avoid recursion
self.disabled = False
# Init stats
self.hit_count = 0
self.miss_count = 0
# Be able to invert param mapping
self.param_dict = {}
for i,param in enumerate(params):
self.param_dict[param] = i
# FIXME: Really ought to depend on param signature.
#self.global_token = ExternalToken(name=name, provided_params=(), external=hash(params))
self.global_token = Token(name=name, provided_params=(), cache=self.cache)
self.add_token(self.global_token)
# Calling in the constructor to avoid duplicates
if warn_if_loaded():
print "Dumping the cache out of paranoia..."
self.delete_all()
self.register()
def _hit_hook(self, arg_list):
if settings.CACHE_DEBUG:
old_disabled, self.disabled = self.disabled, True
print "Cache Hit! %s on %s" % (self.name, arg_list)
self.disabled = old_disabled
self.hit_count += 1
def _miss_hook(self, arg_list):
if settings.CACHE_DEBUG:
old_disabled, self.disabled = self.disabled, True
print "Cache Miss! %s on %s" % (self.name, arg_list)
self.disabled = old_disabled
self.miss_count += 1
@property
def pretty_name(self):
return '%s(%s)' % (self.name, ', '.join(self.params))
# @delay_method # Slightly nontrivial... duplicate-checker needs to know about stuff.
def register(self):
register_cache(self)
# TODO: I really should make a signal thing that provides __get__ so that
# it acts like a bound member function or something really awful like that.
# In fact, I would not be surprised if Django eventually did this, so, for
# insurance, _delete_signal is not a member of ArgCache. :-D
def connect(self, handler):
""" Connect handler to this cache's delete signal. """
cache_deleted.connect(handler, sender=self, weak=False) # local functions will be used a lot, so no weak refs
connect.alters_data = True
def send(self, key
|
manankalra/Twitter-Sentiment-Analysis
|
demo/download.py
|
Python
|
mit
| 145
| 0.006897
|
#!/usr/bin/env python
"""
Download NLTK data
"""
__author__ = "Ma
|
nan Kalra"
__email__ = "manankalr29@gmail.com"
import nltk
nltk.download()
|
olivierdalang/stdm
|
ui/ui_import_data.py
|
Python
|
gpl-2.0
| 14,198
| 0.002043
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ui_import_data.ui'
#
# Created: Thu Nov 13 16:30:03 2014
# by: PyQt4 UI code generator 4.10.3
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_frmImport(object):
def setupUi(self, frmImport):
frmImport.setObjectName(_fromUtf8("frmImport"))
frmImport.resize(564, 503)
frmImport.setWizardStyle(QtGui.QWizard.ModernStyle)
frmImport.setOptions(QtGui.QWizard.HelpButtonOnRight)
self.pgSource = QtGui.QWizardPage()
self.pgSource.setObjectName(_fromUtf8("pgSource"))
self.verticalLayout = QtGui.QVBoxLayout(self.pgSource)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.groupBox = QtGui.QGroupBox(self.pgSource)
self.groupBox.setObjectName(_fromUtf8("groupBox"))
self.horizontalLayout_2 = QtGui.QHBoxLayout(self.groupBox)
self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
self.label = QtGui.QLabel(self.groupBox)
self.label.setObjectName(_fromUtf8("label"))
self.horizontalLayout_2.addWidget(self.label)
self.txtDataSource = QtGui.QLineEdit(self.groupBox)
self.txtDataSource.setMaxLength(200)
self.txtDataSource.setObjectName(_fromUtf8("txtDataSource"))
self.horizontalLayout_2.addWidget(self.txtDataSource)
self.btnBrowseSource = QtGui.QPushButton(self.groupBox)
self.btnBrowseSource.setObjectName(_fromUtf8("btnBrowseSource"))
self.horizontalLayout_2.addWidget(self.btnBrowseSource)
self.verticalLayout.addWidget(self.groupBox)
self.groupBox_2 = QtGui.QGroupBox(self.pgSource)
self.groupBox_2.setObjectName(_fromUtf8("groupBox_2"))
self.horizontalLayout = QtGui.QHBoxLayout(self.groupBox_2)
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.rbTextType = QtGui.QRadioButton(self.groupBox_2)
self.rbTextType.setChecked(True)
self.rbTextType.setObjectName(_fromUtf8("rbTextType"))
self.horizontalLayout.addWidget(self.rbTextType)
self.rbSpType = QtGui.QRadioButton(self.groupBox_2)
self.rbSpType.setObjectName(_fromUtf8("rbSpType"))
self.horizontalLayout.addWidget(self.rbSpType)
self.verticalLayout.addWidget(self.groupBox_2)
frmImport.addPage(self.pgSource)
self.destTable = QtGui.QWizardPage()
self.destTable.setObjectName(_fromUtf8("destTable"))
self.gridLayout_2 = QtGui.QGridLayout(self.destTable)
self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2"))
self.groupBox_3 = QtGui.QGroupBox(self.destTable)
self.groupBox_3.setObjectName(_fromUtf8("groupBox_3"))
self.gridLayout = QtGui.QGridLayout(self.groupBox_3)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.lstDestTables = QtGui.QListWidget(self.groupBox_3)
self.lstDestTables.setObjectName(_fromUtf8("lstDestTables"))
self.gridLayout.addWidget(self.lstDestTables, 0, 0, 1, 1)
self.gridLayout_2.addWidget(self.groupBox_3, 0, 0, 1, 1)
self.groupBox_4 = QtGui.QGroupBox(self.destTable)
self.groupBox_4.setObjectName(_fromUtf8("groupBox_4"))
self.gridLayout_5 = QtGui.QGridLayout(self.groupBox_4)
self.gridLayout_5.setObjectName(_fromUtf8("gridLayout_5"))
self.geomClm = QtGui.QComboBox(self.groupBox_4)
self.geomClm.setEnabled(False)
self.geomClm.setMinimumSize(QtCore.QSize(100, 0))
self.geomClm.setMaximumSize(QtCore.QSize(100, 16777215))
self.geomClm.setObjectName(_fromUtf8("geomClm"))
self.gridLayout_5.addWidget(self.geomClm, 4, 1, 1, 1)
self.label_2 = QtGui.QLabel(self.groupBox_4)
self.label_2.setMaximumSize(QtCore.QSize(16777215, 20))
self.label_2.setObjectName(_fromUtf8("label_2"))
self.gridLayout_5.addWidget(self.label_2, 4, 0, 1, 1)
self.rbAppend = QtGui.QRadioButton(self.groupBox_4)
self.rbAppend.setChecked(True)
self.rbAppend.setObjectName(_fromUtf8("rbAppend"))
self.gridLayout_5.addWidget(self.rbAppend, 1, 0, 1, 1)
self.rbOverwrite = QtGui.QRadioButton(self.groupBox_4)
self.rbOverwrite.setObjectName(_fromUtf8("rbOverwrite"))
self.gridLayout_5.addWidget(self.rbOverwrite, 2, 0, 1, 2)
self.gridLayout_2.addWidget(self.groupBox_4, 0, 1, 1, 1)
frmImport.addPage(self.destTable)
self.assignColumns = QtGui.QWizardPage()
self.assignColumns.setObjectName(_fromUtf8("assignColumns"))
self.gridLayout_6 = QtGui.QGridLayout(self.assignColumns)
self.gridLayout_6.setObjectName(_fromUtf8("gridLayout_6"))
self.groupBox_5 = QtGui.QGroupBox(self.assignColumns)
self.groupBox_5.setObjectName(_fromUtf8("groupBox_5"))
self.gridLayout_3 = QtGui.QGridLayout(self.groupBox_5)
self.gridLayout_3.setObjectName(_fromUtf8("gridLayout_3"))
self.lstSrcFields = QtGui.QListWidget(self.groupBox_5)
self.lstSrcFields.setMinimumSize(QtCore.QSize(0, 250))
self.lstSrcFields.setSelectionMode(QtGui.QAbstractItemView.SingleSelection)
self.lstSrcFields.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
self.lstSrcFields.setObjectName(_fromUtf8("lstSrcFields"))
self.gridLayout_3.addWidget(self.lstSrcFields, 0, 1, 5, 1)
self.btnSrcUp = QtGui.QPushButton(self.groupBox_5)
self.btnSrcUp.setMaximumSize(QtCore.QSize(50, 16777215))
self.btnSrcUp.setText(_fromUtf8(""))
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(_fromUtf8(":/plugins/stdm/images/icons/up.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.btnSrcUp.setIcon(icon)
self.btnSrcUp.setObjectName(_fromUtf8("btnSrcUp"))
self.gridLayout_3.addWidget(self.btnSrcUp, 0, 0, 1, 1)
self.btnSrcNone = QtGui.QPushButton(self.groupBox_5)
self.btnSrcNone.setMaximumSize(QtCore.QSize(50, 16777215))
self.btnSrcNone.setObjectName(_fromUtf8("btnSrcNone"))
self.gridLayout_3.addWidget(self.btnSrcNone, 3, 0, 1, 1)
self.btnSrcDown = QtGui.QPushButton(self.groupBox_5)
self.btnSrcDown.setMaximumSize(QtCore.QSize(50, 16777215))
self.btnSrcDown.setText(_fromUtf8(""))
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap(_fromUtf8(":/plugins/stdm/images/icons/down.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.btnSrcDown.setIcon(icon1)
self.btnSrcDown.setObjectName(_fromUtf8("btnSrcDown"))
self.gridLayout_3.addWidget(self.btnSrcDown, 1, 0, 1, 1)
self.btnSrcAll = QtGui.QPushButton(self.groupBox_5)
self.btnSrcAll.setMaximumSize(QtCore.QSize(50, 16777215))
self.btnSrcAll.setObjectName(_fromUtf8("btnSrcAll"))
self.gridLayout_3.addWidget(self.btnSrcAll, 2, 0, 1, 1)
self.gridLayout_6.addWidget(self.groupBox_5, 0, 0, 1, 1)
self.groupBox_6 = QtGui.QGroupBox(self.assignColumns)
self.groupBox_6.setObjectName(_fromUtf8("groupBox_6"))
self.gridLayout_4 = QtGui.QGridLayout(self.groupBox_6)
self.gridLayout_4.setObjectName(_fromUtf8("gridLayout_4"))
self.label_3 = QtGui.QLabel(self.groupBox_6)
self.label_3.setObjectName(_fromUtf8("label_3"))
self.gridLayout_4.addWidget(self.label_3, 4, 0, 1, 1)
self.btn_delete_translator = QtGui.QToolButton(self.groupBox_6)
self.btn_delete_translator.setMinimumSize(QtCore.QSize(0, 0))
self.btn_delete_tr
|
duythanhphan/qt-creator
|
tests/system/suite_HELP/tst_HELP06/test.py
|
Python
|
lgpl-2.1
| 8,439
| 0.009835
|
#############################################################################
##
## Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
## Contact: http://www.qt-project.org/legal
##
## This file is part of Qt Creator.
##
## Commercial License Usage
## Licensees holding valid commercial Qt licenses may use this file in
## accordance with the commercial license agreement provided with the
## Software or, alternatively, in accordance with the terms contained in
## a written agreement between you and Digia. For licensing terms and
## conditions see http://qt.digia.com/licensing. For further information
## use the contact form at http://qt.digia.com/contact-us.
##
## GNU Lesser General Public License Usage
## Alternatively, this file may be used under the terms of the GNU Lesser
## General Public License version 2.1 as published by the Free Software
## Foundation and appearing in the file LICENSE.LGPL included in the
## packaging of this file. Please review the following information to
## ensure the GNU Lesser General Public License version 2.1 requirements
## will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
##
## In addition, as a special exception, Digia gives you certain additional
## rights. These rights are described in the Digia Qt LGPL Exception
## version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
##
#############################################################################
source("../../shared/qtcreator.py")
source("../../shared/suites_qtta.py")
# test bookmark functionality
def renameBookmarkFolder(view, item, newName):
invokeContextMenuItemOnBookmarkFolder(view, item, "Rename Folder")
replaceEditorContent(waitForObject(":Add Bookmark.treeView_QExpandingLineEdit"), newName)
type(waitForObject(":Add Bookmark.treeView_QExpandingLineEdit"), "<Return>")
return
def invokeContextMenuItemOnBookmarkFolder(view, item, menuItem):
aboveWidget = "{name='line' type='QFrame' visible='1' window=':Add Bookmark_BookmarkDialog'}"
mouseClick(waitForObjectItem(view, item), 5, 5, 0, Qt.LeftButton)
openItemContextMenu(view, item, 5, 5, 0)
activateItem(waitForObject("{aboveWidget=%s type='QMenu' unnamed='1' visible='1' "
"window=':Add Bookmark_BookmarkDialog'}" % aboveWidget), menuItem)
def getQModelIndexStr(textProperty, container):
if (container.startswith(":")):
container = "'%s'" % container
return ("{column='0' container=%s %s type='QModelIndex'}"
% (container, textProperty))
def main():
startApplication("qtcreator" + SettingsPath)
if not startedWithoutPluginError():
return
# goto help mode and click on topic
switchViewTo(ViewConstants.HELP)
manualQModelIndex = getQModelIndexStr("text?='Qt Creator Manual *'",
":Qt Creator_QHelpContentWidget")
doubleClick(manualQModelIndex, 5, 5, 0, Qt.LeftButton)
gettingStartedQModelIndex = getQModelIndexStr("text='Getting Started'", manualQModelIndex)
doubleClick(gettingStartedQModelIndex, 5, 5, 0, Qt.LeftButton)
mouseClick(waitForObject(getQModelIndexStr("text='Building and Running an Example'",
gettingStartedQModelIndex)), 5, 5, 0, Qt.LeftButton)
# open bookmarks window
clickButton(waitForObject(":Qt Creator.Add Bookmark_QToolButton"))
clickButton(waitForObject(":Add Bookmark.ExpandBookmarksList_QToolButton"))
# create root bookmark directory
clickButton(waitForObject(":Add Bookmark.New Folder_QPushButton"))
# rename root bookmark directory
bookmarkView = waitForObject(":Add Bookmark.treeView_QTreeView")
renameBookmarkFolder(bookmarkView, "New Folder*", "Sample")
# create two more subfolders
clickButton(waitForObject(":Add Bookmark.New Folder_QPushButton"))
renameBookmarkFolder(bookmarkView, "Sample.New Folder*", "Folder 1")
clickButton(waitForObject(":Add Bookmark.New Folder_QPushButton"))
renameBookmarkFolder(bookmarkView, "Sample.Folder 1.New Folder*", "Folder 2")
clickButton(waitForObject(":Add Bookmark.OK_QPushButton"))
mouseClick(manualQModelIndex, 5, 5, 0, Qt.LeftButton)
type(waitForObject(":Qt Creator_QHelpContentWidget"), "<Down>")
clickButton(waitForObject(":Qt Creator.Add Bookmark_QToolButton"))
clickButton(waitForObject(":Add Bookmark.ExpandBookmarksList_QToolButton"))
# click on "Sample" and create new directory under it
mouseClick(waitForObject(getQModelIndexStr("text='Sample'", ":Add Bookmark.treeView_QTreeView")))
clickButton(waitForObject(":Add Bookmark.New Folder_QPushButton"))
clickButton(waitForObject(":Add Bookmark.OK_QPushButton"))
# choose bookmarks
mouseClick(waitForObjectItem(":Qt Creator_Core::Internal::CommandComboBox", "Bookmarks"))
# verify if all folders are created and bookmarks present
sampleQModelIndex = getQModelIndexStr("text='Sample'", ":Qt Creator_Bookmarks_TreeView")
folder1QModelIndex = getQModelIndexStr("text='Folder 1'", sampleQModelIndex)
folder2QModelIndex = getQModelIndexStr("text='Folder 2'", folder1QModelIndex)
bldRunQModelIndex = getQModelIndexStr("text?='QtCreator : Building and Running an Example*'",
folder2QModelIndex)
newFolderQModelIndex = getQModelIndexStr("text='New Folder'", sampleQModelIndex)
manualQModelIndex = getQModelIndexStr("text='QtCreator : Qt Creator Manual'",
newFolderQModelIndex)
test.verify(checkIfObjectExists(sampleQModelIndex, verboseOnFail = True) and
checkIfObjectExists(folder1QModelIndex, verboseOnFail = True) and
checkIfObjectExists(folder2QModelIndex, verboseOnFail = True) and
checkIfObjectExists(bldRunQModelIndex, verboseOnFail = True) and
checkIfObjectExists(manualQModelIndex, verboseOnFail = True),
"Verifying if all folders and bookmarks are present")
mouseClick(waitForObject(":Qt Creator_Bookmarks_TreeView"), 5, 5, 0, Qt.LeftButton)
for i in range(6):
type(waitForObject(":Qt Creator_Bookmarks_TreeView"), "<Right>")
type(waitForObject(":Qt Creator_Bookmarks_TreeView"), "<Return>")
test.verify("QtCreator : Building and Running an Example" in str(waitForObject(":Qt Creator_Help::Internal::HelpViewer").title),
"Verifying if first bookmark is opened")
mouseClick(waitForObject(bldRunQModelIndex))
type(waitForObject(":Qt Creator_Bookmarks_TreeView"), "<Down>")
type(waitForObject(":Qt Creator_Bookmarks_TreeView"), "<Right>")
type(waitForObject(":Qt Creator_Bookmarks_TreeView"), "<Down>")
type(waitForObject(":Qt Creator_Bookmarks_TreeView"), "<Return>")
test.verify("QtCreator : Qt Creator Manual" in str(waitForObject(":Qt Creator_Help::Internal::HelpViewer").title),
"Verifying if second bookmark is opened")
# delete previously created directory
clickButton(waitForObject(":Qt Creator.Add Bookmark_QToolButton"))
clickButton(waitForObject(":Add Bookmark.ExpandBookmarksList_QToolButton"))
invokeContextMenuItemOnBookmarkFolder(":Add Bookmark.treeView_QTreeView", "Sample.Folder 1",
"Delete Folder")
clickButton(waitForObject("{container=':Add Bookmark.treeView_QTreeView' text='Yes' "
"type='QPushButton' unnamed='1' visible='1'}"))
# close bookmarks
clickButton(waitForObject(":Add Bookmark.OK_QPushButton"))
# choose bookmarks from command combobox
mouseClick(waitForObject(":Qt Creator_Core::Internal::CommandComboBox"))
mouseClick(waitForObjectItem(":Qt Creator_Core::Internal::CommandComboBox", "Bookmarks"))
# verify if folders and bookmark deleted
test.verify(checkIfObjectExists(sampleQModelIndex, verboseOnFail = True) and
checkIfObjectExists(folder1QModelIndex, shouldExist = False, verboseOnFail = True) and
checkIfObjectExists(folder2QModelIndex, shouldExist = False, verboseOnFail = True) and
checkIfObjectExists(bldRunQModelIndex,
|
hlin117/statsmodels
|
statsmodels/graphics/plottools.py
|
Python
|
bsd-3-clause
| 634
| 0.009464
|
import numpy as np
def rainbow(n):
"""
Returns a list of colors sampled at equal intervals over the spectrum.
Parameters
----------
n : int
The number of colors to return
Returns
-------
R : (n,3) array
An array of rows of RGB color values
Notes
-----
Converts from HSV coordinates (0, 1, 1) to (1, 1, 1) to RGB. Based on
the Sage function of the same name.
"""
from matplotlib import colors
R = np.ones((1,n,3))
R[0,:,0] = np.linspace(0, 1, n, endpoint=False)
#Note: could iterate and use colorsys.hsv_to_rgb
return colors.hsv_to_rgb(R).squeeze()
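# Quick check (added for illustration; not part of the original module):
if __name__ == "__main__":
    # four evenly spaced colors as an (n, 3) array of RGB values in [0, 1]
    print(rainbow(4))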
|
peheje/baselines
|
toy_examples/windy_gridworld/q_learning.py
|
Python
|
mit
| 1,351
| 0.00148
|
import time
from copy import copy
import numpy as np
from gridworld import Gridworld, Position
from utilities.utils import argmax_random_tie
# START
rows = 7
cols = 10
world = Gridworld(rows, cols)
moveset = [
# Position(1, 1),
# Position(1, -1),
# Position(-1, -1),
# Position(-1, 1),
Position(1, 0), # down
Position(0, -1), # left
Position(0, 1), # right
Position(-1, 0) # up
]
alpha = 0.2
e = 0.3
lambd = 0.2
e_decay = 0.9999
n_episodes = 100000
print_every = 10000
q = np.zeros((rows, cols, len(moveset)))
for episode in range(n_episodes):
print("episode {}".format(episode))
world.reset()
s = world.pos
found = False
e *= e_decay
step = 0
while not found:
step += 1
world.wind()
a = np.random.randint(0, len(moveset)) if np.random.uniform() < e else argmax_random_tie(q[s.row][s.col])
r = world.move(moveset[a])
sn = world.pos
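# Standard Q-learning update: Q(s,a) += alpha * (r + gamma * max_a' Q(s',a') - Q(s,a)).
# Note that `lambd` here plays the role of the discount factor gamma.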
q[s.row][s.col][a] += alpha * (r + lambd * np.max(q[sn.row][sn.col]) - q[s.row][s.col][a])
s = copy(sn)
if s == world.goal:
found = True
if episode % print_every == 0 and step < 30:
print(world)
|
time.sleep(0.1)
if found:
print("found goal, epsilon {} in steps {}".format(e, step))
time.sleep(3)
|
jawilson/home-assistant
|
tests/components/greeneye_monitor/test_sensor.py
|
Python
|
apache-2.0
| 7,421
| 0.00283
|
"""Tests for greeneye_monitor sensors."""
from unittest.mock import AsyncMock, MagicMock
from homeassistant.components.greeneye_monitor.sensor import (
DATA_PULSES,
DATA_WATT_SECONDS,
)
from homeassistant.const import STATE_UNKNOWN
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_registry import async_get as get_entity_registry
from .common import (
SINGLE_MONITOR_CONFIG_POWER_SENSORS,
SINGLE_MONITOR_CONFIG_PULSE_COUNTERS,
SINGLE_MONITOR_CONFIG_TEMPERATURE_SENSORS,
SINGLE_MONITOR_CONFIG_VOLTAGE_SENSORS,
SINGLE_MONITOR_SERIAL_NUMBER,
mock_monitor,
setup_greeneye_monitor_component_with_config,
)
from .conftest import assert_sensor_state
async def test_disable_sensor_before_monitor_connected(
hass: HomeAssistant, monitors: AsyncMock
) -> None:
"""Test that a sensor disabled before its monitor connected stops listening for new monitors."""
# The sensor base class handles connecting the monitor, so we test this with a single voltage sensor for ease
await setup_greeneye_monitor_component_with_config(
hass, SINGLE_MONITOR_CONFIG_VOLTAGE_SENSORS
)
assert len(monitors.listeners) == 1
await disable_entity(hass, "sensor.voltage_1")
assert len(monitors.listeners) == 0 # Make sure we cleaned up the listener
async def test_updates_state_when_monitor_connected(
hass: HomeAssistant, monitors: AsyncMock
) -> None:
"""Test that a sensor updates its state when its monitor first connects."""
# The sensor base class handles updating the state on connection, so we test this with a single voltage sensor for ease
await setup_greeneye_monitor_component_with_config(
hass, SINGLE_MONITOR_CONFIG_VOLTAGE_SENSORS
)
assert_sensor_state(hass, "sensor.voltage_1", STATE_UNKNOWN)
assert len(monitors.listeners) == 1
connect_monitor(monitors, SINGLE_MONITOR_SERIAL_NUMBER)
assert len(monitors.listeners) == 0 # Make sure we cleaned up the listener
assert_sensor_state(hass, "sensor.voltage_1", "120.0")
async def test_disable_sensor_after_monitor_connected(
hass: HomeAssistant, monitors: AsyncMock
) -> None:
"""Test that a sensor disabled after its monitor connected stops listening for sensor changes."""
# The sensor base class handles connecting the monitor, so we test this with a single voltage sensor for ease
await setup_greeneye_monitor_component_with_config(
hass, SINGLE_MONITOR_CONFIG_VOLTAGE_SENSORS
)
monitor = connect_monitor(monitors, SINGLE_MONITOR_SERIAL_NUMBER)
assert len(monitor.listeners) == 1
await disable_entity(hass, "sensor.voltage_1")
assert len(monitor.listeners) == 0
async def test_updates_state_when_sensor_pushes(
hass: HomeAssistant, monitors: AsyncMock
) -> None:
"""Test that a sensor entity updates its state when the underlying sensor pushes an update."""
# The sensor base class handles triggering state updates, so we test this with a single voltage sensor for ease
await setup_greeneye_monitor_component_with_config(
hass, SINGLE_MONITOR_CONFIG_VOLTAGE_SENSORS
)
monitor = connect_monitor(monitors, SINGLE_MONITOR_SERIAL_NUMBER)
assert_sensor_state(hass, "sensor.voltage_1", "120.0")
monitor.voltage = 119.8
monitor.notify_all_listeners()
assert_sensor_state(hass, "sensor.voltage_1", "119.8")
async def test_power_sensor_initially_unknown(
hass: HomeAssistant, monitors: AsyncMock
) -> None:
"""Test that the power sensor can handle its initial state being unknown (since the GEM API needs at least two packets to arrive before it can compute watts)."""
await setup_greeneye_monitor_component_with_config(
hass, SINGLE_MONITOR_CONFIG_POWER_SENSORS
)
connect_monitor(monitors, SINGLE_MONITOR_SERIAL_NUMBER)
assert_sensor_state(
hass, "sensor.channel_1", STATE_UNKNOWN, {DATA_WATT_SECONDS: 1000}
)
# This sensor was configured with net metering on, so we should be taking the
# polarized value
assert_sensor_state(
hass, "sensor.channel_two", STATE_UNKNOWN, {DATA_WATT_SECONDS: -400}
)
async def test_power_sensor(hass: HomeAssistant, monitors: AsyncMock) -> None:
"""Test that a power sensor reports its values correctly, including handling net metering."""
await setup_greeneye_monitor_component_with_config(
hass, SINGLE_MONITOR_CONFIG_POWER_SENSORS
)
monitor = connect_monitor(monitors, SINGLE_MONITOR_SERIAL_NUMBER)
monitor.channels[0].watts = 120.0
monitor.channels[1].watts = 120.0
monitor.channels[0].notify_all_listeners()
monitor.channels[1].notify_all_listeners()
assert_sensor_state(hass, "sensor.channel_1", "120.0", {DATA_WATT_SECONDS: 1000})
# This sensor was configured with net metering on, so we should be taking the
# polarized value
assert_sensor_state(hass, "sensor.channel_two", "120.0", {DATA_WATT_SECONDS: -400})
async def test_pulse_counter(hass: HomeAssistant, monitors: AsyncMock) -> None:
"""Test that a pulse counter sensor reports its values properly, including calculating different units."""
await setup_greeneye_monitor_component_with_config(
hass, SINGLE_MONITOR_CONFIG_PULSE_COUNTERS
)
connect_monitor(monitors, SINGLE_MONITOR_SERIAL_NUMBER)
assert_sensor_state(hass, "sensor.pulse_a", "10.0", {DATA_PULSES: 1000})
# This counter was configured with each pulse meaning 0.5 gallons and
# wanting to show gallons per minute, so 10 pulses per second -> 300 gal/min
assert_sensor_state(hass, "sensor.pulse_2", "300.0", {DATA_PULSES: 1000})
# This counter was configured with each pulse meaning 0.5 gallons and
# wanting to show gallons per hour, so 10 pulses per second -> 18000 gal/hr
assert_sensor_state(hass, "sensor.pulse_3", "18000.0", {DATA_PULSES: 1000})
async def test_temperature_sensor(hass: HomeAssistant, monitors: AsyncMock) -> None:
"""Test that a temperature sensor reports its values properly, including proper handling of when its native unit is different from that configured in hass."""
await setup_greeneye_monitor_component_with_config(
hass, SINGLE_MONITOR_CONFIG_TEMPERATURE_SENSORS
)
connect_monitor(monitors, SINGLE_MONITOR_SERIAL_NUMBER)
# The config says that the sensor is reporting in Fahrenheit; if we set that up
# properly, HA will have converted that to Celsius by default.
assert_sensor_state(hass, "sensor.temp_a", "0.0")
async def test_voltage_sensor(hass: HomeAssistant, monitors: AsyncMock) -> None:
"""Test that a voltage sensor reports its values properly."""
await setup_greeneye_monitor_component_with_config(
hass, SINGLE_MONITOR_CONFIG_VOLTAGE_SENSORS
)
connect_monitor(monitors, SINGLE_MONITOR_SERIAL_NUMBER)
assert_sensor_state(hass, "sensor.voltage_1", "120.0")
def connect_monitor(monitors: AsyncMock, serial_number: int) -> MagicMock:
"""Simulate a monitor connecting to Home Assistant. Returns the mock monitor API object."""
monitor = mock_monitor(serial_number)
monitors.add_monitor(monitor)
return monitor
async def disable_entity(hass: HomeAssistant, entity_id: str) -> None:
"""Disable the given entity."""
entity_registry = get_entity_registry(hass)
entity_registry.async_update_entity(entity_id, disabled_by="user")
await hass.async_block_till_done()
|
fake-name/ReadableWebProxy
|
WebMirror/management/rss_parser_funcs/feed_parse_extractFaelicyTumblrCom.py
|
Python
|
bsd-3-clause
| 674
| 0.02819
|
def extractFaelicyTumblrCom(item):
'''
Parser for 'faelicy.tumblr.com'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in
|
item['title'].lower():
return None
tagmap = [
('the scum villain\'s self saving system', 'the scum villain\'s self saving system', 'translated'),
('PRC', 'PRC', 'translated'),
('Loiterous', 'Loiterous', 'oel'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False
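# Example input (illustrative): the parser expects a feed item dict such as
#
#   item = {
#       'title': "The Scum Villain's Self Saving System - Chapter 12",
#       'tags': ['PRC'],
#   }
#
# extractVolChapterFragmentPostfix() and buildReleaseMessageWithType() are
# helpers from the enclosing WebMirror package and are not defined in this file.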
|
conan-io/conan
|
conans/client/packager.py
|
Python
|
mit
| 1,979
| 0.002527
|
import os
from conans.client.file_copier import FileCopier, report_copied_files
from conans.model.manifest import FileTreeManifest
from conans.paths import CONANINFO
from conans.util.files import mkdir, save
def export_pkg(conanfile, package_id, src_package_folder, hook_manager, conanfile_path, ref):
# NOTE: The layout folder is not taken into account for the cache, it is not useful to introduce
# a subfolder there.
mkdir(conanfile.package_folder)
output = conanfile.output
output.info("Exporting to cache existing package from user folder")
output.info("Package folder %s" % conanfile.package_folder)
hook_manager.execute("pre_package", conanfile=conanfile, conanfile_path=conanfile_path,
reference=ref, package_id=package_id)
copier = FileCopier([src_package_folder], conanfile.package_folder)
copier("*", symlinks=True)
hook_manager.execute("post_package", conanfile=conanfile, conanfile_path=conanfile_path,
reference=ref, package_id=package_id)
save(os.path.join(conanfile.package_folder, CONANINFO), conanfile.info.dumps())
manifest = FileTreeManifest.create(conanfile.package_folder)
manifest.save(conanfile.package_folder)
report_files_from_manifest(output, manifest)
output.success("Package '%s' created" % package_id)
prev = manifest.summary_hash
output.info("Created package revision %s" % prev)
return prev
def update_package_metadata(prev, layout, package_id, rrev):
with layout.update_metadata() as metadata:
metadata.packages[package_id].revision = prev
metadata.packages[package_id].recipe_revision = rrev
def report_files_from_manifest(output, manifest):
copied_files = list(manifest.files())
copied_files.remove(CONANINFO)
if not copied_files:
output.warn("No files in this package!")
return
report_copied_files(copied_files, output, message_suffix="Packaged")
|
notesoftware/notestudio
|
main.py
|
Python
|
gpl-2.0
| 8,246
| 0.02877
|
# coding: utf-8
#import pygame
from Tkinter import *
import ttk
import time
from PIL import ImageTk,Image
from functools import partial
import os
import tkMessageBox
from urllib2 import *
from threading import Thread
import urllib as u
from window import *
############################################################################################ INTERNET CONNECTION CHECK
def netControl():
try:
u.urlopen("http://example.com")
return True
except Exception as e:
print(e.message)
return False
if(not netControl()):
tkMessageBox.showwarning("Hata","Bu programı şu an internet bağlantısı olmadan kullanamazsınız!")
sys.exit(0)
############################################################################################
####################################################################################### MAIN CLASS
class NoteStudio:
def __init__(self):
self.pencere = Tk()
self.rgb = "#008aff"
# to center the window on screen
self.h = ((self.pencere.winfo_screenheight())/2)-(142/2)
self.w = ((self.pencere.winfo_screenwidth())/2)-(712/2)
self.pencere.overrideredirect(1)
self.pencere.resizable(width = FALSE,height = FALSE)
self.pencere.geometry("712x142+{0}+{1}".format(self.w,self.h))
self.pencere.title("NoteStudio 1.0")
self.pencere.iconbitmap("image/logo.ico")
self.img = ImageTk.PhotoImage(Image.open("image/banner.png"))
self.panel = Label(self.pencere,image = self.img)
self.panel.pack(side = "bottom", fill = "both", expand = "yes")
self.pencere.after(0,partial(self.efekt,0.1,0,durum = 1))
self.pencere.after(1500,self.start)
self.pencere.mainloop()
def efekt(self,alfa,sayac = 0,durum = 0,event = None): # splash screen fade effect
if(sayac < 1):
if(durum):
self.pencere.wm_attributes('-alpha',alfa)
alfa += 0.1
if(alfa>=0.9):
durum = 0
self.pencere.after(50,partial(self.efekt,0.9,sayac+1,durum))
else:
self.pencere.after(50,partial(self.efekt,alfa,sayac,durum))
else:
self.pencere.wm_attributes('-alpha',alfa)
alfa -= 0.1
if(alfa<=0.0):
durum = 1
self.pencere.after(50,partial(self.efekt,alfa,sayac,durum))
else:
self.pencere.after(50,partial(self.efekt,alfa,sayac,durum))
else:
self.pencere.wm_attributes('-alpha',1)
def start(self):
self.h = ((self.pencere.winfo_screenheight())/2)-300
self.w = ((self.pencere.winfo_screenwidth())/2)-400
self.panel.destroy()
self.img = ImageTk.PhotoImage(Image.open("image/background.png"))
self.panel = Label(self.pencere,image = self.img)
self.panel.place(x = 0,
y = 0)
self.pencere.wm_attributes('-alpha',1)
self.pencere.geometry("810x600+{0}+{1}".format(self.w,self.h))
self.pencere.overrideredirect(False)
self.pencere.tk_setPalette("black")
Thread(target = self.ip,args =(),).start()
self.banner = Label(self.pencere,
text = "© NoteStudio 1.1",
bg = self.rgb,
fg = "black")
self.banner.pack(side = BOTTOM,fill = X)
self.islemListe = [{"buton":"Whois Çekme",
#"pencere":self.Pencere,
"title":"NoteStudio Whois",
"text":"Whois bilgisi çekme",
"bilgi":"IP adresi yada Domain",
"fonk":"whois"},
{"buton":"CloudFlare\nTespiti",
#"pencere":self.Pencere,
"title":"NoteStudio CloudFlare",
"text":"Hedefte CloudFlare Tespiti",
"bilgi":"IP adresi yada Domain",
"fonk":"cloudflare"},
{"buton":"IP location",
#"pencere":self.Pencere,
"title":"NoteStudio IPlocation",
"text":"IP adresinden yer bulma",
"bilgi":"IP adresi girin:",
"fonk":"location"},
{"buton":"HoneyPot",
#"pencere":self.Pencere,
"title":"NoteStudio HoneyPot",
"text":"Hedef sistemde HoneyPot oranı",
"bilgi":"IP adresi",
"fonk":"honeypot"},
{"buton":"HTTP Header Grabber",
#"pencere":self.Pencere,
"title":"NoteStudio HeaderGrabber",
"text":"Web sitesi başlık bilgileri",
"bilgi":"IP adresi yada Domain",
"fonk":"header"},
#["Port Scan",self.Pencere,"NoteStudio PortScan","Hedef sistem port tarama","IP adresi yada Domain"],
{"buton":"Robots.txt",
#"pencere":self.Pencere,
"title":"NoteStudio robots.txt",
"text":"Hedef sistemde robots.txt tespiti",
"bilgi":"Domain (http(s)://) ile yazın",
"fonk":"robot"},
{"buton":"Link Grabber",
#"pencere":self.Pencere,
"title":"NoteStudio LinkGrabber",
"text":"Hedef sistemde link taraması",
"bilgi":"IP adresi yada Domain",
"fonk":"link"},
{"buton":"Traceroute",
#"pencere":self.Pencere,
"title":"NoteStudio TraceRoute",
"text":"Hedef sisteme giden yolu izleme",
"bilgi":"IP adresi yada Domain",
"fonk":"trace"},
{"buton":"Zone Transfer",
#"pencere":self.Pencere,
"title":"NoteStudio ZoneTransfer",
"text":"Hedef sistem zone tespiti",
"bilgi":"IP adresi yada Domain",
"fonk":"zone"},
]
sira = 0
for i in self.islemListe:
Window(master = self.pencere,
no = sira,
text = i["buton"],
pTitle = i["title"],
pText = i["text"],
pBilgi = i["bilgi"],
#command = i["pencere"],
fonksiyon = i["fonk"] or None)
sira += 1
if(sira>=len(self.islemListe)):
break
hakkindaB = Window(master = self.pencere,
no = 9,
text = "Hakkında/Beni Oku",
pTitle = "Hakkında",
pText = "Hakkında",
pBilgi = "Hakkında")
hakkindaB.buton["command"] = self.hakkinda
cikisB = Window(master = self.pencere,
no = 10,
text = "Çıkış",
pTitle = "Çıkış",
pText = "Çıkış",
pBilgi = "Çıkış")
cikisB.buton["command"] = self.cik
def ip(self):
ipAdres = u.urlopen("http://ipv4bot.whatismyipaddress.com").read()
self.banner["text"] = self.banner["text"] + " | IP: {}".format(ipAdres)
def hakkinda(self):
mesaj = "NoteStudio 1.1"
tkMessageBox.showinfo("NoteStudio",mesaj)
def cik(self):
self.pencere.destroy()
sys.exit(0)
|
davogler/POSTv3
|
customers/migrations/0002_auto_20150405_1041.py
|
Python
|
mit
| 1,800
| 0.002778
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('customers', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='address',
name='recipient',
),
migrations.DeleteModel(
name='Address',
),
migrations.AddField(
model_name='recipient',
name='address_line1',
field=models.CharField(max_length=45, verbose_name=b'Address line 1', blank=True),
preserve_default=True,
),
migrations.AddField(
model_name='recipient',
name='address_line2',
field=models.CharField(max_length=45, verbose_name=b'Address line 2', blank=True),
preserve_default=True,
),
migrations.AddField(
model_name='recipient',
name='city',
field=models.CharField(max_length=50, blank=True),
preserve_default=True,
),
migrations.AddField(
model_name='recipient',
name='country',
field=models.CharField(max_length=40, verbose_name=b'Country', blank=True),
preserve_default=True,
),
migrations.AddField(
model_name='recipient',
name='postal_code',
field=models.CharField(max_length=10, verbose_name=b'Postal Code', blank=True),
preserve_default=True,
),
migrations.AddField(
model_name='recipient',
name='state_province',
field=models.CharField(max_length=40, verbose_name=b'State/Province', blank=True),
preserve_default=True,
),
]
|
nschloe/quadpy
|
src/quadpy/s2/_kim_song/__init__.py
|
Python
|
mit
| 1,946
| 0
|
# ENH quadpy-optimize
import pathlib
from ...helpers import article
from .._helpers import _read, register
_source = article(
authors=["KyoungJoong Kim", "ManSuk Song"],
title="Symmetric quadrature formulas over a unit disk",
journal="Korean J. Comp. & Appl. Math.",
year="1997",
volume="4",
pages="179-192",
url="https://doi.org/10.1007/BF03011388",
)
this_dir = pathlib.Path(__file__).resolve().parent
def kim_song_1():
return _read(this_dir / "kim_song_01.json", _source)
def kim_song_2():
return _read(this_dir / "kim_song_02.json", _source)
def kim_song_3():
return _read(this_dir / "kim_song_03.json", _source)
def kim_song_4():
return _read(this_dir / "kim_so
|
ng_04.json", _source)
def kim_song_5():
return _read(this_dir / "kim_song_05.json", _source)
def kim_song_6():
return _read(this_dir / "kim_song_06.json", _source)
def kim_song_7():
return _read(this_dir / "kim_song_07.json", _source)
# TODO find issue
def kim_song_8():
|
return _read(this_dir / "kim_song_08.json", _source)
def kim_song_9():
return _read(this_dir / "kim_song_09.json", _source)
def kim_song_10():
return _read(this_dir / "kim_song_10.json", _source)
def kim_song_11():
return _read(this_dir / "kim_song_11.json", _source)
def kim_song_12():
return _read(this_dir / "kim_song_12.json", _source)
def kim_song_13():
return _read(this_dir / "kim_song_13.json", _source)
def kim_song_14():
return _read(this_dir / "kim_song_14.json", _source)
def kim_song_15():
return _read(this_dir / "kim_song_15.json", _source)
register(
[
kim_song_1,
kim_song_2,
kim_song_3,
kim_song_4,
kim_song_5,
kim_song_6,
kim_song_7,
kim_song_8,
kim_song_9,
kim_song_10,
kim_song_11,
kim_song_12,
kim_song_13,
kim_song_14,
kim_song_15,
]
)
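# Hedged usage sketch: once register() has run, each scheme can be built and
# used to integrate over a disk. The integrate(f, center, radius) signature
# is assumed from quadpy's S2 helpers.
from quadpy.s2._kim_song import kim_song_4

scheme = kim_song_4()
# integrate f(x, y) = x**2 over the unit disk centered at the origin
val = scheme.integrate(lambda x: x[0] ** 2, [0.0, 0.0], 1.0)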
|
a301-teaching/a301_code
|
a301utils/a301_readfile.py
|
Python
|
mit
| 2,239
| 0.011166
|
"""
download a file named filename from the atsc301 downloads directory
and save it as a local file with the same name.
command line example::
python -m a301utils.a301_readfile photon_data.csv
module example::
from a3
|
01utils.a301_readfile import download
download('photon_data.csv')
"""
import argparse
import requests
from pathlib import Path
import sys
import os
import shutil
def download(filename):
"""
copy file filename from http://clouds.eos.ubc.ca/~phil/courses/atsc301/down
|
loads to
the local directory
Parameters
----------
filename: string
       name of the file to fetch from the downloads directory
Returns
-------
Side effect: Creates a copy of that file in the local directory
"""
url = 'https://clouds.eos.ubc.ca/~phil/courses/atsc301/downloads/{}'.format(filename)
filepath = Path('./{}'.format(filename))
if filepath.exists():
the_size = filepath.stat().st_size
print(('\n{} already exists\n'
'and is {} bytes\n'
'will not overwrite\n').format(filename,the_size))
return None
tempfile = str(filepath) + '_tmp'
temppath = Path(tempfile)
with open(tempfile, 'wb') as localfile:
response = requests.get(url, stream=True)
if not response.ok:
print('response: ',response)
raise Exception('Something is wrong, requests.get() failed with filename {}'.format(filename))
for block in response.iter_content(1024):
if not block:
break
localfile.write(block)
the_size=temppath.stat().st_size
if the_size < 10.e3:
print('Warning -- your file is tiny (smaller than 10 Kbyte)\nDid something go wrong?')
shutil.move(tempfile,filename)
the_size=filepath.stat().st_size
print('downloaded {}\nsize = {}'.format(filename,the_size))
return None
if __name__ == "__main__":
linebreaks=argparse.RawTextHelpFormatter
descrip=__doc__.lstrip()
parser = argparse.ArgumentParser(formatter_class=linebreaks,description=descrip)
parser.add_argument('filename',type=str,help='name of file to download')
args=parser.parse_args()
download(args.filename)
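# download() streams the body in 1024-byte blocks into a temp file and only
# renames it on success, so a failed transfer never leaves a half-written
# target. The same pattern sketched with a context-managed response
# (supported by recent requests releases; the URL here is hypothetical):
import requests

url = 'https://example.com/data.bin'  # hypothetical URL
with requests.get(url, stream=True) as response:
    response.raise_for_status()
    with open('data.bin_tmp', 'wb') as localfile:
        for block in response.iter_content(1024):
            localfile.write(block)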
|
MungoRae/home-assistant
|
homeassistant/components/sensor/knx.py
|
Python
|
apache-2.0
| 5,476
| 0
|
"""
Sensors of a KNX Device.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/knx/
"""
from enum import Enum
import logging
import voluptuous as vol
from homeassistant.const import (
CONF_NAME, CONF_MAXIMUM, CONF_MINIMUM,
CONF_TYPE, TEMP_CELSIUS
)
from homeassistant.components.knx import (KNXConfig, KNXGroupAddress)
from homeassistant.components.sensor import PLATFORM_SCHEMA
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ['knx']
DEFAULT_NAME = "KNX sensor"
CONF_TEMPERATURE = 'temperature'
CONF_ADDRESS = 'address'
CONF_ILLUMINANCE = 'illuminance'
CONF_PERCENTAGE = 'percentage'
CONF_SPEED_MS = 'speed_ms'
class KNXAddressType(Enum):
"""Enum to indicate conversion type for the KNX address."""
FLOAT = 1
PERCENT = 2
# define the fixed settings required for each sensor type
FIXED_SETTINGS_MAP = {
# Temperature as defined in KNX Standard 3.10 - 9.001 DPT_Value_Temp
CONF_TEMPERATURE: {
'unit': TEMP_CELSIUS,
'default_minimum': -273,
'default_maximum': 670760,
'address_type': KNXAddressType.FLOAT
},
# Speed m/s as defined in KNX Standard 3.10 - 9.005 DPT_Value_Wsp
CONF_SPEED_MS: {
'unit': 'm/s',
'default_minimum': 0,
'default_maximum': 670760,
'address_type': KNXAddressType.FLOAT
},
# Luminance(LUX) as defined in KNX Standard 3.10 - 9.004 DPT_Value_Lux
CONF_ILLUMINANCE: {
'unit': 'lx',
'default_minimum': 0,
'default_maximum': 670760,
'address_type': KNXAddressType.FLOAT
},
# Percentage(%) as defined in KNX Standard 3.10 - 5.001 DPT_Scaling
CONF_PERCENTAGE: {
'unit': '%',
'default_minimum': 0,
'default_maximum': 100,
'address_type': KNXAddressType.PERCENT
}
}
SENSOR_TYPES = set(FIXED_SETTINGS_MAP.keys())
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_TYPE): vol.In(SENSOR_TYPES),
vol.Required(CONF_ADDRESS): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_MINIMUM): vol.Coerce(float),
vol.Optional(CONF_MAXIMUM): vol.Coerce(float)
})
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Set up the KNX Sensor platform."""
add_devices([KNXSensor(hass, KNXConfig(config))])
class KNXSensor(KNXGroupAddress):
"""Representation of a KNX Sensor device."""
def __init__(self, hass, config):
"""Initialize a KNX Float Sensor."""
# set up the KNX Group address
KNXGroupAddress.__init__(self, hass, config)
device_type = config.config.get(CONF_TYPE)
sensor_config = FIXED_SETTINGS_MAP.get(device_type)
|
if not sensor_config:
raise NotImplementedError()
# set up the conversion function based on the address type
address_type = sensor_config.get('address_type')
if address_type == KNXAddressType.FLOAT:
self.convert = convert_float
elif address_type == KNXAddressType.PERCENT:
self.convert = convert_percent
|
else:
raise NotImplementedError()
# other settings
self._unit_of_measurement = sensor_config.get('unit')
default_min = float(sensor_config.get('default_minimum'))
default_max = float(sensor_config.get('default_maximum'))
self._minimum_value = config.config.get(CONF_MINIMUM, default_min)
self._maximum_value = config.config.get(CONF_MAXIMUM, default_max)
_LOGGER.debug(
"%s: configured additional settings: unit=%s, "
"min=%f, max=%f, type=%s",
self.name, self._unit_of_measurement,
self._minimum_value, self._maximum_value, str(address_type)
)
self._value = None
@property
def state(self):
"""Return the Value of the KNX Sensor."""
return self._value
@property
def unit_of_measurement(self):
"""Return the defined Unit of Measurement for the KNX Sensor."""
return self._unit_of_measurement
def update(self):
"""Update KNX sensor."""
super().update()
self._value = None
if self._data:
if self._data == 0:
value = 0
else:
value = self.convert(self._data)
if self._minimum_value <= value <= self._maximum_value:
self._value = value
@property
def cache(self):
"""We don't want to cache any Sensor Value."""
return False
def convert_float(raw_value):
"""Conversion for 2 byte floating point values.
2byte Floating Point KNX Telegram.
Defined in KNX 3.7.2 - 3.10
"""
from knxip.conversion import knx2_to_float
from knxip.core import KNXException
try:
return knx2_to_float(raw_value)
except KNXException as exception:
_LOGGER.error("Can't convert %s to float (%s)", raw_value, exception)
def convert_percent(raw_value):
"""Conversion for scaled byte values.
1byte percentage scaled KNX Telegram.
Defined in KNX 3.7.2 - 3.10.
"""
value = 0
try:
value = raw_value[0]
except (IndexError, ValueError):
# pknx returns a non-iterable type for unsuccessful reads
_LOGGER.error("Can't convert %s to percent value", raw_value)
return round(value * 100 / 255)
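# convert_float above delegates to knxip's knx2_to_float. For reference, the
# KNX DPT 9 (2-byte float) layout it decodes is 1 sign bit, 4 exponent bits,
# 11 mantissa bits, with value = 0.01 * mantissa * 2**exponent. A standalone
# sketch of that decoding (illustrative, not the knxip implementation):
def knx2_to_float_sketch(raw):
    # raw is a 2-byte sequence laid out as MEEEEMMM MMMMMMMM
    data = (raw[0] << 8) | raw[1]
    exponent = (data >> 11) & 0x0F
    mantissa = data & 0x07FF
    if data & 0x8000:       # sign bit set: two's-complement mantissa
        mantissa -= 0x0800
    return 0.01 * mantissa * (2 ** exponent)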
|
dshearer/jobber
|
platform_tests/keywords/testlib.py
|
Python
|
mit
| 27,023
| 0.001887
|
import subprocess as sp
import os
import stat
import shutil
import tempfile
import pwd
import time
import json
import yaml
_NORMUSER = 'normuser'
_RUNNER_LOG_FILE_FOR_ROOT = '/root/.jobber-log'
_RUNNER_LOG_FILE_FOR_NORMUSER = '/home/{0}/.jobber-log'.\
format(_NORMUSER)
_CONFIG_PATH = '/etc/jobber.conf'
_OLD_CONFIG_PATH = '/etc/jobber.conf.old'
_NOTIFY_PROGRAM = '''
import json
import sys
def main():
data = json.load(sys.stdin)
with open('{notify_output_path}', 'w') as f:
f.write("succeeded: {{0}}, status: {{1}}".format(
data['succeeded'],
data['job']['status']
))
if __name__ == '__main__':
main()
'''
_OCTAL_777 = int('777', base=8)
_OCTAL_755 = int('755', base=8)
_OCTAL_600 = int('600', base=8)
class _ProcInfo(object):
'''Info about a process'''
def __init__(self, pid, username, uid, tty, program):
self.pid = pid
self.username = username
self.uid = uid
self.tty = tty
self.program = program
def __eq__(self, other):
if self.pid != other.pid:
return False
if self.username != other.username:
return False
if self.uid != other.uid:
return False
if self.tty != other.tty:
return False
if self.program != other.program:
return False
return True
def __ne__(self, other):
return not (self == other)
def __lt__(self, other):
return (self.pid, self.username, self.uid, self.tty, self.program) < \
(other.pid, other.username, other.uid, other.tty, other.program)
def __repr__(self):
return "[{} {} {} {} {}]".format(self.pid, self.username, self.uid, \
self.tty, self.program)
def _get_proc_info(program_name):
'''
:return: List of instances of _ProcInfo.
'''
def split_by_whitespace(s, maxsplit):
s = ' '.join(s.split())
return s.split(' ', maxsplit)
args = ['ps', '-ax', '-o', 'pid,user,uid,tty,command']
proc = sp.Popen(args, stdout=sp.PIPE, stderr=sp.PIPE)
infos = []
skipped_first = False
for line in proc.stdout:
line = bytes(line).decode('ascii')
if not skipped_first:
skipped_first = True
continue
parts = split_by_whitespace(line.strip(), 4)
pid, username, uid, tty, program = parts
if program_name not in program:
continue
# Darwin uses '??' to mean 'no TTY'; Linux uses '?'
if tty == '?' or tty == '??':
tty = None
infos.append(_ProcInfo(pid, username, uid, tty, program))
if proc.wait() != 0:
print(proc.stderr.read())
        raise Exception("ps returned non-0")
return infos
def sp_check_output(args):
proc = sp.Popen(args, stdout=sp.PIPE, stderr=sp.PIPE)
out, err = proc.communicate()
out = bytes(out).decode('ascii')
err = bytes(err).decode('ascii')
if proc.returncode != 0:
msg = "{args} failed.\nStdout:\n{out}\nStderr:\n{err}".format(
args=args,
out=out,
err=err
)
raise AssertionError(msg)
if len(err) > 0:
print("STDERR: {0}".format(err))
return out
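# Hypothetical call illustrating the helper above: it returns decoded stdout
# and raises AssertionError (with stdout and stderr in the message) when the
# command exits non-zero.
out = sp_check_output(['echo', 'hello'])
assert out.strip() == 'hello'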
def sp_nocheck_log_output(args):
proc = sp.Popen(args, stdout=sp.PIPE, stderr=sp.PIPE)
out, err = proc.communicate()
out = bytes(out).decode('ascii')
err = bytes(err).decode('ascii')
print(out)
print(err)
return proc.returncode
def _find_file(name, dir):
for dirpath, dirnames, filenames in os.walk(dir):
if name in filenames:
return os.path.join(dirpath, name)
return None
def find_program(name):
dirs = ['/bin', '/sbin', '/usr/bin', '/usr/sbin', '/usr/local/bin',
'/usr/local/sbin']
for dir in dirs:
path = _find_file(name, dir)
if path is not None:
return path
raise Exception("Cannot find program {0}".format(name))
def program_exists(name):
try:
find_program(name)
except:
return False
else:
return True
class SystemDServiceCtl(object):
def stop_jobber(self):
sp_nocheck_log_output(['systemctl', 'stop', 'jobber'])
def restart_jobber(self):
self.stop_jobber()
sp_check_output(['systemctl', 'start', 'jobber'])
def get_jobber_status(self):
return sp_check_output(['systemctl', 'status', 'jobber'])
def get_jobbermaster_logs(self):
return sp_check_output(['journalctl', '-u', 'jobber'])
class LaunchctlServiceCtl(object):
def stop_jobber(self):
sp_nocheck_log_output(['launchctl', 'stop', 'info.nekonya.jobber'])
def restart_jobber(self):
self.stop_jobber()
sp_check_output(['launchctl', 'start', 'info.nekonya.jobber'])
def get_jobber_status(self):
return 'unknown'
def get_jobbermaster_logs(self):
path = '/var/log/system.log'
if not os.path.isfile(path):
return '[Unknown path to system log]'
args = ['tail', '-n', '20', path]
lines = sp_check_output(args).split('\n')
lines = [l for l in lines if 'jobbermaster' in l]
        return '\n'.join(lines)
class BrewServiceCtl(object):
def stop_jobber(self):
sp_nocheck_log_output(['brew', 'services', 'stop', 'jobber'])
def restart_jobber(self):
self.stop_jobber()
sp_check_output(['brew', 'services', 'start', 'jobber'])
def get_jobber_status(self):
return 'unknown'
def get_jobbermaster_logs(self):
path = '/var/log/system.log'
if not os.path.isfile(path):
return '[Unknown path to system log]'
args = ['tail', '-n', '20', path]
lines = sp_check_output(args).split('\n')
lines = [l for l in lines if 'jobbermaster' in l]
        return '\n'.join(lines)
def parse_list_arg(s):
parts = s.split(',')
return set([p for p in parts if len(p) > 0])
class testlib(object):
ROBOT_LIBRARY_VERSION = 1.0
def __init__(self):
# get paths to stuff
self._jobber_path = find_program('jobber')
self._python_path = find_program('python')
self._tmpfile_dir = None
self._next_tmpfile_nbr = 1
# make service control object
if program_exists('systemctl'):
self._servicectl = Sys
|
temDServiceCtl()
elif program_exists(
|
'brew') and 'jobber ' in \
sp_check_output(['brew', 'services']):
self._servicectl = BrewServiceCtl()
elif program_exists('launchctl'):
self._servicectl = LaunchctlServiceCtl()
else:
raise Exception("Cannot determine how to control Jobber service")
@property
def _root_jobfile_path(self):
root_entry = pwd.getpwuid(0)
return os.path.join(root_entry.pw_dir, '.jobber')
@property
def _normuser_jobfile_path(self):
normuser_entry = pwd.getpwnam(_NORMUSER)
return os.path.join(normuser_entry.pw_dir, '.jobber')
def make_tempfile_dir(self):
# make temp-file dir
self._tmpfile_dir = tempfile.mkdtemp()
os.chmod(self._tmpfile_dir, _OCTAL_777)
def rm_tempfile_dir(self):
shutil.rmtree(self._tmpfile_dir)
self._tmpfile_dir = None
def make_tempfile(self, create=False):
path = os.path.join(self._tmpfile_dir,
"tmp-{0}".format(self._next_tmpfile_nbr))
self._next_tmpfile_nbr += 1
if create:
open(path, "w").close()
return path
def stop_service(self):
self._servicectl.stop_jobber()
def restart_service(self):
# restart jobber service
try:
self._servicectl.restart_jobber()
except Exception as e:
self.print_debug_info()
raise e
# wait for it to be ready
started = False
stop_time = time.time() + 20
while time.time() < stop_time and not started:
args = [self._jobber_path, 'list']
proc = sp.Popen(args, stdout=sp.PIPE, stderr=sp.PIPE)
_, err = proc.communicate()
if proc.returncode ==
|
alfredgamulo/cloud-custodian
|
tools/c7n_azure/c7n_azure/session.py
|
Python
|
apache-2.0
| 14,857
| 0.001952
|
# Copyright The Cloud Custodian Authors.
# SPDX-License-Identifier: Apache-2.0
import importlib
import inspect
import json
import logging
import os
import sys
import types
from azure.common.credentials import BasicTokenAuthentication
from azure.core.credentials import AccessToken
from azure.identity import (AzureCliCredential, ClientSecretCredential,
ManagedIdentityCredential)
from azure.identity._credentials.azure_cli import _run_command
from msrestazure.azure_cloud import AZURE_PUBLIC_CLOUD
from requests import HTTPError
from c7n_azure import constants
from c7n_azure.utils import (C7nRetryPolicy, ManagedGroupHelper,
ResourceIdParser, StringUtils,
cost_query_override_api_version,
custodian_azure_send_override,
get_keyvault_auth_endpoint, get_keyvault_secret,
log_response_data)
from functools import lru_cache
log = logging.getLogger('custodian.azure.session')
class AzureCredential:
def __init__(self, cloud_endpoints, authorization_file=None, subscription_id_override=None):
# type: (*str, *str) -> None
if authorization_file:
with open(authorization_file) as json_file:
self._auth_params = json.load(json_file)
else:
self._auth_params = {
'client_id': os.environ.get(constants.ENV_CLIENT_ID),
'client_secret': os.environ.get(constants.ENV_CLIENT_SECRET),
'access_token': os.environ.get(constants.ENV_ACCESS_TOKEN),
'tenant_id': os.environ.get(constants.ENV_TENANT_ID),
'use_msi': bool(os.environ.get(constants.ENV_USE_MSI)),
'subscription_id': os.environ.get(constants.ENV_SUB_ID),
'keyvault_client_id': os.environ.get(constants.ENV_KEYVAULT_CLIENT_ID),
'keyvault_secret_id': os.environ.get(constants.ENV_KEYVAULT_SECRET_ID),
'enable_cli_auth': True
}
self._auth_params['authority'] = cloud_endpoints.endpoints.active_directory
keyvault_client_id = self._auth_params.get('keyvault_client_id')
keyvault_secret_id = self._auth_params.get('keyvault_secret_id')
# If user provided KeyVault secret, we will pull auth params information from it
try:
if keyvault_secret_id:
self._auth_params.update(
json.loads(
get_keyvault_secret(
keyvault_client_id,
keyvault_secret_id)
))
|
except HTTPError as e:
|
e.message = 'Failed to retrieve SP credential ' \
'from Key Vault with client id: {0}'.format(keyvault_client_id)
raise
self._credential = None
        if self._auth_params.get('access_token') is not None:
            auth_name = 'Access Token'
elif (self._auth_params.get('client_id') and
self._auth_params.get('client_secret') and
self._auth_params.get('tenant_id')
):
auth_name = 'Principal'
self._credential = ClientSecretCredential(
client_id=self._auth_params['client_id'],
client_secret=self._auth_params['client_secret'],
tenant_id=self._auth_params['tenant_id'],
authority=self._auth_params['authority'])
elif self._auth_params.get('use_msi'):
auth_name = 'MSI'
self._credential = ManagedIdentityCredential(
client_id=self._auth_params.get('client_id'))
elif self._auth_params.get('enable_cli_auth'):
auth_name = 'Azure CLI'
self._credential = AzureCliCredential()
            account_info, error = _run_command('az account show --output json')
            if error is not None:
                raise Exception('Unable to query TenantId and SubscriptionId')
            account_json = json.loads(account_info)
            self._auth_params['subscription_id'] = account_json['id']
            self._auth_params['tenant_id'] = account_json['tenantId']
if subscription_id_override is not None:
self._auth_params['subscription_id'] = subscription_id_override
self._subscription_id = self._auth_params['subscription_id']
self._tenant_id = self._auth_params['tenant_id']
log.info('Authenticated [%s | %s%s]',
auth_name, self.subscription_id,
' | Authorization File' if authorization_file else '')
def get_token(self, *scopes, **kwargs):
# Access Token is used only in tests realistically because
# KeyVault, Storage and mgmt plane requires separate tokens.
# TODO: Should we scope this to tests only?
if (self._auth_params.get('access_token')):
return AccessToken(self._auth_params['access_token'], expires_on=0)
try:
return self._credential.get_token(*scopes, **kwargs)
except Exception as e:
log.error('Failed to authenticate.\nMessage: {}'.format(e))
exit(1)
# This is temporary until all SDKs we use are upgraded to Track 2
# List of legacy users:
# - DNS
# - Record Set (uses DNS SDK)
# - Azure Graph
def legacy_credentials(self, scope):
# Track 2 SDKs use tuple
token = self.get_token((scope + '.default'))
return BasicTokenAuthentication(token={'access_token': token.token})
@property
def tenant_id(self):
# type: (None) -> str
return self._tenant_id
@property
def auth_params(self):
# type: (None) -> str
return self._auth_params
@property
def subscription_id(self):
# type: (None) -> str
return self._subscription_id
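# The access-token branch of get_token above effectively acts as a static
# credential. A minimal azure.core-compatible sketch of that idea
# (illustrative only, not part of c7n_azure; AccessToken is imported at the
# top of this file):
class StaticTokenCredential:
    """Wraps a pre-issued bearer token; expires_on=0 mirrors the path above."""
    def __init__(self, token):
        self._token = token

    def get_token(self, *scopes, **kwargs):
        return AccessToken(self._token, 0)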
class Session:
def __init__(self, subscription_id=None, authorization_file=None,
cloud_endpoints=None, resource_endpoint_type=constants.DEFAULT_AUTH_ENDPOINT):
"""
:param subscription_id: If provided overrides environment variables.
:param authorization_file: Path to file populated from 'get_functions_auth_string'
:param cloud_endpoints: List of endpoints for specified Azure Cloud. Defaults to public.
:param auth_endpoint: Resource endpoint for OAuth token.
"""
self._provider_cache = {}
self.subscription_id_override = subscription_id
self.credentials = None
self.authorization_file = authorization_file
self.cloud_endpoints = cloud_endpoints or AZURE_PUBLIC_CLOUD
self.resource_endpoint_type = resource_endpoint_type
self.resource_endpoint = self.get_auth_endpoint(resource_endpoint_type)
self.storage_endpoint = self.cloud_endpoints.suffixes.storage_endpoint
def _initialize_session(self):
"""
Creates a session using available authentication type.
"""
# Only run once
if self.credentials is not None:
return
try:
self.credentials = AzureCredential(
self.cloud_endpoints,
authorization_file=self.authorization_file,
subscription_id_override=self.subscription_id_override)
except Exception as e:
if hasattr(e, 'message'):
log.error(e.message)
else:
log.exception("Failed to authenticate.")
sys.exit(1)
if self.credentials is None:
log.error('Failed to authenticate.')
sys.exit(1)
def get_session_for_resource(self, resource):
return Session(
subscription_id=self.subscription_id_override,
authorization_file=self.authorization_file,
cloud_endpoints=self.cloud_endpoints,
resource_endpoint_type=resource)
@lru_cache()
def client(self, client, vault_url=None):
self._initialize_session()
service_name, client_name = client.rsplit('.', 1)
svc_module = importlib.import_mo
|
grimmjow8/ansible
|
lib/ansible/modules/commands/expect.py
|
Python
|
gpl-3.0
| 7,744
| 0.000646
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Matt Martz <matt@sivel.net>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will
|
be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'statu
|
s': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: expect
version_added: 2.0
short_description: Executes a command and responds to prompts
description:
- The C(expect) module executes a command and responds to prompts
- The given command will be executed on all selected nodes. It will not be
processed through the shell, so variables like C($HOME) and operations
like C("<"), C(">"), C("|"), and C("&") will not work
options:
command:
description:
- the command module takes command to run.
required: true
creates:
description:
- a filename, when it already exists, this step will B(not) be run.
required: false
removes:
description:
- a filename, when it does not exist, this step will B(not) be run.
required: false
chdir:
description:
- cd into this directory before running the command
required: false
responses:
description:
- Mapping of expected string/regex and string to respond with. If the
response is a list, successive matches return successive
responses. List functionality is new in 2.1.
required: true
timeout:
description:
- Amount of time in seconds to wait for the expected strings
default: 30
echo:
description:
- Whether or not to echo out your response strings
default: false
requirements:
- python >= 2.6
- pexpect >= 3.3
notes:
- If you want to run a command through the shell (say you are using C(<),
C(>), C(|), etc), you must specify a shell in the command such as
C(/bin/bash -c "/path/to/something | grep else")
- The question, or key, under I(responses) is a python regex match. Case
insensitive searches are indicated with a prefix of C(?i)
  - By default, if a question is encountered multiple times, its string
response will be repeated. If you need different responses for successive
question matches, instead of a string response, use a list of strings as
the response. The list functionality is new in 2.1
author: "Matt Martz (@sivel)"
'''
EXAMPLES = '''
# Case insensitive password string match
- expect:
command: passwd username
responses:
(?i)password: "MySekretPa$$word"
# Generic question with multiple different responses
- expect:
command: /path/to/custom/command
responses:
Question:
- response1
- response2
- response3
'''
import datetime
import os
try:
import pexpect
HAS_PEXPECT = True
except ImportError:
HAS_PEXPECT = False
from ansible.module_utils._text import to_text
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pycompat24 import get_exception
def response_closure(module, question, responses):
resp_gen = (u'%s\n' % to_text(r).rstrip(u'\n') for r in responses)
def wrapped(info):
try:
            return next(resp_gen)
except StopIteration:
module.fail_json(msg="No remaining responses for '%s', "
"output was '%s'" %
(question,
info['child_result_list'][-1]))
return wrapped
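# The closure above drains a generator, one response per prompt match; in
# plain Python the mechanics look like this (to_text is imported above):
responses = ['response1', 'response2']
resp_gen = (u'%s\n' % to_text(r).rstrip(u'\n') for r in responses)
next(resp_gen)  # u'response1\n' for the first match
next(resp_gen)  # u'response2\n' for the second
# a third match raises StopIteration, which wrapped() turns into fail_json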
def main():
module = AnsibleModule(
argument_spec=dict(
command=dict(required=True),
chdir=dict(),
creates=dict(),
removes=dict(),
responses=dict(type='dict', required=True),
timeout=dict(type='int', default=30),
echo=dict(type='bool', default=False),
)
)
if not HAS_PEXPECT:
module.fail_json(msg='The pexpect python module is required')
chdir = module.params['chdir']
args = module.params['command']
creates = module.params['creates']
removes = module.params['removes']
responses = module.params['responses']
timeout = module.params['timeout']
echo = module.params['echo']
events = dict()
for key, value in responses.items():
if isinstance(value, list):
response = response_closure(module, key, value)
else:
response = u'%s\n' % to_text(value).rstrip(u'\n')
events[key.decode()] = response
if args.strip() == '':
module.fail_json(rc=256, msg="no command given")
if chdir:
chdir = os.path.abspath(os.path.expanduser(chdir))
os.chdir(chdir)
if creates:
# do not run the command if the line contains creates=filename
# and the filename already exists. This allows idempotence
# of command executions.
v = os.path.expanduser(creates)
if os.path.exists(v):
module.exit_json(
cmd=args,
stdout="skipped, since %s exists" % v,
changed=False,
rc=0
)
if removes:
# do not run the command if the line contains removes=filename
# and the filename does not exist. This allows idempotence
# of command executions.
v = os.path.expanduser(removes)
if not os.path.exists(v):
module.exit_json(
cmd=args,
stdout="skipped, since %s does not exist" % v,
changed=False,
rc=0
)
startd = datetime.datetime.now()
try:
try:
# Prefer pexpect.run from pexpect>=4
out, rc = pexpect.run(args, timeout=timeout, withexitstatus=True,
events=events, cwd=chdir, echo=echo,
encoding='utf-8')
except TypeError:
# Use pexpect.runu in pexpect>=3.3,<4
out, rc = pexpect.runu(args, timeout=timeout, withexitstatus=True,
events=events, cwd=chdir, echo=echo)
except (TypeError, AttributeError):
e = get_exception()
# This should catch all insufficient versions of pexpect
# We deem them insufficient for their lack of ability to specify
# to not echo responses via the run/runu functions, which would
        # potentially leak sensitive information
module.fail_json(msg='Insufficient version of pexpect installed '
'(%s), this module requires pexpect>=3.3. '
'Error was %s' % (pexpect.__version__, e))
except pexpect.ExceptionPexpect:
e = get_exception()
module.fail_json(msg='%s' % e)
endd = datetime.datetime.now()
delta = endd - startd
if out is None:
out = ''
ret = dict(
cmd=args,
stdout=out.rstrip('\r\n'),
rc=rc,
start=str(startd),
end=str(endd),
delta=str(delta),
changed=True,
)
if rc is not None:
module.exit_json(**ret)
else:
ret['msg'] = 'command exceeded timeout'
module.fail_json(**ret)
if __name__ == '__main__':
main()
|
ichuang/sympy
|
sympy/physics/mechanics/kane.py
|
Python
|
bsd-3-clause
| 37,447
| 0.001762
|
__all__ = ['Kane']
from sympy import Symbol, zeros, Matrix, diff, solve_linear_system_LU, eye
from sympy.utilities import default_sort_key
from sympy.physics.mechanics.essential import ReferenceFrame, dynamicsymbols
from sympy.physics.mechanics.particle import Particle
from sympy.physics.mechanics.point
|
import Point
from sympy.physics.mechanics.rigidbody import RigidBody
class Kane(object):
"""Kane's method object.
This object is used to do the "book-keeping" as you go through and form
equations of motion in the way Kane presents in:
Kane, T., Levinson, D. Dynamics Theory and Applications. 1985 McGraw-Hill
The attributes are for equations in the form [M] udot = forcing.
Very Important Warning: simp is set to True by de
|
fault, to the advantage of
smaller, simpler systems. If your system is large, it will lead to
    slowdowns; however, turning it off might have negative implications in
numerical evaluation. Care needs to be taken to appropriately reduce
expressions generated with simp==False, as they might be too large
themselves. Computing the relationship between independent and dependent
speeds (when dealing with non-holonomic systems) benefits from simp being
set to True (during the .speeds() method); the same is true for
linearization of non-holonomic systems. If numerical evaluations are
    unsuccessful with simp==False, try setting simp to True only for these
methods; this provides some compromise between the two options.
Attributes
==========
auxiliary : Matrix
If applicable, the set of auxiliary Kane's
equations used to solve for non-contributing
forces.
mass_matrix : Matrix
The system's mass matrix
forcing : Matrix
The system's forcing vector
simp : Boolean
Flag determining whether simplification of symbolic matrix
inversion can occur or not
mass_matrix_full : Matrix
The "mass matrix" for the u's and q's
forcing_full : Matrix
The "forcing vector" for the u's and q's
Examples
========
    This is a simple example for a one degree of freedom translational
spring-mass-damper.
In this example, we first need to do the kinematics.
This involves creating generalized speeds and coordinates and their
derivatives.
Then we create a point and set its velocity in a frame::
>>> from sympy import symbols
>>> from sympy.physics.mechanics import dynamicsymbols, ReferenceFrame
>>> from sympy.physics.mechanics import Point, Particle, Kane
>>> q, u = dynamicsymbols('q u')
>>> qd, ud = dynamicsymbols('q u', 1)
>>> m, c, k = symbols('m c k')
>>> N = ReferenceFrame('N')
>>> P = Point('P')
>>> P.set_vel(N, u * N.x)
    Next we need to arrange/store information in the way Kane requires.
The kinematic differential equations need to be stored in a dict.
A list of forces/torques must be constructed, where each entry in the list
is a (Point, Vector) or (ReferenceFrame, Vector) tuple, where the Vectors
represent the Force or Torque.
Next a particle needs to be created, and it needs to have a point and mass
assigned to it.
Finally, a list of all bodies and particles needs to be created::
>>> kd = [qd - u]
>>> FL = [(P, (-k * q - c * u) * N.x)]
>>> pa = Particle('pa', P, m)
>>> BL = [pa]
Finally we can generate the equations of motion.
First we create the Kane object and supply an inertial frame.
Next we pass it the generalized speeds.
Then we pass it the kinematic differential equation dict.
Next we form FR* and FR to complete: Fr + Fr* = 0.
We have the equations of motion at this point.
    It makes sense to rearrange them though, so we calculate the mass matrix and
the forcing terms, for E.o.M. in the form: [MM] udot = forcing, where MM is
the mass matrix, udot is a vector of the time derivatives of the
generalized speeds, and forcing is a vector representing "forcing" terms::
>>> KM = Kane(N)
>>> KM.coords([q])
>>> KM.speeds([u])
>>> KM.kindiffeq(kd)
>>> (fr, frstar) = KM.kanes_equations(FL, BL)
>>> MM = KM.mass_matrix
>>> forcing = KM.forcing
>>> rhs = MM.inv() * forcing
>>> rhs
[-(c*u(t) + k*q(t))/m]
>>> KM.linearize()[0]
[0, 1]
[k, c]
Please look at the documentation pages for more information on how to
perform linearization and how to deal with dependent coordinates & speeds,
    and how to deal with bringing non-contributing forces into evidence.
"""
simp = True
def __init__(self, frame):
"""Supply the inertial frame for Kane initialization. """
# Big storage things
self._inertial = frame
self._forcelist = None
self._bodylist = None
self._fr = None
self._frstar = None
self._rhs = None
self._aux_eq = None
# States
self._q = None
self._qdep = []
self._qdot = None
self._u = None
self._udep = []
self._udot = None
self._uaux = None
# Differential Equations Matrices
self._k_d = None
self._f_d = None
self._k_kqdot = None
self._k_ku = None
self._f_k = None
# Constraint Matrices
self._f_h = Matrix([])
self._k_nh = Matrix([])
self._f_nh = Matrix([])
self._k_dnh = Matrix([])
self._f_dnh = Matrix([])
def _find_dynamicsymbols(self, inlist, insyms=[]):
"""Finds all non-supplied dynamicsymbols in the expressions."""
from sympy.core.function import AppliedUndef, Derivative
t = dynamicsymbols._t
temp_f = set().union(*[i.atoms(AppliedUndef) for i in inlist])
temp_d = set().union(*[i.atoms(Derivative) for i in inlist])
set_f = set([a for a in temp_f if a.args == (t,)])
set_d = set([a for a in temp_d if ((a.args[0] in set_f) and all([i == t
for i in a.variables]))])
return list(set.union(set_f, set_d) - set(insyms))
def _find_othersymbols(self, inlist, insyms=[]):
"""Finds all non-dynamic symbols in the expressions."""
return list(reduce(set.union, [i.atoms(Symbol) for i in inlist]) -
set(insyms))
def _mat_inv_mul(self, A, B):
"""Internal Function
Computes A^-1 * B symbolically w/ substitution, where B is not
necessarily a vector, but can be a matrix.
"""
# Note: investigate difficulty in only creating symbols for non-zero
# entries; this could speed things up, perhaps?
r1, c1 = A.shape
r2, c2 = B.shape
temp1 = Matrix(r1, c1, lambda i, j: Symbol('x' + str(j + r1 * i)))
temp2 = Matrix(r2, c2, lambda i, j: Symbol('y' + str(j + r2 * i)))
for i in range(len(temp1)):
if A[i] == 0:
temp1[i] = 0
for i in range(len(temp2)):
if B[i] == 0:
temp2[i] = 0
temp3 = []
for i in range(c2):
temp3.append(temp1.LUsolve(temp2.extract(range(r2), [i])))
temp3 = Matrix([i.T for i in temp3]).T
        if Kane.simp:
temp3.simplify()
return temp3.subs(dict(zip(temp1, A))).subs(dict(zip(temp2, B)))
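    # The placeholder trick used above (solve with fresh symbols, then
    # substitute the real entries back) on a 2x2 toy system (illustrative
    # snippet, not part of the Kane class):
    #
    #     from sympy import Matrix, symbols
    #     x0, x1, y0 = symbols('x0 x1 y0')
    #     A = Matrix([[x0, 0], [0, x1]])
    #     B = Matrix([y0, y0])
    #     sol = A.LUsolve(B)               # Matrix([[y0/x0], [y0/x1]])
    #     sol.subs({x0: 2, x1: 4, y0: 8})  # Matrix([[4], [2]])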
def coords(self, qind, qdep=[], coneqs=[]):
"""Supply all the generalized coordiantes in a list.
If some coordinates are dependent, supply them as part of qdep. Their
dependent nature will only show up in the linearization process though.
Parameters
==========
qind : list
A list of independent generalized coords
qdep : list
List of dependent coordinates
        coneqs : list
List of expressions which are equal to zero; these are the
configuration constraint eq
|
js0701/chromium-crosswalk
|
third_party/WebKit/Tools/Scripts/webkitpy/layout_tests/controllers/manager.py
|
Python
|
bsd-3-clause
| 27,322
| 0.003221
|
# Copyright (C) 2010 Google Inc. All rights reserved.
# Copyright (C) 2010 Gabor Rapcsanyi (rgabor@inf.u-szeged.hu), University of Szeged
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
The Manager runs a series of tests (TestType interface) against a set
of test files. If a test file fails a TestType, it returns a list of TestFailure
objects to the Manager. The Manager then aggregates the TestFailures to
create a final report.
"""
import datetime
import json
import logging
import random
import sys
import time
from webkitpy.common.net.file_uploader import FileUploader
from webkitpy.layout_tests.controllers.layout_test_finder import LayoutTestFinder
from webkitpy.layout_tests.controllers.layout_test_runner import LayoutTestRunner
from webkitpy.layout_tests.controllers.test_result_writer import TestResultWriter
from webkitpy.layout_tests.layout_package import json_results_generator
from webkitpy.layout_tests.models import test_expectations
from webkitpy.layout_tests.models import test_failures
from webkitpy.layout_tests.models import test_run_results
from webkitpy.layout_tests.models.test_input import TestInput
from webkitpy.tool import grammar
_log = logging.getLogger(__name__)
# Builder base URL where we have the archived test results.
BUILDER_BASE_URL = "http://build.chromium.org/buildbot/layout_test_results/"
TestExpectations = test_expectations.TestExpectations
class Manager(object):
"""A class for managing running a series of tests on a series of layout
test files."""
def __init__(self, port, options, printer):
"""Initialize test runner data structures.
Args:
port: an object implementing port-specific
options: a dictionary of command line options
printer: a Printer object to record updates to.
"""
self._port = port
self._filesystem = port.host.filesystem
self._options = options
self._printer = printer
self._expectations = None
self.HTTP_SUBDIR = 'http' + port.TEST_PATH_SEPARATOR
se
|
lf.INSPECTOR_SUBDIR = 'inspector' + port.TEST_PATH_SEPARATOR
self.PERF_SUBDIR = 'perf'
self.WEBSOCKET_SUBDIR = 'websocket' + port.TEST_PATH_SEPA
|
RATOR
self.VIRTUAL_HTTP_SUBDIR = port.TEST_PATH_SEPARATOR.join([
'virtual', 'stable', 'http'])
self.LAYOUT_TESTS_DIRECTORY = 'LayoutTests'
self.ARCHIVED_RESULTS_LIMIT = 25
self._http_server_started = False
self._wptserve_started = False
self._websockets_server_started = False
self._results_directory = self._port.results_directory()
self._finder = LayoutTestFinder(self._port, self._options)
self._runner = LayoutTestRunner(self._options, self._port, self._printer, self._results_directory, self._test_is_slow)
def _collect_tests(self, args):
return self._finder.find_tests(args, test_list=self._options.test_list,
fastest_percentile=self._options.fastest)
def _is_http_test(self, test):
return (
test.startswith(self.HTTP_SUBDIR) or
self._is_websocket_test(test) or
self.VIRTUAL_HTTP_SUBDIR in test
)
def _is_inspector_test(self, test):
return self.INSPECTOR_SUBDIR in test
def _is_websocket_test(self, test):
if self._port.is_wpt_enabled() and self._port.is_wpt_test(test):
return False
return self.WEBSOCKET_SUBDIR in test
def _http_tests(self, test_names):
return set(test for test in test_names if self._is_http_test(test))
def _is_perf_test(self, test):
return self.PERF_SUBDIR == test or (self.PERF_SUBDIR + self._port.TEST_PATH_SEPARATOR) in test
def _prepare_lists(self, paths, test_names):
tests_to_skip = self._finder.skip_tests(paths, test_names, self._expectations, self._http_tests(test_names))
tests_to_run = [test for test in test_names if test not in tests_to_skip]
if not tests_to_run:
return tests_to_run, tests_to_skip
# Create a sorted list of test files so the subset chunk,
# if used, contains alphabetically consecutive tests.
if self._options.order == 'natural':
tests_to_run.sort(key=self._port.test_key)
elif self._options.order == 'random':
random.shuffle(tests_to_run)
elif self._options.order == 'random-seeded':
rnd = random.Random()
rnd.seed(4) # http://xkcd.com/221/
rnd.shuffle(tests_to_run)
tests_to_run, tests_in_other_chunks = self._finder.split_into_chunks(tests_to_run)
self._expectations.add_extra_skipped_tests(tests_in_other_chunks)
tests_to_skip.update(tests_in_other_chunks)
return tests_to_run, tests_to_skip
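    # Note the fixed seed in the 'random-seeded' branch above: it makes the
    # shuffled order identical across runs, which is the point. A minimal
    # illustration (illustrative snippet, not part of the Manager class):
    #
    #     import random
    #     rnd = random.Random()
    #     rnd.seed(4)                # fixed seed, as above
    #     order = list(range(5))
    #     rnd.shuffle(order)         # same permutation on every run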
def _test_input_for_file(self, test_file):
return TestInput(test_file,
self._options.slow_time_out_ms if self._test_is_slow(test_file) else self._options.time_out_ms,
self._test_requires_lock(test_file),
should_add_missing_baselines=(self._options.new_test_results and not self._test_is_expected_missing(test_file)))
def _test_requires_lock(self, test_file):
"""Return True if the test needs to be locked when
running multiple copies of NRWTs. Perf tests are locked
because heavy load caused by running other tests in parallel
might cause some of them to timeout."""
return self._is_http_test(test_file) or self._is_perf_test(test_file)
def _test_is_expected_missing(self, test_file):
expectations = self._expectations.model().get_expectations(test_file)
return test_expectations.MISSING in expectations or test_expectations.NEEDS_REBASELINE in expectations or test_expectations.NEEDS_MANUAL_REBASELINE in expectations
def _test_is_slow(self, test_file):
return test_expectations.SLOW in self._expectations.model().get_expectations(test_file)
def needs_servers(self, test_names):
return any(self._test_requires_lock(test_name) for test_name in test_names)
def _rename_results_folder(self):
try:
timestamp = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime(self._filesystem.mtime(self._filesystem.join(self._results_directory, "results.html"))))
except (IOError, OSError), e:
# It might be possible that results.html was not generated in previous run, because the test
# run was interrupted even before testing started. In those cases, don't archive the folder.
# Simply override the current folder contents with new results.
import errno
if e.errno == errno.EEXIST or e.errno == errno.ENOENT:
self._printer.write_
|
pli3/enigma2-git
|
lib/python/Components/NimManager.py
|
Python
|
gpl-2.0
| 63,253
| 0.034623
|
from Tools.HardwareInfo import HardwareInfo
from Tools.BoundFunction import boundFunction
from config import config, ConfigSubsection, ConfigSelection, ConfigFloat, \
ConfigSatlist, ConfigYesNo, ConfigInteger, ConfigSubList, ConfigNothing, \
ConfigSubDict, ConfigOnOff, ConfigDateTime
from enigma import eDVBSatelliteEquipmentControl as secClass, \
eDVBSatelliteLNBParameters as lnbParam, \
eDVBSatelliteDiseqcParameters as diseqcParam, \
eDVBSatelliteSwitchParameters as switchParam, \
eDVBSatelliteRotorParameters as rotorParam, \
eDVBResourceManager, eDVBDB, eEnv
from time import localtime, mktime
from datetime import datetime
from Tools.BoundFunction import boundFunction
from Tools import Directories
import xml.etree.cElementTree
def getConfigSatlist(orbpos, satlist):
default_orbpos = None
for x in satlist:
if x[0] == orbpos:
default_orbpos = orbpos
break
return ConfigSatlist(satlist, default_orbpos)
class SecConfigure:
def getConfiguredSats(self):
return self.configuredSatellites
def addSatellite(self, sec, orbpos):
sec.addSatellite(orbpos)
self.configuredSatellites.add(orbpos)
def addLNBSimple(self, sec, slotid, diseqcmode, toneburstmode = diseqcParam.NO, diseqcpos = diseqcParam.SENDNO, orbpos = 0, longitude = 0, latitude = 0, loDirection = 0, laDirection = 0, turningSpeed = rotorParam.FAST, useInputPower=True, inputPowerDelta=50, fastDiSEqC = False, setVoltageTone = True, diseqc13V = False):
if orbpos is None or orbpos == 3600 or orbpos == 3601:
return
#simple defaults
sec.addLNB()
tunermask = 1 << slotid
if self.equal.has_key(slotid):
for slot in self.equal[slotid]:
tunermask |= (1 << slot)
if self.linked.has_key(slotid):
for slot in self.linked[slotid]:
tunermask |= (1 << slot)
sec.setLNBSatCR(-1)
sec.setLNBNum(1)
sec.setLNBLOFL(9750000)
sec.setLNBLOFH(10600000)
sec.setLNBThreshold(11700000)
sec.setLNBIncreasedVoltage(lnbParam.OFF)
sec.setRepeats(0)
sec.setFastDiSEqC(fastDiSEqC)
sec.setSeqRepeat(0)
sec.setCommandOrder(0)
#user values
sec.setDiSEqCMode(diseqcmode)
sec.setToneburst(toneburstmode)
sec.setCommittedCommand(diseqcpos)
sec.setUncommittedCommand(0) # SENDNO
#print "set orbpos to:" + str(orbpos)
if 0 <= diseqcmode < 3:
self.addSatellite(sec, orbpos)
if setVoltageTone:
if diseqc13V:
sec.setVoltageMode(switchParam.HV_13)
else:
sec.setVoltageMode(switchParam.HV)
sec.setToneMode(switchParam.HILO)
else:
sec.setVoltageMode(switchParam._14V)
sec.setToneMode(switchParam.OFF)
elif (diseqcmode == 3): # diseqc 1.2
if self.satposdepends.has_key(slotid):
for slot in self.satposdepends[slotid]:
tunermask |= (1 << slot)
sec.setLatitude(latitude)
sec.setLaDirection(laDirection)
sec.setLongitude(longitude)
sec.setLoDirection(loDirection)
sec.setUseInputpower(useInputPower)
sec.setInputpowerDelta(inputPowerDelta)
sec.setRotorTurningSpeed(turningSpeed)
for x in self.NimManager.satList:
print "Add sat " + str(x[0])
self.addSatellite(sec, int(x[0]))
if diseqc13V:
sec.setVoltageMode(switchParam.HV_13)
else:
sec.setVoltageMode(switchParam.HV)
sec.setToneMode(switchParam.HILO)
sec.setRotorPosNum(0) # USALS
sec.setLNBSlotMask(tunermask)
def setSatposDepends(self, sec, nim1, nim2):
print "tuner", nim1, "depends on satpos of", nim2
sec.setTunerDepends(nim1, nim2)
def linkInternally(self, slotid):
nim = self.NimManager.getNim(slotid)
if nim.internallyConnectableTo is not None:
nim.setInternalLink()
def linkNIMs(self, sec, nim1, nim2):
print "link tuner", nim1, "to tuner", nim2
if nim2 == (nim1 - 1):
self.linkInternally(nim1)
sec.setTunerLinked(nim1, nim2)
def getRoot(self, slotid, connto):
visited = []
while (self.NimManager.getNimConfig(connto).configMode.value in ("satposdepends", "equal", "loopthrough")):
connto = int(self.NimManager.getNimConfig(connto).connectedTo.value)
if connto in visited: # prevent endless loop
return slotid
visited.append(connto)
return connto
def update(self):
sec = secClass.getInstance()
self.configuredSatellites = set()
for slotid in self.NimManager.getNimListOfType("DVB-S"):
if self.NimManager.nimInternallyConnectableTo(slotid) is not None:
self.NimManager.nimRemoveInternalLink(slotid)
		sec.clear() ## this also unlinks NIMs !!
print "sec config cleared"
self.linked = { }
self.satposdepends = { }
self.equal = { }
nim_slots = self.NimManager.nim_slots
used_nim_slots = [ ]
|
for slot in nim_slots:
if slot.type is not None:
used_nim_slots.append((slot.slot, slot.description, slot.config.configMode.value != "nothing" and True or False, slot.isCompatible("DVB-S2"), slot.frontend_id is None and -1 or slot.frontend_id))
eDVBResourceManager.getInstance().se
|
tFrontendSlotInformations(used_nim_slots)
for slot in nim_slots:
if slot.frontend_id is not None:
types = [type for type in ["DVB-T", "DVB-C", "DVB-S", "ATSC"] if eDVBResourceManager.getInstance().frontendIsCompatible(slot.frontend_id, type)]
if len(types) > 1:
slot.multi_type = {}
for type in types:
slot.multi_type[str(types.index(type))] = type
for slot in nim_slots:
x = slot.slot
nim = slot.config
if slot.isCompatible("DVB-S"):
# save what nim we link to/are equal to/satposdepends to.
# this is stored in the *value* (not index!) of the config list
if nim.configMode.value == "equal":
connto = self.getRoot(x, int(nim.connectedTo.value))
if not self.equal.has_key(connto):
self.equal[connto] = []
self.equal[connto].append(x)
elif nim.configMode.value == "loopthrough":
self.linkNIMs(sec, x, int(nim.connectedTo.value))
connto = self.getRoot(x, int(nim.connectedTo.value))
if not self.linked.has_key(connto):
self.linked[connto] = []
self.linked[connto].append(x)
elif nim.configMode.value == "satposdepends":
self.setSatposDepends(sec, x, int(nim.connectedTo.value))
connto = self.getRoot(x, int(nim.connectedTo.value))
if not self.satposdepends.has_key(connto):
self.satposdepends[connto] = []
self.satposdepends[connto].append(x)
for slot in nim_slots:
x = slot.slot
nim = slot.config
hw = HardwareInfo()
if slot.isCompatible("DVB-S"):
print "slot: " + str(x) + " configmode: " + str(nim.configMode.value)
if nim.configMode.value in ( "loopthrough", "satposdepends", "nothing" ):
pass
else:
sec.setSlotNotLinked(x)
if nim.configMode.value == "equal":
pass
elif nim.configMode.value == "simple": #simple config
print "diseqcmode: ", nim.diseqcMode.value
if nim.diseqcMode.value == "single": #single
if nim.simpleSingleSendDiSEqC.value:
self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcA.orbital_position, toneburstmode = diseqcParam.NO, diseqcmode = diseqcParam.V1_0, diseqcpos = diseqcParam.AA, diseqc13V = nim.diseqc13V.value)
else:
self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcA.orbital_position, toneburstmode = diseqcParam.NO, diseqcmode = diseqcParam.NONE, diseqcpos = diseqcParam.SENDNO, diseqc13V = nim.diseqc13V.value)
elif nim.diseqcMode.value == "toneburst_a_b": #Toneburst A/B
self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcA.orbital_position, toneburstmode = diseqcParam.A, diseqcmode = diseqcParam.V1_0, diseqcpos = diseqcParam.SENDNO, diseqc13V = nim.diseqc13V.value)
self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcB.orbital_position, toneburstmode = diseqcParam.B, diseqcmode = diseqcParam.V1_0, diseqcpos = diseqcParam.SENDNO, diseqc13V = nim.diseqc13V.value)
elif nim.diseqcMode.value == "diseqc_a_b": #DiSEqC A/B
fastDiSEqC = nim.simpleDiSEqCOnlyOnSatChange.value
setVoltageTone = nim.simpleDiSEqCSetVoltageTone.value
self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcA.orbital_position, toneburstmode = diseqcParam.NO, diseqcmode = diseqcParam.V1_0, diseqcpos = diseqcParam.AA, fastDiSEqC = fastDiSEqC, setVoltageTone = setVoltageTone, diseqc13V = nim.diseqc13V.value)
self.ad
|
AndrewPeelMV/Blender2.78c
|
2.78/scripts/startup/bl_ui/space_userpref.py
|
Python
|
gpl-2.0
| 50,089
| 0.001437
|
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
import bpy
from bpy.types import Header, Menu, Panel
from bpy.app.translations import pgettext_iface as iface_
from bpy.app.translations import contexts as i18n_contexts
def opengl_lamp_buttons(column, lamp):
split = column.row()
split.prop(lamp, "use", text="", icon='OUTLINER_OB_LAMP' if lamp.use else 'LAMP_DATA')
col = split.column()
col.active = lamp.use
row = col.row()
row.label(text="Diffuse:")
row.prop(lamp, "diffuse_color", text="")
row = col.row()
row.label(text="Specular:")
row.prop(lamp, "specular_color", text="")
col = split.column()
col.active = lamp.use
col.prop(lamp, "direction", text="")
class USERPREF_HT_header(Header):
bl_space_type = 'USER_PREFERENCES'
def draw(self, context):
layout = self.layout
layout.template_header()
userpref = context.user_preferences
layout.operator_context = 'EXEC_AREA'
layout.operator("wm.save_userpref")
layout.operator_context = 'INVOKE_DEFAULT'
if userpref.active_section == 'INPUT':
layout.operator("wm.keyconfig_import")
layout.operator("wm.keyconfig_export")
elif userpref.active_section == 'ADDONS':
layout.operator("wm.addon_install", icon='FILESEL')
layout.operator("wm.addon_refresh", icon='FILE_REFRESH')
layout.menu("USERPREF_MT_addons_online_resources")
elif userpref.active_section == 'THEMES':
layout.operator("ui.reset_default_theme")
layout.operator("wm.theme_install")
class USERPREF_PT_tabs(Panel):
bl_label = ""
bl_space_type = 'USER_PREFERENCES'
bl_region_type = 'WINDOW'
bl_options = {'HIDE_HEADER'}
def draw(self, context):
layout = self.layout
userpref = context.user_preferences
layout.prop(userpref, "active_section", expand=True)
class USERPREF_MT_interaction_presets(Menu):
bl_label = "Presets"
preset_subdir = "interaction"
preset_operator = "script.execute_preset"
draw = Menu.draw_preset
class USERPREF_MT_appconfigs(Menu):
bl_label = "AppPresets"
preset_subdir = "keyconfig"
preset_operator = "wm.appconfig_activate"
def draw(self, context):
self.layout.operator("wm.appconfig_default", text="Blender (default)")
# now draw the presets
Menu.draw_preset(self, context)
class USERPREF_MT_splash(Menu):
bl_label = "Splash"
def draw(self, context):
layout = s
|
elf.layout
split = layout.split()
|
row = split.row()
row.label("")
row = split.row()
row.label("Interaction:")
text = bpy.path.display_name(context.window_manager.keyconfigs.active.name)
if not text:
text = "Blender (default)"
row.menu("USERPREF_MT_appconfigs", text=text)
# only for addons
class USERPREF_MT_splash_footer(Menu):
bl_label = ""
def draw(self, context):
pass
class USERPREF_PT_interface(Panel):
bl_space_type = 'USER_PREFERENCES'
bl_label = "Interface"
bl_region_type = 'WINDOW'
bl_options = {'HIDE_HEADER'}
@classmethod
def poll(cls, context):
userpref = context.user_preferences
return (userpref.active_section == 'INTERFACE')
def draw(self, context):
import sys
layout = self.layout
userpref = context.user_preferences
view = userpref.view
row = layout.row()
col = row.column()
col.label(text="Display:")
col.prop(view, "show_tooltips")
col.prop(view, "show_tooltips_python")
col.prop(view, "show_object_info", text="Object Info")
col.prop(view, "show_large_cursors")
col.prop(view, "show_view_name", text="View Name")
col.prop(view, "show_playback_fps", text="Playback FPS")
col.prop(view, "use_global_scene")
col.prop(view, "object_origin_size")
col.separator()
col.separator()
col.separator()
col.prop(view, "show_mini_axis", text="Display Mini Axis")
sub = col.column()
sub.active = view.show_mini_axis
sub.prop(view, "mini_axis_size", text="Size")
sub.prop(view, "mini_axis_brightness", text="Brightness")
col.separator()
if sys.platform[:3] == "win":
col.label("Warnings")
col.prop(view, "use_quit_dialog")
row.separator()
row.separator()
col = row.column()
col.label(text="View Manipulation:")
col.prop(view, "use_mouse_depth_cursor")
col.prop(view, "use_mouse_depth_navigate")
col.prop(view, "use_zoom_to_mouse")
col.prop(view, "use_rotate_around_active")
col.prop(view, "use_global_pivot")
col.prop(view, "use_camera_lock_parent")
col.separator()
col.prop(view, "use_auto_perspective")
col.prop(view, "smooth_view")
col.prop(view, "rotation_angle")
col.separator()
col.separator()
col.label(text="2D Viewports:")
col.prop(view, "view2d_grid_spacing_min", text="Minimum Grid Spacing")
col.prop(view, "timecode_style")
col.prop(view, "view_frame_type")
if (view.view_frame_type == 'SECONDS'):
col.prop(view, "view_frame_seconds")
elif (view.view_frame_type == 'KEYFRAMES'):
col.prop(view, "view_frame_keyframes")
row.separator()
row.separator()
col = row.column()
#Toolbox doesn't exist yet
#col.label(text="Toolbox:")
#col.prop(view, "show_column_layout")
#col.label(text="Open Toolbox Delay:")
#col.prop(view, "open_left_mouse_delay", text="Hold LMB")
#col.prop(view, "open_right_mouse_delay", text="Hold RMB")
col.prop(view, "show_manipulator")
sub = col.column()
sub.active = view.show_manipulator
sub.prop(view, "manipulator_size", text="Size")
sub.prop(view, "manipulator_handle_size", text="Handle Size")
sub.prop(view, "manipulator_hotspot", text="Hotspot")
col.separator()
col.separator()
col.separator()
col.label(text="Menus:")
col.prop(view, "use_mouse_over_open")
sub = col.column()
sub.active = view.use_mouse_over_open
sub.prop(view, "open_toplevel_delay", text="Top Level")
sub.prop(view, "open_sublevel_delay", text="Sub Level")
col.separator()
col.label(text="Pie Menus:")
sub = col.column(align=True)
sub.prop(view, "pie_animation_timeout")
sub.prop(view, "pie_initial_timeout")
sub.prop(view, "pie_menu_radius")
sub.prop(view, "pie_menu_threshold")
sub.prop(view, "pie_menu_confirm")
col.separator()
col.separator()
col.separator()
col.prop(view, "show_splash")
class USERPREF_PT_edit(Panel):
bl_space_type = 'USER_PREFERENCES'
bl_label = "Edit"
bl_region_type = 'WINDOW'
bl_options = {'HIDE_HEADER'}
@classmethod
def poll(cls, context):
userpref = context.user_preferences
return (userpref.active_section == 'EDITING')
def draw(self, context):
layout = self.layout
userpref = context.user_preferences
edit = userpref.edit
row = layout.row()
col = row.c
|
pombredanne/unuk
|
src/unuk/benchmarks/__init__.py
|
Python
|
bsd-3-clause
| 63
| 0
|
import http
|
benc
|
hmark
from unuk.benchmarks.base import runtests
|
gloryofrobots/obin
|
arza/compile/compiler.py
|
Python
|
gpl-2.0
| 43,248
| 0.000694
|
__author__ = 'gloryofrobots'
from arza.compile.code.opcode import *
from arza.compile.parse import parser
from arza.compile import simplify
from arza.compile.parse import nodes
from arza.compile.parse.basic import IMPORT_NODES
from arza.compile.parse.nodes import (node_type, imported_name_to_s,
node_first, node_second, node_third, node_fourth,
node_children, is_empty_node)
from arza.compile.parse.node_type import *
from arza.compile.code.source import CodeSource, codeinfo, codeinfo_unknown, SourceInfo
from arza.misc import platform, strutil
from arza.runtime import error
from arza.types import space, api, plist, environment, symbol as symbols, string as strings
from arza.builtins import lang_names
# TODO REMOVE NIL as token and node_type
# TODO OPTIMISE STORE_LOCAL
def compile_error(compiler, code, node, message):
line = code.info.get_line(api.to_i(nodes.node_line(node)))
return error.throw(error.Errors.COMPILE_ERROR,
space.newtuple([
space.newstring(message),
space.newint(nodes.node_type(node)),
space.newstring_s(nodes.node_value_s(node)),
space.newtuple([space.newstring(u"line"), nodes.node_line(node),
space.newstring(u"column"), nodes.node_column(node)]),
space.newstring(line)
]))
class Compiler:
def __init__(self, process, env, path, src):
self.process = process
self.env = env
self.scopes = []
self.source_path = path
self.source = src
def info(node):
if is_empty_node(node):
return codeinfo_unknown()
return codeinfo(nodes.node_position(node), nodes.node_line(node), nodes.node_column(node))
########################
# SCOPES
########################
def _enter_scope(compiler):
new_scope = space.newscope()
compiler.scopes.append(new_scope)
def _exit_scope(compiler):
compiler.scopes.pop()
def _current_scope(compiler):
return compiler.scopes[-1]
def _previous_scope(compiler):
if len(compiler.scopes) == 1:
return None
return compiler.scopes[-2]
def _is_modifiable_binding(compiler, name):
scope = _current_scope(compiler)
if not platform.is_absent_index(scope.get_scope_local_index(name)):
return True
return False
def _declare_arguments(compiler, args_count, varargs):
_current_scope(compiler).declare_scope_arguments(args_count, varargs)
def _declare_reference(compiler, symbol):
assert space.issymbol(symbol)
scope = _current_scope(compiler)
idx = scope.get_scope_reference(symbol)
if platform.is_absent_index(idx):
idx = scope.add_scope_reference(symbol)
return idx
def _declare_static_reference(compiler, ref):
scope = _current_scope(compiler)
if scope.has_possible_static_reference(ref):
return
# print "REF", ref.name
scope.add_possible_static_reference(ref)
def _declare_literal(compiler, literal):
assert space.isany(literal)
scope = _current_scope(compiler)
idx = scope.get_scope_literal(literal)
if platform.is_absent_index(idx):
idx = scope.add_scope_literal(literal)
return idx
def _declare_string(compiler, literal):
assert space.isstring(literal)
scope = _current_scope(compiler)
idx = scope.get_string(literal)
if platform.is_absent_index(idx):
idx = scope.add_string(literal)
return idx
def _declare_char(compiler, literal):
assert space.ischar(literal)
scope = _current_scope(compiler)
idx = scope.get_char(literal)
if platform.is_absent_index(idx):
idx = scope.add_char(literal)
return idx
def _declare_float(compiler, literal):
assert space.isfloat(literal)
scope = _current_scope(compiler)
idx = scope.get_float(literal)
if platform.is_absent_index(idx):
idx = scope.add_float(literal)
return idx
def _declare_int(compiler, literal):
assert space.isint(literal)
scope = _current_scope(compiler)
idx = scope.get_int(literal)
if platform.is_absent_index(idx):
idx = scope.add_int(literal)
return idx
def _declare_symbol(compiler, sym):
assert space.issymbol(sym)
scope = _current_scope(compiler)
idx = scope.get_scope_symbol(sym)
if platform.is_absent_index(idx):
idx = scope.add_scope_symbol(sym)
return idx
def _declare_temporary(compiler):
scope = _current_scope(compiler)
return scope.add_temporary()
def _has_temporary(compiler, idx):
scope = _current_scope(compiler)
return scope.has_temporary(idx)
def _declare_local(compiler, symbol):
assert space.issymbol(symbol)
assert not api.isempty(symbol)
scope = _current_scope(compiler)
idx = scope.get_scope_local_index(symbol)
if not platform.is_absent_index(idx):
return idx
idx = scope.add_scope_local(symbol)
assert not platform.is_absent_index(idx)
return idx
def _declare_export(compiler, code, node):
name = _get_symbol_name(compiler, node)
scope = _current_scope(compiler)
if scope.has_export(name):
compile_error(compiler, code, node, u"Name has already exported")
scope.add_export(name)
def _declare_import(compiler, name, func):
assert space.issymbol(name)
assert not api.isempty(name)
scope = _current_scope(compiler)
idx = scope.get_import_index(name)
if not platform.is_absent_index(idx):
return idx
idx = scope.add_import(name, func)
assert not platform.is_absent_index(idx)
return idx
def _declare_function(compiler, code, node):
symbol = _get_symbol_name_or_empty(compiler.process, node)
scope = _current_scope(compiler)
idx = scope.get_scope_local_index(symbol)
if not platform.is_absent_index(idx):
compile_error(compiler, code, node, u"Name has already assigned")
idx = scope.add_scope_local(symbol)
scope.add_function(symbol, idx)
return idx
def _get_function_index(compiler, symbol):
scope = _current_scope(compiler)
idx = scope.get_function(symbol)
# TODO make two compiler passes
# non statement function like fun f-> end ()
if platform.is_absent_index(idx):
return _declare_local(compiler, symbol)
return idx
def _get_variable_index(compiler, code, node, name):
"""
return var_index, is_local_variable
"""
assert space.issymbol(name)
scope_id = 0
for scope in reversed(compiler.scopes):
idx = scope.get_scope_local_index(name)
if not platform.is_absent_index(idx):
if scope_id == 0:
return idx, True
else:
# TODO here can be optimisation where we can calculate number of scopes to find back variable
ref_id = _declare_reference(compiler, name)
return ref_id, False
scope_id += 1
ref_id = _declare_reference(compiler, name)
ref = environment.get_reference(compiler.env, name)
if space.isvoid(ref):
names_s = api.to_s(name)
# HACK for late binding of internal names in prelude
if not names_s.startswith(lang_names.PREFIX):
# for name in _current_scope(compiler).imports.keys():
# print name
return compile_error(compiler, code, node, u"Unreachable variable `%s`" % api.to_s(name))
else:
_declare_static_reference(compiler, ref)
return ref_id, False
# *********************************************************
# EMIT HELPERS ********************************************
# *********************************************************
def _emit_call(compiler, code, node, arg_count, funcname):
func = nodes.create_name_node_s(nodes.node_token(node), funcname)
_compile(compiler, code, func)
code.emit_1(CALL, arg_count, info(node))
def _emit_store_name(compiler, code, namenode):
name = _get_symbol_name(compiler, namenode)
# name = space.newsymbol_s(compiler.process, nodes.node_value_s(namenode))
_emit_store(compiler, code, name, namenode)
de-vri-es/qtile | libqtile/widget/khal_calendar.py | Python | mit | 5,419 | 0.000554
# -*- coding: utf-8 -*-
###################################################################
# This widget will display the next appointment on your calendar in
# the qtile status bar. Appointments within the "reminder" time will be
# highlighted. Authentication credentials are stored on disk.
#
# This widget uses the khal command line calendar utility available at
# https://github.com/geier/khal
#
# This widget also requires the dateutil.parser module.
# If you get a strange "AttributeError: 'module' object has no attribute
# GoogleCalendar" error, you are probably missing a module. Check
# carefully.
#
# Thanks to the creator of the YahooWeather widget (dmpayton). This code
# borrows liberally from that one.
#
# Copyright (c) 2016 by David R. Andersen <k0rx@RXcomm.net>
# New khal output format adjustment, 2016 Christoph Lassner
# Licensed under the Gnu Public License
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
###################################################################
from . import base
import datetime
import dateutil.parser
import subprocess
import string
from libqtile import utils
class KhalCalendar(base.ThreadedPollText):
"""Khal calendar widget
This widget will display the next appointment on your Khal calendar in the
qtile status bar. Appointments within the "reminder" time will be
highlighted.
"""
orientations = base.ORIENTATION_HORIZONTAL
defaults = [
(
'reminder_color',
'FF0000',
'color of calendar entries during reminder time'
),
('foreground', 'FFFF33', 'default foreground color'),
('remindertime', 10, 'reminder time in minutes'),
('lookahead', 7, 'days to look ahead in the calendar'),
]
def __init__(self, **config):
base.ThreadedPollText.__init__(self, **config)
self.add_defaults(KhalCalendar.defaults)
self.text = 'Calendar not initialized.'
self.default_foreground = self.foreground
def poll(self):
# get today and tomorrow
now = datetime.datetime.now()
tomorrow = now + datetime.timedelta(days=1)
# get reminder time in datetime format
remtime = datetime.timedelta(minutes=self.remindertime)
# parse khal output for the next seven days
# and get the next event
args = ['khal', 'agenda', '--days', str(self.lookahead)]
cal = subprocess.Popen(args, stdout=subprocess.PIPE)
output = cal.communicate()[0].decode('utf-8')
output = output.split('\n')
if len(output) < 2:
return 'No appointments scheduled'
date = 'unknown'
starttime = None
endtime = None
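# khal's agenda output interleaves date headers ("Today:", "Tomorrow:", or a
# bare date) with "HH:MM-HH:MM description" event lines; track the current
# date header and parse each event line against it.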
for i in range(len(output)): # pylint: disable=consider-using-enumerate
if output[i].strip() == '':
continue
try:
starttime = dateutil.parser.parse(date + ' ' + output[i][:5],
ignoretz=True)
endtime = dateutil.parser.parse(date + ' ' + output[i][6:11],
ignoretz=True)
except ValueError:
try:
if output[i] == 'Today:':
date = str(now.month) + '/' + str(now.day) + '/' + \
str(now.year)
elif output[i] == 'Tomorrow:':
date = str(tomorrow.month) + '/' + str(tomorrow.day) + \
'/' + str(tomorrow.year)
else:
dateutil.parser.parse(output[i])
date = output[i]
continue
except ValueError:
pass # no date.
if endtime is not None and endtime > now:
data = date.replace(':', '') + ' ' + output[i]
break
else:
data = 'No appointments in next ' + \
str(self.lookahead) + ' days'
# get rid of any garbage in appointment added by khal
data = ''.join(filter(lambda x: x in string.printable, data))
# colorize the event if it is within reminder time
if starttime is not None and endtime is not None and \
(starttime - remtime <= now) and (endtime > now):
self.foreground = utils.hex(self.reminder_color)
else:
self.foreground = self.default_foreground
return data
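# A minimal sketch of wiring this widget into a qtile config; the screen and
# bar setup below are illustrative assumptions, not part of this module:
#
#   from libqtile import bar
#   from libqtile.config import Screen
#   from libqtile.widget.khal_calendar import KhalCalendar
#
#   screens = [
#       Screen(top=bar.Bar([KhalCalendar(remindertime=15, lookahead=3)], 24)),
#   ]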
BGmi/BGmi | tests/downloader/test_aria2.py | Python | mit | 609 | 0
from unittest import mock
from bgmi.downloader.aria2_rpc import Aria2DownloadRPC
_token = "token:2333"
@mock.patch("bgmi.config.ARIA2_RPC_URL", "https://uuu")
@mock.patch("bgmi.config.ARIA2_RPC_TOKEN", "token:t")
def test_use_config():
with mock.patch("xmlrpc.client.ServerPr
|
oxy") as m1:
m1.return_value.aria2.getVersion.return_value = {"version
|
": "1.19.1"}
Aria2DownloadRPC()
m1.assert_has_calls(
[
mock.call("https://uuu"),
mock.call("https://uuu"),
mock.call().aria2.getVersion("token:t"),
]
)
Glottotopia/aagd | moin/local/moin/build/lib.linux-x86_64-2.6/MoinMoin/security/textcha.py | Python | mit | 8,873 | 0.003043
# -*- coding: iso-8859-1 -*-
"""
MoinMoin - Text CAPTCHAs
This is just asking some (admin configured) questions and
checking if the answer is as expected. It is up to the wiki
admin to setup questions that a bot can not easily answer, but
humans can. It is recommended to setup SITE SPECIFIC questions
and not to share the questions with other sites (if everyone
asks the same questions / expects the same answers, spammers
could adapt to that).
TODO:
* roundtrip the question in some other way:
* make sure a q/a pair in the POST is for the q in the GET before
* make some nice CSS
* make similar changes to GUI editor
@copyright: 2007 by MoinMoin:ThomasWaldmann
@license: GNU GPL, see COPYING for details.
"""
import re
import random
from time import time
from MoinMoin import log
logging = log.getLogger(__name__)
from MoinMoin import wikiutil
from werkzeug.security import safe_str_cmp as safe_str_equal
from MoinMoin.support.python_compatibility import hmac_new
SHA1_LEN = 40 # length of hexdigest
TIMESTAMP_LEN = 10 # length of timestamp
class TextCha(object):
""" Text CAPTCHA support """
def __init__(self, request, question=None):
""" Initialize the TextCha.
@param request: the request object
@param question: see _init_qa()
"""
self.request = request
self.user_info = request.user.valid and request.user.name or request.remote_addr
self.textchas = self._get_textchas()
if self.textchas:
self.secret = request.cfg.secrets["security/textcha"]
self.expiry_time = request.cfg.textchas_expiry_time
self._init_qa(question)
def _get_textchas(self):
""" get textchas from the wiki config for the user's language (or default_language or en) """
request = self.request
groups = request.groups
cfg = request.cfg
user = request.user
disabled_group = cfg.textchas_disabled_group
if disabled_group and user.name and user.name in groups.get(disabled_group, []):
return None
textchas = cfg.textchas
if textchas:
lang = user.language or request.lang
logging.debug(u"TextCha: user.language == '%s'." % lang)
if lang not in textchas:
lang = cfg.language_default
logging.debug(u"TextCha: fallback to language_default == '%s'." % lang)
if lang not in textchas:
logging.error(u"TextCha: The textchas do not have content for language_default == '%s'! Falling back to English." % lang)
lang = 'en'
if lang not in textchas:
logging.error(u"TextCha: The textchas do not have content for 'en', auto-disabling textchas!")
cfg.textchas = None
lang = None
else:
lang = None
if lang is None:
return None
else:
logging.debug(u"TextCha: using lang = '%s'" % lang)
return textchas[lang]
def _compute_signature(self, question, timestamp):
signature = u"%s%d" % (question, timestamp)
return hmac_new(self.secret, signature.encode('utf-8')).hexdigest()
def _init_qa(self, question=None):
""" Initialize the question / answer.
@param question: If given, the given question will be used.
If None, a new question will be generated.
"""
if self.is_enabled():
if question is None:
self.question = random.choice(self.textchas.keys())
else:
self.question = question
try:
self.answer_regex = self.textchas[self.question]
self.answer_re = re.compile(self.answer_regex, re.U|re.I)
except KeyError:
# this question does not exist, thus there is no answer
self.answer_regex = ur"[Never match for cheaters]"
self.answer_re = None
logging.warning(u"TextCha: Non-existing question '%s'. User '%s' trying to cheat?" % (
self.question, self.user_info))
except re.error:
logging.error(u"TextCha: Invalid regex in answer for question '%s'" % self.question)
self._init_qa()
def is_enabled(self):
""" check if textchas are enabled.
They can be disabled for all languages if you use textchas = None or = {},
also they can be disabled for some specific language, like:
textchas = {
'en': {
'some question': 'some answer',
# ...
},
'de': {}, # having no questions for 'de' means disabling textchas for 'de'
# ...
}
"""
return not not self.textchas # we don't want to return the dict
def check_answer(self, given_answer, timestamp, signature):
""" check if the given answer to the question is correct and within the correct timeframe"""
if self.is_enabled():
if self.answer_re is not None:
success = self.answer_re.match(given_answer.strip()) is not None
else:
# someone trying to cheat!?
success = False
if not timestamp or timestamp + self.expiry_time < time():
success = False
try:
if not safe_str_equal(self._compute_signature(self.question, timestamp), signature):
success = False
except TypeError:
success = False
success_status = success and u"success" or u"failure"
logging.info(u"TextCha: %s (u='%s', a='%s', re='%s', q='%s')" % (
success_status,
self.user_info,
given_answer,
self.answer_regex,
self.question,
))
return success
else:
return True
def _make_form_values(self, question, given_answer):
timestamp = time()
question_form = "%s %d%s" % (
wikiutil.escape(question, True),
timestamp,
self._compute_signature(question, timestamp)
)
given_answer_form = wikiutil.escape(given_answer, True)
return question_form, given_answer_form
def _extract_form_values(self, form=None):
if form is None:
form = self.request.form
question = form.get('textcha-question')
signature = None
timestamp = None
if question:
# the signature is the last SHA1_LEN bytes of the question
signature = question[-SHA1_LEN:]
# operate on the remainder
question = question[:-SHA1_LEN]
try:
# the timestamp is the next TIMESTAMP_LEN bytes
timestamp = int(question[-TIMESTAMP_LEN:])
except ValueError:
pass
# there is a space between the timestamp and the question, so take away 1
question = question[:-TIMESTAMP_LEN - 1]
given_answer = form.get('textcha-answer', u'')
return question, given_answer, timestamp, signature
def render(self, form=None):
""" Checks if textchas are enabled and returns HTML for one,
or an empty string if they are not enabled.
@return: unicode result html
"""
if self.is_enabled():
question, given_answer, timestamp, signature = self._extract_form_values(form)
if question is None:
question = self.question
question_form, given_answer_form = self._make_form_values(question, given_answer)
result
skarra/PRS | libs/sqlalchemy/__init__.py | Python | agpl-3.0 | 4,621 | 0
# sqlalchemy/__init__.py
# Copyright (C) 2005-2019 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from . import util as _util # noqa
from .inspection import inspect # noqa
from .schema import BLANK_SCHEMA # noqa
from .schema import CheckConstraint # noqa
from .schema import Column # noqa
from .schema import ColumnDefault # noqa
from .schema import Constraint # noqa
from .schema import DDL # noqa
from .schema import DefaultClause # noqa
from .schema import FetchedValue # noqa
from .schema import ForeignKey # noqa
from .schema import ForeignKeyConstraint # noqa
from .schema import Index # noqa
from .schema import MetaData # noqa
from .schema import PassiveDefault # noqa
from .schema import PrimaryKeyConstraint # noqa
from .schema import Sequence # noqa
from .schema import Table # noqa
from .schema import ThreadLocalMetaData # noqa
from .schema import UniqueConstraint # noqa
from .sql import alias # noqa
from .sql import all_ # noqa
from .sql import and_ # noqa
from .sql import any_ # noqa
from .sql import asc # noqa
from .sql import between # noqa
from .sql import bindparam # noqa
from .sql import case # noqa
from .sql import cast # noqa
from .sql import collate # noqa
from .sql import column # noqa
from .sql import delete # noqa
from .sql import desc # noqa
from .sql import distinct # noqa
from .sql import except_ # noqa
from .sql import except_all # noqa
from .sql import exists # noqa
from .sql import extract # noqa
from .sql import false # noqa
from .sql import func # noqa
from .sql import funcfilter # noqa
from .sql import insert # noqa
from .sql import intersect # noqa
from .sql import intersect_all # noqa
from .sql import join # noqa
from .sql import lateral # noqa
from .sql import literal # noqa
from .sql import literal_column # noqa
from .sql import modifier # noqa
from .sql import not_ # noqa
from .sql import null # noqa
from .sql import nullsfirst # noqa
from .sql import nullslast # noqa
from .sql import or_ # noqa
from .sql import outerjoin # noqa
from .sql import outparam # noqa
from .sql import over # noqa
from .sql import select # noqa
from .sql import subquery # noqa
from .sql import table # noqa
from .sql import tablesample # noqa
from .sql import text # noqa
from .sql import true # noqa
from .sql import tuple_ # noqa
from .sql import type_coerce # noqa
from .sql import union # noqa
from .sql import union_all # noqa
from .sql import update # noqa
from .sql import within_group # noqa
from .types import ARRAY # noqa
from .types import BIGINT # noqa
from .types import BigInteger # noqa
from .types import BINARY  # noqa
from .types import Binary # noqa
from .types import BLOB # noqa
from .types import BOOLEAN # noqa
from .types import Boolean  # noqa
from .types import CHAR # noqa
from .types import CLOB # noqa
from .types import DATE # noqa
from .types import Date # noqa
from .types import DATETIME # noqa
from .types import DateTime # noqa
from .types import DECIMAL # noqa
from .types import Enum # noqa
from .types import FLOAT # noqa
from .types import Float # noqa
from .types import INT # noqa
from .types import INTEGER # noqa
from .types import Integer # noqa
from .types import Interval # noqa
from .types import JSON # noqa
from .types import LargeBinary # noqa
from .types import NCHAR # noqa
from .types import NUMERIC # noqa
from .types import Numeric # noqa
from .types import NVARCHAR # noqa
from .types import PickleType # noqa
from .types import REAL # noqa
from .types import SMALLINT # noqa
from .types import SmallInteger # noqa
from .types import String # noqa
from .types import TEXT # noqa
from .types import Text # noqa
from .types import TIME # noqa
from .types import Time # noqa
from .types import TIMESTAMP # noqa
from .types import TypeDecorator # noqa
from .types import Unicode # noqa
from .types import UnicodeText # noqa
from .types import VARBINARY # noqa
from .types import VARCHAR # noqa
from .engine import create_engine # noqa nosort
from .engine import engine_from_config # noqa nosort
__version__ = "1.3.3"
def __go(lcls):
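# Build __all__ from the module namespace, skipping private names and
# imported submodules, then resolve lazy cross-module dependencies.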
global __all__
from . import events # noqa
from . import util as _sa_util
import inspect as _inspect
__all__ = sorted(
name
for name, obj in lcls.items()
if not (name.startswith("_") or _inspect.ismodule(obj))
)
_sa_util.dependencies.resolve_all("sqlalchemy")
__go(locals())
electblake/python-lipsumation | lipsumation/engines/admin.py | Python | mit | 119 | 0.008403
from django.contrib import admin
# Register your models here.
from .models import Engine
admin.site.register(Engine)
alexm92/sentry | src/sentry/web/frontend/account_notification.py | Python | bsd-3-clause | 3,360 | 0.001488
from __future__ import absolute_import
import itertools
from django.contrib import messages
from django.core.context_processors import csrf
from django.db import transaction
from django.http import HttpResponseRedirect
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_protect
from django.utils.decorators import method_decorator
from sudo.decorators import sudo_required
from sentry.models import (
Project, ProjectStatus
)
from sentry.plugins import plugins
from sentry.web.forms.accounts import (
ProjectEmailOptionsForm, NotificationSettingsForm,
NotificationReportSettingsForm
)
from sentry.web.decorators import login_required
from sentry.web.frontend.base import BaseView
from sentry.web.helpers import render_to_response
from sentry.utils.auth import get_auth_providers
from sentry.utils.safe import safe_execute
class AccountNotificationView(BaseView):
notification_settings_form = NotificationSettingsForm
@method_decorator(csrf_protect)
@method_decorator(never_cache)
@method_decorator(login_required)
@method_decorator(sudo_required)
@method_decorator(transaction.atomic)
def handle(self, request):
settings_form = self.notification_settings_form(
request.user, request.POST or None)
reports_form = NotificationReportSettingsForm(
request.user, request.POST or None,
prefix='reports')
project_list = list(Project.objects.filter(
team__organizationmemberteam__organizationmember__user=request.user,
team__organizationmemberteam__is_active=True,
status=ProjectStatus.VISIBLE,
).distinct())
project_forms = [
(project, ProjectEmailOptionsForm(
project, request.user,
request.POST or None,
prefix='project-%s' % (project.id,)
))
for project in sorted(project_list, key=lambda x: (
x.organization.name, x.name))
]
ext_forms = []
for plugin in plugins.all():
for form in safe_execute(plugin.get_notification_forms, _with_transaction=False) or ():
form = safe_execute(form, plugin, request.user, request.POST or None, prefix=plugin.slug,
_with_transaction=False)
if not form:
continue
ext_forms.append(form)
if request.POST:
all_forms = list(itertools.chain(
[settings_form, reports_form],
ext_forms,
(f for _, f in project_forms)
))
if all(f.is_valid() for f in all_forms):
for form in all_forms:
form.save()
messages.add_message(request, messages.SUCCESS, 'Your settings were saved.')
return HttpResponseRedirect(request.path)
context = csrf(request)
context.update({
'settings_form': settings_form,
'project_forms': project_forms,
'reports_form': reports_form,
'ext_forms': ext_forms,
'page': 'notifications',
'AUTH_PROVIDERS': get_auth_providers(),
})
return render_to_response('sentry/account/notifications.html', context, request)
antoinecarme/pyaf | tests/model_control/detailed/transf_Anscombe/model_control_one_enabled_Anscombe_MovingMedian_Seasonal_Minute_LSTM.py | Python | bsd-3-clause | 161 | 0.049689
import tests.model_control.test_ozone_custom_models_enabled as testmod
testmod.build_model(['Anscombe'], ['MovingMedian'], ['Seasonal_Minute'], ['LSTM'])
pytroll/satpy | satpy/tests/reader_tests/test_avhrr_l0_hrpt.py | Python | gpl-3.0 | 8,951 | 0.001452
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2009-2021 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Tests for the hrpt reader."""
import os
import unittest
from contextlib import suppress
from tempfile import NamedTemporaryFile
from unittest import mock
import numpy as np
import xarray as xr
from satpy.readers.hrpt import HRPTFile, dtype
from satpy.tests.reader_tests.test_avhrr_l1b_gaclac import PygacPatcher
from satpy.tests.utils import make_dataid
NUMBER_OF_SCANS = 10
SWATH_WIDTH = 2048
class TestHRPTWithFile(unittest.TestCase):
"""Test base class with writing a fake file."""
def setUp(self) -> None:
"""Set up the test case."""
test_data = np.ones(NUMBER_OF_SCANS, dtype=dtype)
# Channel 3a
test_data["id"]["id"][:5] = 891
# Channel 3b
test_data["id"]["id"][5:] = 890
with NamedTemporaryFile(mode='w+', suffix='.hmf', delete=False) as hrpt_file:
self.filename = hrpt_file.name
test_data.tofile(hrpt_file)
def tearDown(self) -> None:
"""Tear down the test case."""
with suppress(OSError):
os.remove(self.filename)
def _get_dataset(self, dataset_id):
fh = HRPTFile(self.filename, {}, {})
return fh.get_dataset(dataset_id, {})
class TestHRPTReading(TestHRPTWithFile):
"""Test case for reading hrpt data."""
def test_reading(self):
"""Test that data is read."""
fh = HRPTFile(self.filename, {}, {})
assert fh._data is not None
class TestHRPTGetUncalibratedData(TestHRPTWithFile):
"""Test case for reading uncalibrated hrpt data."""
def _get_channel_1_counts(self):
return self._get_dataset(make_dataid(name='1', calibration='counts'))
def test_get_dataset_returns_a_dataarray(self):
"""Test that get_dataset returns a dataarray."""
result = self._get_channel_1_counts()
assert isinstance(result, xr.DataArray)
def test_platform_name(self):
"""Test that the platform name is correct."""
result = self._get_channel_1_counts()
assert result.attrs['platform_name'] == 'NOAA 19'
def test_no_calibration_values_are_1(self):
"""Test that the values of non-calibrated data is 1."""
result = self._get_channel_1_counts()
assert (result.values == 1).all()
def fake_calibrate_solar(data, *args, **kwargs):
"""Fake calibratio
|
n."""
del args, kwargs
return data * 25.43 + 3
def fake_calibrate_thermal(data, *args, **kwargs):
"""Fake calibration."""
del args, kwargs
return data * 35.43 + 3
class CalibratorPatcher(PygacPatcher):
"""Patch pygac."""
def setUp(self) -> None:
"""Patch pygac's calibration."""
super().setUp()
# Import things to patch here to make them patchable. Otherwise another function
# might import it first which would prevent a successful patch.
from pygac.calibration import Calibrator, calibrate_solar, calibrate_thermal
self.Calibrator = Calibrator
self.calibrate_thermal = calibrate_thermal
self.calibrate_thermal.side_effect = fake_calibrate_thermal
self.calibrate_solar = calibrate_solar
self.calibrate_solar.side_effect = fake_calibrate_solar
class TestHRPTWithPatchedCalibratorAndFile(CalibratorPatcher, TestHRPTWithFile):
"""Test case with patched calibration routines and a synthetic file."""
def setUp(self) -> None:
"""Set up the test case."""
CalibratorPatcher.setUp(self)
TestHRPTWithFile.setUp(self)
def tearDown(self):
"""Tear down the test case."""
CalibratorPatcher.tearDown(self)
TestHRPTWithFile.tearDown(self)
class TestHRPTGetCalibratedReflectances(TestHRPTWithPatchedCalibratorAndFile):
"""Test case for reading calibrated reflectances from hrpt data."""
def _get_channel_1_reflectance(self):
"""Get the channel 1 reflectance."""
dataset_id = make_dataid(name='1', calibration='reflectance')
return self._get_dataset(dataset_id)
def test_calibrated_reflectances_values(self):
"""Test the calibrated reflectance values."""
result = self._get_channel_1_reflectance()
np.testing.assert_allclose(result.values, 28.43)
class TestHRPTGetCalibratedBT(TestHRPTWithPatchedCalibratorAndFile):
"""Test case for reading calibrated brightness temperature from hrpt data."""
def _get_channel_4_bt(self):
"""Get the channel 4 bt."""
dataset_id = make_dataid(name='4', calibration='brightness_temperature')
return self._get_dataset(dataset_id)
def test_calibrated_bt_values(self):
"""Test the calibrated reflectance values."""
result = self._get_channel_4_bt()
np.testing.assert_allclose(result.values, 38.43)
class TestHRPTChannel3(TestHRPTWithPatchedCalibratorAndFile):
"""Test case for reading calibrated brightness temperature from hrpt data."""
def _get_channel_3b_bt(self):
"""Get the channel 4 bt."""
dataset_id = make_dataid(name='3b', calibration='brightness_temperature')
return self._get_dataset(dataset_id)
def _get_channel_3a_reflectance(self):
"""Get the channel 4 bt."""
dataset_id = make_dataid(name='3a', calibration='reflectance')
return self._get_dataset(dataset_id)
def _get_channel_3a_counts(self):
"""Get the channel 4 bt."""
dataset_id = make_dataid(name='3a', calibration='counts')
return self._get_dataset(dataset_id)
def test_channel_3b_masking(self):
"""Test that channel 3b is split correctly."""
result = self._get_channel_3b_bt()
assert np.isnan(result.values[:5]).all()
assert np.isfinite(result.values[5:]).all()
def test_channel_3a_masking(self):
"""Test that channel 3a is split correctly."""
result = self._get_channel_3a_reflectance()
assert np.isnan(result.values[5:]).all()
assert np.isfinite(result.values[:5]).all()
def test_uncalibrated_channel_3a_masking(self):
"""Test that channel 3a is split correctly."""
result = self._get_channel_3a_counts()
assert np.isnan(result.values[5:]).all()
assert np.isfinite(result.values[:5]).all()
class TestHRPTNavigation(TestHRPTWithFile):
"""Test case for computing HRPT navigation."""
def setUp(self) -> None:
"""Set up the test case."""
super().setUp()
self.fake_lons = np.ones((NUMBER_OF_SCANS, SWATH_WIDTH))
self.fake_lats = np.ones((NUMBER_OF_SCANS, SWATH_WIDTH)) * 2
def _prepare_mocks(self, Orbital, SatelliteInterpolator, get_lonlatalt):
"""Prepare the mocks."""
Orbital.return_value.get_position.return_value = mock.MagicMock(), mock.MagicMock()
get_lonlatalt.return_value = (mock.MagicMock(), mock.MagicMock(), mock.MagicMock())
SatelliteInterpolator.return_value.interpolate.return_value = self.fake_lons, self.fake_lats
@mock.patch.multiple('satpy.readers.hrpt',
Orbital=mock.DEFAULT,
compute_pixels=mock.DEFAULT,
get_lonlatalt=mock.DEFAULT,
SatelliteInterpolator=mock.DEFAULT)
def test_longitudes_are_returned(self, Orbital, compute_pixels, get_lonlatalt, SatelliteInterpolator):
"""Check that latitudes are returned properly."""
self._prepare_mocks(Orbital, SatelliteInterpolator, get_lonlatalt)
dataset_id = make_dataid(name='longitude')
Nurdok/pep257 | setup.py | Python | mit | 1,105 | 0
from __future__ import with_statement
import os
from setuptools import setup
# Do not update the version manually - it is managed by `bumpversion`.
version = '2.0.1rc'
setup(
name='pydocstyle',
version=version,
description="Python docstring style checker",
long_description=open('README.rst').read(),
license='MIT',
author='Amir Rachum',
author_email='amir@rachum.com',
url='https://github.com/PyCQA/pydocstyle/',
classifiers=[
'Intended Audience :: Developers',
'Environment :: Console',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
],
keywords='pydocstyle, PEP 257, pep257, PEP 8, pep8, docstrings',
packages=('pydocstyle',),
package_dir={'': 'src'},
package_data={'pydocstyle': ['data/*.txt']},
install_requires=[
'snowballstemmer',
'six',
],
entry_points={
'console_scripts': [
'pydocstyle = pydocstyle.cli:main',
],
},
)
mzdaniel/oh-mainline | vendor/packages/kombu/funtests/tests/test_redis.py | Python | agpl-3.0 | 399 | 0
from funtests import transport
class test_redis(transport.TransportCase):
transport = "redis"
prefix = "redis"
def after_connect(self, connection):
client = connection.channel().client
client.info()
def test_cant_connect_raises_connection_error(self):
conn = self.get_connection(port=65534)
self.assertRaises(conn.connection_errors, conn.connect)
82Flex/DCRM | suit/sortables.py | Python | agpl-3.0 | 6,638 | 0.000452
from copy import deepcopy, copy
from django.contrib import admin
from django.contrib.admin.views.main import ChangeList
from django.contrib.contenttypes.admin import GenericTabularInline, GenericStackedInline
from django.forms import ModelForm, NumberInput
from django.db import models
class SortableModelAdminBase(object):
"""
Base class for SortableTabularInline and SortableModelAdmin
"""
sortable = 'order'
class Media:
js = ('suit/js/suit.sortables.js',)
class SortableListForm(ModelForm):
"""
Just Meta holder class
"""
class Meta:
widgets = {
'order': NumberInput(
attrs={'class': 'hidden-xs-up suit-sortable'})
}
class SortableChangeList(ChangeList):
"""
Class that forces ordering by sortable param only
"""
def get_ordering(self, request, queryset):
if self.model_admin.sortable_is_enabled():
return [self.model_admin.sortable, '-' + self.model._meta.pk.name]
return super(SortableChangeList, self).get_ordering(request, queryset)
class SortableTabularInlineBase(SortableModelAdminBase):
"""
Sortable tabular inline
"""
def __init__(self, *args, **kwargs):
super(SortableTabularInlineBase, self).__init__(*args, **kwargs)
self.ordering = (self.sortable,)
self.fields = self.fields or []
if self.fields and self.sortable not in self.fields:
self.fields = list(self.fields) + [self.sortable]
def formfield_for_dbfield(self, db_field, **kwargs):
if db_field.name == self.sortable:
kwargs['widget'] = SortableListForm.Meta.widgets['order']
return super(SortableTabularInlineBase, self).formfield_for_dbfield(
db_field, **kwargs)
class SortableTabularInline(SortableTabularInlineBase, admin.TabularInline):
pass
class SortableGenericTabularInline(SortableTabularInlineBase,
GenericTabularInline):
pass
class SortableStackedInlineBase(SortableModelAdminBase):
"""
Sortable stacked inline
"""
def __init__(self, *args, **kwargs):
super(SortableStackedInlineBase, self).__init__(*args, **kwargs)
self.ordering = (self.sortable,)
def get_fieldsets(self, *args, **kwargs):
"""
Iterate all fieldsets and make sure sortable is in the first fieldset
Remove sortable from every other fieldset, if by some reason someone
has added it
"""
fieldsets = super(SortableStackedInlineBase, self).get_fieldsets(*args, **kwargs)
sortable_added = False
for fieldset in fieldsets:
for line in fieldset:
if not line or not isinstance(line, dict):
continue
fields = line.get('fields')
if self.sortable in fields:
fields.remove(self.sortable)
# Add sortable field always as first
if not sortable_added:
fields.insert(0, self.sortable)
sortable_added = True
break
return fieldsets
def formfield_for_dbfield(self, db_field, **kwargs):
if db_field.name == self.sortable:
kwargs['widget'] = deepcopy(SortableListForm.Meta.widgets['order'])
kwargs['widget'].attrs['class'] += ' suit-sortable-stacked'
kwargs['widget'].attrs['rowclass'] = ' suit-sortable-stacked-row'
return super(SortableStackedInlineBase, self).formfield_for_dbfield(db_field, **kwargs)
class SortableStackedInline(SortableStackedInlineBase, admin.StackedInline):
pass
class SortableGenericStackedInline(SortableStackedInlineBase,
GenericStackedInline):
pass
class SortableModelAdmin(SortableModelAdminBase, admin.ModelAdmin):
"""
Sortable change list
"""
def __init__(self, *args, **kwargs):
super(SortableModelAdmin, self).__init__(*args, **kwargs)
# Keep originals for restore
self._original_ordering = copy(self.ordering)
self._original_list_display = copy(self.list_display)
self._original_list_editable = copy(self.list_editable)
self._original_exclude = copy(self.exclude)
self._original_list_per_page = self.list_per_page
self.enable_sortable()
def merge_form_meta(self, form):
"""
Prepare Meta class with order field widget
"""
if not getattr(form, 'Meta', None):
form.Meta = SortableListForm.Meta
if not getattr(form.Meta, 'widgets', None):
form.Meta.widgets = {}
form.Meta.widgets[self.sortable] = SortableListForm.Meta.widgets[
'order']
def get_changelist_form(self, request, **kwargs):
form = super(SortableModelAdmin, self).get_changelist_form(request,
**kwargs)
self.merge_form_meta(form)
return form
def get_changelist(self, request, **kwargs):
return SortableChangeList
def enable_sortable(self):
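# Use a large page size so (almost) the whole changelist is visible on one
# page and rows can be dragged across the full list.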
self.list_per_page = 500
self.ordering = (self.sortable,)
if self.list_display and self.sortable not in self.list_display:
self.list_display = list(self.list_display) + [self.sortable]
self.list_editable = self.list_editable or []
if self.sortable not in self.list_editable:
self.list_editable = list(self.list_editable) + [self.sortable]
self.exclude = self.exclude or []
if self.sortable not in self.exclude:
self.exclude = list(self.exclude) + [self.sortable]
def disable_sortable(self):
if not self.sortable_is_enabled():
return
self.ordering = self._original_ordering
self.list_display = self._original_list_display
self.list_editable = self._original_list_editable
self.exclude = self._original_exclude
self.list_per_page = self._original_list_per_page
def sortable_is_enabled(self):
return self.list_display and self.sortable in self.list_display
def save_model(self, request, obj, form, change):
if not obj.pk:
max_order = obj.__class__.objects.aggregate(
models.Max(self.sortable))
try:
next_order = max_order['%s__max' % self.sortable] + 1
except TypeError:
next_order = 1
setattr(obj, self.sortable, next_order)
super(SortableModelAdmin, self).save_model(request, obj, form, change)
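# A minimal usage sketch for an admin.py; `Category` and its integer `order`
# field are hypothetical, not defined in this module:
#
#   from django.contrib import admin
#   from suit.sortables import SortableModelAdmin
#
#   class CategoryAdmin(SortableModelAdmin):
#       sortable = 'order'        # name of the ordering IntegerField
#       list_display = ('name',)  # 'order' is appended automatically
#
#   admin.site.register(Category, CategoryAdmin)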
tensorflow/tensorflow | tensorflow/python/pywrap_dlopen_global_flags.py | Python | apache-2.0 | 1,919 | 0.003127
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""If possible, exports all symbols with RTLD_GLOBAL.
Note that this file is only imported by pywrap_tensorflow.py if this is a static
build (meaning there is no explicit framework cc_binary shared object dependency
of _pywrap_tensorflow_internal.so). For regular (non-static) builds, RTLD_GLOBAL
is not necessary, since the dynamic dependencies of custom/contrib ops are
explicit.
"""
import ctypes
import sys
# On UNIX-based platforms, pywrap_tensorflow is a SWIG-generated python library
# that dynamically loads _pywrap_tensorflow.so. The default mode for loading
# keeps all the symbol private and not visible to other libraries that may be
# loaded. Setting the mode to RTLD_GLOBAL to make the symbols visible, so that
# custom op libraries imported using `tf.load_op_library()` can access symbols
# defined in _pywrap_tensorflow.so.
_use_rtld_global = (hasattr(sys, 'getdlopenflags')
and hasattr(sys, 'setdlopenflags'))
if _use_rtld_global:
_default_dlopen_flags = sys.getdlopenflags()
def set_dlopen_flags():
if _use_rtld_global:
sys.setdlopenflags(_default_dlopen_flags | ctypes.RTLD_GLOBAL)
def reset_dlopen_flags():
if _use_rtld_global:
sys.setdlopenflags(_default_dlopen_flags)
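# A minimal usage sketch (hypothetical call site; the real one lives in
# pywrap_tensorflow.py, which imports this module only for static builds):
#
#   import pywrap_dlopen_global_flags as dlopen_flags
#   dlopen_flags.set_dlopen_flags()      # widen to RTLD_GLOBAL
#   import _pywrap_tensorflow_internal   # extension sees global symbols
#   dlopen_flags.reset_dlopen_flags()    # restore the default flags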
flaviogrossi/sockjs-cyclone | sockjs/cyclone/transports/htmlfile.py | Python | mit | 2,041 | 0
from cyclone.web import asynchronous
from sockjs.cyclone import proto
from sockjs.cyclone.transports import streamingbase
# HTMLFILE template
HTMLFILE_HEAD = r'''
<!doctype html>
<html><head>
<meta http-equiv="X-UA-Compatible" content="IE=edge" />
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
</head><body><h2>Don't panic!</h2>
<script>
document.domain = document.domain;
var c = parent.%s;
c.start();
function p(d) {c.message(d);};
window.onload = function() {c.stop();};
</script>
'''.strip()
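# Pad the head to roughly 1 KiB: browsers buffer the start of a streamed
# document and only begin incremental rendering once enough bytes arrive.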
HTMLFILE_HEAD += ' ' * (1024 - len(HTMLFILE_HEAD) + 14)
HTMLFILE_HEAD += '\r\n\r\n'
class HtmlFileTransport(streamingbase.StreamingTransportBase):
name = 'htmlfile'
def initialize(self, server):
super(HtmlFileTransport, self).initialize(server)
@asynchronous
def get(self, session_id):
# Start response
self.preflight()
self.handle_session_cookie()
self.disable_cache()
self.set_header('Content-Type', 'text/html; charset=UTF-8')
# Grab callback parameter
callback = self.get_argument('c', None)
if not callback:
self.write('"cal
|
lback" parameter required')
self.set_status(500)
self.finish()
return
self.write(HTMLFILE_HEAD % callback)
self.flush()
# Now try to attach to session
if not self._attach_session(session_id):
self.finish()
return
# Flush any pending messages
if self.session:
self.session.flush()
def connectionLost(self, reason):
self.session.delayed_close()
self._detach()
def send_pack(self, message):
# TODO: Just do escaping
msg = '<script>\np(%s);\n</script>\r\n' % proto.json_encode(message)
self.write(msg)
self.flush()
# Close connection based on amount of data transferred
if self.should_finish(len(msg)):
self._detach()
self.safe_finish()
alxgu/ansible | lib/ansible/modules/database/postgresql/postgresql_membership.py | Python | gpl-3.0 | 12,662 | 0.002369
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2019, Andrew Klychkov (@Andersson007) <aaklychkov@mail.ru>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'supported_by': 'community',
'status': ['preview']
}
DOCUMENTATION = r'''
---
module: postgresql_membership
short_description: Add or remove PostgreSQL roles from groups
description:
- Adds or removes PostgreSQL roles from groups (other roles)
U(https://www.postgresql.org/docs/current/role-membership.html).
- Users are roles with login privilege (see U(https://www.postgresql.org/docs/current/role-attributes.html) for more information).
- Groups are PostgreSQL roles usually without LOGIN privilege.
- "Common use case:"
- 1) add a new group (groups) by M(postgresql_user) module
U(https://docs.ansible.com/ansible/latest/modules/postgresql_user_module.html) with I(role_attr_flags=NOLOGIN)
- 2) grant them desired privileges by M(postgresql_privs) module
U(https://docs.ansible.com/ansible/latest/modules/postgresql_privs_module.html)
- 3) add desired PostgreSQL users to the new group (groups) by this module
version_added: '2.8'
options:
groups:
description:
- The list of groups (roles) that need to be granted to or revoked from I(target_roles).
required: yes
type: list
aliases:
- group
- source_role
- source_roles
target_roles:
description:
- The list of target roles (groups will be granted to them).
required: yes
type: list
aliases:
- target_role
- users
- user
fail_on_role:
description:
- If C(yes), fail when group or target_role doesn't exist. If C(no), just warn and continue.
default: yes
type: bool
state:
description:
- Membership state.
- I(state=present) implies the I(groups) must be granted to I(target_roles).
- I(state=absent) implies the I(groups) must be revoked from I(target_roles).
type: str
default: present
choices: [ absent, present ]
db:
description:
- Name of database to connect to.
type: str
aliases:
- login_db
session_role:
description:
- Switch to session_role after connecting.
The specified session_role must be a role that the current login_user is a member of.
- Permissions checking for SQL commands is carried out as though
the session_role were the one that had logged in originally.
type: str
notes:
- The default authentication assumes that you are either logging in as or
sudo'ing to the postgres account on the host.
- To avoid "Peer authentication failed for user postgres" error,
use postgres user as a I(become_user).
- This module uses psycopg2, a Python PostgreSQL database adapter. You must
ensure that psycopg2 is installed on the host before using this module.
- If the remote host is the PostgreSQL server (which is the default case), then
PostgreSQL must also be installed on the remote host.
- For Ubuntu-based systems, install the postgresql, libpq-dev, and python-psycopg2 packages
on the remote host before using this module.
requirements: [ psycopg2 ]
author:
- Andrew Klychkov (@Andersson007)
extends_documentation_fragment: postgres
'''
EXAMPLES = r'''
- name: Grant role read_only to alice and bob
postgresql_membership:
group: read_only
target_roles:
- alice
- bob
state: present
# you can also use target_roles: alice,bob,etc to pass the role list
- name: Revoke role read_only and exec_func from bob. Ignore if roles don't exist
postgresql_membership:
groups:
- read_only
- exec_func
target_role: bob
fail_on_role: no
state: absent
'''
RETURN = r'''
queries:
description: List of executed queries.
returned: always
type: str
sample: [ "GRANT \"user_ro\" TO \"alice\"" ]
granted:
description: Dict of granted groups and roles.
returned: if I(state=present)
type: dict
sample: { "ro_group": [ "alice", "bob" ] }
revoked:
description: Dict of revoked groups and roles.
returned: if I(state=absent)
type: dict
sample: { "ro_group": [ "alice", "bob" ] }
state:
description: Membership state that tried to be set.
returned: always
type: str
sample: "present"
'''
try:
import psycopg2
HAS_PSYCOPG2 = True
except ImportError:
HAS_PSYCOPG2 = False
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
from ansible.module_utils.database import SQLParseError, pg_quote_identifier
from ansible.module_utils.postgres import connect_to_db, postgres_common_argument_spec
from ansible.module_utils._text import to_native
from ansible.module_utils.six import iteritems
class PgMembership(object):
def __init__(self, module, cursor, groups, target_roles, fail_on_role):
self.module = module
self.cursor = cursor
self.target_roles = [r.strip() for r in target_roles]
self.groups = [r.strip() for r in groups]
self.executed_queries = []
self.granted = {}
self.revoked = {}
self.fail_on_role = fail_on_role
self.non_existent_roles = []
self.changed = False
self.__check_roles_exist()
def grant(self):
for group in self.groups:
self.granted[group] = []
for role in self.target_roles:
# If role is in a group now, pass:
if self.__check_membership(group, role):
continue
query = "GRANT %s TO %s" % ((pg_quote_identifier(group, 'role'),
(pg_quote_identifier(role, 'role'))))
self.changed = self.__exec_sql(query, ddl=True)
if self.changed:
self.granted[group].append(role)
return self.changed
def revoke(self):
for group in self.groups:
self.revoked[group] = []
for role in self.target_roles:
# If role is not in a group now, pass:
if not self.__check_membership(group, role):
continue
query = "REVOKE %s FROM %s" % ((pg_quote_identifier(group, 'role'),
(pg_quote_identifier(role, 'role'))))
self.changed = self.__exec_sql(query, ddl=True)
if self.changed:
self.revoked[group].append(role)
return self.changed
def __check_membership(self, src_role, dst_role):
query = ("SELECT ARRAY(SELECT b.rolname FROM "
"pg_catalog.pg_auth_members m "
"JOIN pg_catalog.pg_roles b ON (m.roleid = b.oid) "
"WHERE m.member = r.oid) "
"FROM pg_catalog.pg_roles r "
"WHERE r.rolname = '%s'" % dst_role)
res = self.__exec_sql(query, add_to_executed=False)
membership = []
if res:
membership = res[0][0]
if not membership:
return False
if src_role in membership:
return True
return False
def __check_roles_exist(self):
for group in self.groups:
if not self.__role_exists(group):
if self.fail_on_role:
self.module.fail_json(msg="Role %s does not exist" % group)
else:
self.module.warn("Role %s does not exist, pass" % group)
self.non_existent_roles.append(group)
for role in self.target_roles:
if not self.__role_exists(role):
if self.fail_on_role:
self.module.fail_json(msg="Role %s does not exist" % role)
else:
self.module.warn("Role %s does not exist, pass" % role)
if role not in self.groups:
self.non_existent_roles.append(role)
else:
if self.fail_on_role:
self.module.exit_json(msg="Ro
pombreda/ximenez | setup.py | Python | gpl-3.0 | 1,457 | 0
import os
from setuptools import setup
from setuptools import find_packages
version_file = 'VERSION.txt'
version = open(version_file).read().strip()
description_file = 'README.txt'
description = open(description_file).read().split('\n\n')[0].strip()
description = description.replace('\n', ' ')
long_description_file = os.path.join('doc', 'README.txt')
long_description = open(long_description_file).read().strip()
setup(
name='ximenez',
version=version,
packages=find_packages('src'),
namespace_packages=(),
package_dir={'': 'src'},
include_package_data=True,
zip_safe=False,
entry_points={
'console_scripts': ('ximenez=ximenez.xim:main', )
},
author='Damien Baty',
author_email='damien.baty@remove-me.gmail.com',
description=description,
long_description=long_description,
license='GNU GPL',
classifiers=['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: System',
'Topic :: Utilities'],
keywords='collector action plug-in plugin',
url='http://code.noherring.com/ximenez',
download_url='http://cheeseshop.python.org/pypi/ximenez',
)
rahul-c1/scrapy | scrapy/tests/test_djangoitem/__init__.py | Python | bsd-3-clause | 2,978 | 0
import os
from twisted.trial import unittest
from scrapy.contrib.djangoitem import DjangoItem, Field
from scrapy import optional_features
os.environ['DJANGO_SETTINGS_MODULE'] = 'scrapy.tests.test_djangoitem.settings'
if 'django' in optional_features:
from .models import Person, IdentifiedPerson
class BasePersonItem(DjangoItem):
django_model = Person
class NewFieldPersonItem(BasePersonItem):
other = Field()
class OverrideFieldPersonItem(BasePersonItem):
age = Field()
class IdentifiedPersonItem(DjangoItem):
django_model = IdentifiedPerson
class DjangoItemTest(unittest.TestCase):
def setUp(self):
if 'django' not in optional_features:
raise unittest.SkipTest("Django is not available")
def test_base(self):
i = BasePersonItem()
self.assertEqual(i.fields.keys(), ['age', 'name'])
def test_new_fields(self):
i = NewFieldPersonItem()
self.assertEqual(i.fields.keys(), ['age', 'other', 'name'])
def test_override_field(self):
i = OverrideFieldPersonItem()
self.assertEqual(i.fields.keys(), ['age', 'name'])
def test_custom_primary_key_field(self):
"""
Test that if a custom primary key exists, it is
in the field list.
"""
i = IdentifiedPersonItem()
self.assertEqual(i.fields.keys(), ['age', 'identifier', 'name'])
def test_save(self):
i = BasePersonItem()
self.assertEqual(i.fields.keys(), ['age', 'name'])
i['name'] = 'John'
i['age'] = '22'
person = i.save(commit=False)
self.assertEqual(person.name, 'John')
self.assertEqual(person.age, '22')
def test_override_save(self):
i = OverrideFieldPersonItem()
i['name'] = 'John'
# it is not obvious that "age" should be saved also, since it was
# redefined in child class
i['age'] = '22'
person = i.save(commit=False)
self.assertEqual(person.name, 'John')
self.assertEqual(person.age, '22')
def test_validation(self):
long_name = 'z' * 300
i = BasePersonItem(name=long_name)
self.assertFalse(i.is_valid())
self.assertEqual(set(i.errors), set(['age', 'name']))
i = BasePersonItem(name='John')
self.assertTrue(i.is_valid(exclude=['age']))
self.assertEqual({}, i.errors)
# once the item is validated, it does not validate again
i['name'] = long_name
self.assertTrue(i.is_valid())
def test_override_validation(self):
i = OverrideFieldPersonItem()
i['name'] = 'John'
self.assertFalse(i.is_valid())
i = OverrideFieldPersonItem()
i['name'] = 'John'
i['age'] = '22'
self.assertTrue(i.is_valid())
def test_default_field_values(self):
i = BasePersonItem()
person = i.save(commit=False)
self.assertEqual(person.name, 'Robot')
jermnelson/metadata-day-2013 | web.py | Python | gpl-2.0 | 3,972 | 0.004532
"""
Module for the Colorado Alliance of Research Libraries Metadata 2013
Presentation
Copyright (C) 2013 Jeremy Nelson
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
__author__ = "Jeremy Nelson"
import argparse
import datetime
import json
import os
import redis
from bottle import abort, request, route, run, static_file
from bottle import jinja2_view as view
from bottle import jinja2_template as template
from bottle import FlupFCGIServer
PROJECT_ROOT = os.path.split(os.path.abspath(__name__))[0]
PRESENTATION_INFO = json.load(open(os.path.join(PROJECT_ROOT,
'slides.json'),
'rb'))
SLIDES = PRESENTATION_INFO.get('slides')
DEMO_REDIS = redis.StrictRedis()
FLUP = False
@route('/metadata-day-2013/assets/<type_of:path>/<filename:path>')
def send_asset(type_of,filename):
local_path = os.path.join(PROJECT_ROOT,
"assets",
type_of,
filename)
if os.path.exists(local_path):
return static_file(filename,
root=os.path.join(PROJECT_ROOT,
"assets",
type_of))
@route("/metadata-day-2013/bibframe.html")
def bibframe():
return template('bibframe',
category='slides',
next_slide=SLIDES[2],
slide=SLIDES[1],
slides=SLIDES)
@route("/metadata-day-2013/bibframe-adr.html")
def bibframe_adr():
return template('bibframe-adr',
category='slides',
slide=SLIDES[-1],
slides=SLIDES)
@route("/metadata-day-2013/linked-data.html")
def linked_data():
return template('linked-data',
category='slides',
next_slide=SLIDES[1],
slide=SLIDES[0],
slides=SLIDES)
@route("/metadata-day-2013/marc-to-bibframe.html")
def marc_to_bibframe():
return template('marc-bibframe',
category='slides',
next_slide=SLIDES[3],
slide=SLIDES[2],
slides=SLIDES)
@route("/metadata-day-2013/mods-to-bibframe.html")
def mods_to_bibframe():
return template('mods-bibframe',
category='slides',
next_slide=SLIDES[4],
slide=SLIDES[3],
slides=SLIDES)
@route("/metadata-day-2013/resources.html")
def resources():
return template('resources',
category='home',
slides=SLIDES)
@route("/metadata-day-2013/")
def index():
return template('index',
category='home',
slides=SLIDES)
parser = argparse.ArgumentParser(
description='Run ADR Metadata Day 2013 Presentation')
parser.add_argument('mode',
help='Run in either prod (production) or dev (development)')
mode = parser.parse_args().mode
if mode == 'prod':
run(server=FlupFCGIServer,
host='0.0.0.0',
port=9010)
elif mode == 'dev':
run(host='0.0.0.0',
port=9010,
debug=True,
reloader=True)
else:
print("ERROR unknown run mode {0}".format(mode))
|
CAB-LAB/cablab-core
|
esdl/providers/test_provider.py
|
Python
|
gpl-3.0
| 1,856
| 0.003233
|
import numpy as np
from esdl.cube_provider import CubeSourceProvider
from esdl.cube_config import CubeConfig
class TestCubeSourceProvider(CubeSourceProvider):
"""
CubeSourceProvider implementation used for testing cube generation without any source files.
    The following usage generates a cube with two variables ``test_1`` and ``test_2``:
        cube-gen -c ./myconf.py ./mycube test:var=test_1 test:var=test_2
:param cube_config: Specifies the fixed layout and conventions used for the cube.
:param name: The provider's registration name. Defaults to ``"test"``.
:param var: Name of a (float32) variable which will be filled with random numbers.
"""
def __init__(self, cube_config: CubeConfig, name: str = 'test', var: str = 'test'):
super(TestCubeSourceProvider, self).__init__(cube_config, name)
self._variable_name = var
self._value = 0.0
def prepare(self):
pass
@property
def temporal_coverage(self):
return self.cube_config.start_time, self.cube_config.end_time
@property
def spatial_coverage(self):
return 0, 0, self.cube_config.grid_width, self.cube_config.grid_height
@property
def variable_descriptors(self):
return {
self._variable_name: {
'data_type': np.float32,
'fill_value': np.nan,
'scale_factor': 1.0,
'add_offset': 0.0,
}
}
def compute_variable_images(self, period_start, period_end):
self._value += 0.1
image_width = self.cube_config.grid_width
image_height = self.cube_config.grid_height
image_shape = (image_height, image_width)
return {
self._variable_name: np.full(image_shape, self._value, dtype=np.float32)
}
def close(self):
pass
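# Illustration (not part of the original provider): the fill logic of
# compute_variable_images, replicated standalone. Each call paints the whole
# (grid_height, grid_width) grid with one constant that grows by 0.1 per period.
import numpy as np

value = 0.0
frames = []
for _ in range(3):
    value += 0.1
    frames.append(np.full((3, 4), value, dtype=np.float32))
# frames[0] is all 0.1, frames[1] all 0.2, frames[2] all 0.3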
|
google/dynamic-video-depth
|
loggers/html_template.py
|
Python
|
apache-2.0
| 2,786
| 0.001436
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
TABLE_HEADER = """
<html>
<head>
<script type="text/javascript" language="javascript" src="https://code.jquery.com/jquery-3.3.1.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/unveil/1.3.0/jquery.unveil.min.js" integrity="sha512-smKadbDZ1g5bsWtP1BuWxgBq1WeP3Se1DLxeeBB+4Wf/HExJsJ3OV6lzravxS0tFd43Tp4x+zlT6/yDTtr+mew==" crossorigin="anonymous"></script>
<script type="text/javascript" language="javascript"
src="https://cdn.datatables.net/1.10.20/js/jquery.dataTables.min.js"></script>
<script type="text/javascript" language="javascript"
src="https://cdn.datatables.net/buttons/1.6.1/js/dataTables.buttons.min.js"></script>
<script type="text/j
|
avascript" language="javascript"
src="https://cdn.datatables.net/buttons/1.6.1/js/buttons.colVis.min.js"></scr
|
ipt>
<link rel="stylesheet" type="text/css" href="https://cdn.datatables.net/1.10.20/css/jquery.dataTables.min.css">
</link>
<link rel="stylesheet" type="text/css"
href="https://ztzhang.info/assets/css/buttons.dataTables.min.css">
</link>
<link rel="stylesheet" type="text/css"
href="https://ztzhang.info/assets/css/datatable.css">
</link>
<script>
$(document).ready(function () {{
var table = $('#myTable').DataTable({{
dom: 'Blfrtip',
autoWidth: false,
buttons: [
'columnsToggle'
],
"lengthMenu": [[5, 10, 15, 20, -1], [5, 10, 15, 20, "All"]],
"columnDefs": [
{{"targets": "_all",
"className": "dt-center"}}
],
"drawCallback": function( settings ) {{
$("#myTable img:visible").unveil();
}},
}});
}});
</script>
</head>
<body bgcolor='black'>
<table id="myTable" class="cell-border compact stripe">
<thead>
<tr>
{table_header}
</tr>
</thead>
<tbody>
{table_body}
</tbody>
</table>
</body>
</html>
"""
image_tag_template = "<td><img src=\"{image_path}\" style=\"max-width:100%;height:auto;\"></td>"
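# Example (illustrative; the file names are made up): TABLE_HEADER is rendered
# with str.format, which is why the literal JavaScript braces above are doubled
# ({{ }}) while {table_header} and {table_body} are substituted.
header_cells = ''.join('<th>{}</th>'.format(n) for n in ('frame', 'depth'))
body_row = '<tr>' + image_tag_template.format(image_path='frames/0001.png') + '</tr>'
html = TABLE_HEADER.format(table_header=header_cells, table_body=body_row)
with open('index.html', 'w') as f:
    f.write(html)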
|
MattBlack85/django-gruyere
|
gruyere/settings.py
|
Python
|
mit
| 1,620
| 0
|
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
SECRET_KEY = '#zw#k0g76&a!ulj820of0+i#y(-y4%)sed3k-3q9mw8kzn7)jf'
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
    'django.contrib.staticfiles',
)
TEMPLATE_DIRS = (
os.path.join(BASE_DIR, 'templates/'),
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'gruyere.urls'
WSGI_APPLICATION = 'gruyere.wsgi.application'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'gruyere',
'USER': 'cheddar',
'PASSWORD': 'cheese'
}
}
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static/'),
)
|
arcyfelix/ML-DL-AI
|
Supervised Learning/GANs/dcgan-tensorflayer/tensorlayer/nlp.py
|
Python
|
apache-2.0
| 32,641
| 0.00432
|
#! /usr/bin/python
# -*- coding: utf8 -*-
import tensorflow as tf
import os
from sys import platform as _platform
import collections
import random
import numpy as np
import warnings
from six.moves import xrange
from tensorflow.python.platform import gfile
import re
## Iteration functions
def generate_skip_gram_batch(data, batch_size, num_skips, skip_window, data_index=0):
"""Generate a training batch for the Skip-Gram model.
Parameters
----------
data : a list
To present context.
batch_size : an int
Batch size to return.
num_skips : an int
How many times to reuse an input to generate a label.
skip_window : an int
How many words to consider left and right.
data_index : an int
Index of the context location.
without using yield, this code use data_index to instead.
Returns
--------
batch : a list
Inputs
labels : a list
Labels
data_index : an int
Index of the context location.
Examples
--------
>>> Setting num_skips=2, skip_window=1, use the right and left words.
>>> In the same way, num_skips=4, skip_window=2 means use the nearby 4 words.
>>> data = [1,2,3,4,5,6,7,8,9,10,11]
    >>> batch, labels, data_index = tl.nlp.generate_skip_gram_batch(data=data, batch_size=8, num_skips=2, skip_window=1, data_index=0)
>>> print(batch)
... [2 2 3 3 4 4 5 5]
>>> print(labels)
... [[3]
... [1]
... [4]
... [2]
... [5]
... [3]
... [4]
... [6]]
References
-----------
- `TensorFlow word2vec tutorial <https://www.tensorflow.org/versions/r0.9/tutorials/word2vec/index.html#vector-representations-of-words>`_
"""
# global data_index # you can put data_index outside the function, then
# modify the global data_index in the function without return it.
# note: without using yield, this code use data_index to instead.
assert batch_size % num_skips == 0
assert num_skips <= 2 * skip_window
batch = np.ndarray(shape=(batch_size), dtype=np.int32)
labels = np.ndarray(shape=(batch_size, 1), dtype=np.int32)
span = 2 * skip_window + 1 # [ skip_window target skip_window ]
buffer = collections.deque(maxlen=span)
for _ in range(span):
buffer.append(data[data_index])
data_index = (data_index + 1) % len(data)
for i in range(batch_size // num_skips):
target = skip_window # target label at the center of the buffer
targets_to_avoid = [ skip_window ]
for j in range(num_skips):
while target in targets_to_avoid:
target = random.randint(0, span - 1)
targets_to_avoid.append(target)
batch[i * num_skips + j] = buffer[skip_window]
labels[i * num_skips + j, 0] = buffer[target]
buffer.append(data[data_index])
data_index = (data_index + 1) % len(data)
return batch, labels, data_index
## Sampling functions
def sample(a=[], temperature=1.0):
"""Sample an index from a probability array.
Parameters
----------
a : a list
List of probabilities.
temperature : float or None
The higher the more uniform.\n
When a = [0.1, 0.2, 0.7],\n
        temperature = 0.7, the distribution will be sharpened [ 0.05048273 0.13588945 0.81362782]\n
temperature = 1.0, the distribution will be the same [0.1 0.2 0.7]\n
temperature = 1.5, the distribution will be filtered [ 0.16008435 0.25411807 0.58579758]\n
If None, it will be ``np.argmax(a)``
Notes
------
No matter what is the temperature and input list, the sum of all probabilities will be one.
Even if input list = [1, 100, 200], the sum of all probabilities will still be one.
    For large vocabulary_size, choose a higher temperature to avoid errors.
"""
b = np.copy(a)
try:
if temperature == 1:
return np.argmax(np.random.multinomial(1, a, 1))
if temperature is None:
return np.argmax(a)
else:
a = np.log(a) / temperature
a = np.exp(a) / np.sum(np.exp(a))
return np.argmax(np.random.multinomial(1, a, 1))
except:
# np.set_printoptions(threshold=np.nan)
# print(a)
# print(np.sum(a))
# print(np.max(a))
# print(np.min(a))
# exit()
message = "For large vocabulary_size, choice a higher temperature\
to avoid log error. Hint : use ``sample_top``. "
warnings.warn(message, Warning)
# print(a)
# print(b)
return np.argmax(np.random.multinomial(1, b, 1))
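# Quick check of the temperature transform used above (np is imported at the
# top of this module); the numbers match those quoted in the docstring:
#   p = exp(log(a)/t) / sum(exp(log(a)/t))
#   a = [0.1, 0.2, 0.7], t = 0.7  ->  p ~= [0.0505, 0.1359, 0.8136]
_a = np.array([0.1, 0.2, 0.7])
_p = np.exp(np.log(_a) / 0.7)
_p = _p / np.sum(_p)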
def sample_top(a=[], top_k=10):
"""Sample from ``top_k`` probabilities.
Parameters
----------
a : a list
List of probabilities.
top_k : int
Number of candidates to be considered.
"""
idx = np.argpartition(a, -top_k)[-top_k:]
probs = a[idx]
# print("new", probs)
probs = probs / np.sum(probs)
choice = np.random.choice(idx, p=probs)
return choice
## old implementation
# a = np.array(a)
# idx = np.argsort(a)[::-1]
# idx = idx[:top_k]
# # a = a[idx]
# probs = a[idx]
# print("prev", probs)
# # probs = probs / np.sum(probs)
# # choice = np.random.choice(idx, p=probs)
# # return choice
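# Example: with top_k=2 only the two largest-probability indices can be drawn,
# with probabilities renormalised among themselves (0.3/0.7 and 0.4/0.7 here):
#   sample_top(np.array([0.1, 0.2, 0.3, 0.4]), top_k=2)  # returns 2 or 3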
## Vector representations of words (Advanced) UNDOCUMENT
class SimpleVocabulary(object):
"""Simple vocabulary wrapper, see create_vocab().
Parameters
------------
vocab : A dictionary of word to word_id.
unk_id : Id of the special 'unknown' word.
"""
def __init__(self, vocab, unk_id):
"""Initializes the vocabulary."""
self._vocab = vocab
self._unk_id = unk_id
def word_to_id(self, word):
"""Returns the integer id of a word string."""
if word in self._vocab:
return self._vocab[word]
else:
return self._unk_id
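# Example: a three-word vocabulary where id 3 stands in for unknown words.
#   v = SimpleVocabulary({'the': 0, 'cat': 1, 'sat': 2}, unk_id=3)
#   v.word_to_id('cat')  # -> 1
#   v.word_to_id('dog')  # -> 3 (falls back to unk_id)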
class Vocabulary(object):
"""Create Vocabulary class from a given vocabulary and its id-word, word-id convert,
see create_vocab() and ``tutorial_tfrecord3.py``.
Parameters
-----------
vocab_file : File containing the vocabulary, where the words are the first
whitespace-separated token on each line (other tokens are ignored) and
the word ids are the corresponding line numbers.
start_word : Special word denoting sentence start.
end_word : Special word denoting sentence end.
unk_word : Special word denoting unknown words.
Properties
------------
vocab : a dictionary from word to id.
reverse_vocab : a list from id to word.
start_id : int of start id
end_id : int of end id
unk_id : int of unk id
pad_id : int of padding id
Vocab_files
-------------
>>> Look as follow, includes `start_word` , `end_word` but no `unk_word` .
>>> a 969108
>>> <S> 586368
>>> </S> 586368
>>> . 440479
>>> on 213612
>>> of 202290
>>> the 196219
>>> in 182598
>>> with 152984
>>> and 139109
>>> is 97322
"""
def __init__(self,
vocab_file,
start_word="<S>",
end_word="</S>",
unk_word="<UNK>",
pad_word="<PAD>"):
if not tf.gfile.Exists(vocab_file):
tf.logging.fatal("Vocab file %s not found.", vocab_file)
tf.logging.info("Initializing vocabulary from file: %s", vocab_file)
with tf.gfile.GFile(vocab_file, mode="r") as f:
reverse_vocab = list(f.readlines())
reverse_vocab = [line.split()[0] for line in reverse_vocab]
assert start_word in reverse_vocab
assert end_word in reverse_vocab
if unk_word not in reverse_vocab:
reverse_vocab.append(unk_word)
vocab = dict([(x, y) for (y, x) in enumerate(reverse_vocab)])
print(" [TL] Vocabulary from %s : %s %s %s" % (vocab_file, start_word, end_word, unk_word))
print(" vocabulary with %d words (includes start_word, end_word, unk_word)" % len(vocab))
# tf.logging.info(" vocabulary with %d words" % len(vocab))
self.vocab = vocab # vocab[word] = id
self.reverse_vocab = reverse_vocab # reverse_vocab[id] = word
# Save special word ids.
        self.start_id = vocab[start_word]
|
dyachan/django-usuario
|
setup.py
|
Python
|
mit
| 1,204
| 0.001661
|
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-usuario',
version='0.4',
    packages=['usuario'],
    include_package_data=True,
license='MIT License',
description='Extension to model User.',
long_description=README,
keywords = "django user",
url='https://github.com/dyachan/django-usuario',
author='Diego Yachan',
author_email='diego.yachan@gmail.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
        'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
|
amitu/djangothis
|
djangothis/__init__.py
|
Python
|
bsd-3-clause
| 64
| 0
|
from djangothis.app import read_yaml, read_yaml_file, watchfile
|
pombredanne/pythran
|
website/support.py
|
Python
|
bsd-3-clause
| 1,151
| 0
|
#!/usr/bin/env python
from pythran import tables
TITLE = "Supported Modules and Functions"
DEPTHS = '=*-+:~#.^"`'
print(DEPTHS[0]*len(TITLE))
print(TITLE)
print(DEPTHS[0]*len(TITLE))
print("")
def format_name(name):
if name.endswith('_') and not name.startswith('_'):
name = name[:-1]
return name
def isiterable(obj):
return hasattr(obj, '__iter__')
def dump_entry(entry_name, entry_value, depth):
if isiterable(entry_value):
print(entry_name)
print(DEPTHS[depth] * len(entry_name))
print("")
sym_entries, sub_entries = [], []
for sym in entry_value:
w = sub_entries if isiterable(entry_value[sym]) else sym_entries
            w.append(sym)
for k in sorted(sym_entries):
dump_entry(format_name(k), entry_value[k], depth + 1)
print("")
for k in sorted(sub_entries):
dump_entry(format_name(k), entry_value[k], depth + 1)
print("")
    else:
print(entry_name)
for MODULE in sorted(tables.MODULES):
if MODULE != '__dispatch__':
dump_entry(format_name(MODULE), tables.MODULES[MODULE], 1)
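# Example (illustrative input, not executed here): dump_entry prints an
# RST-style outline, underlining each iterable entry with the character for
# its depth.
#   dump_entry('numpy', {'cos': None, 'pi': None, 'linalg': {'norm': None}}, 1)
# prints 'numpy' underlined with '*', the leaf names 'cos' and 'pi', then
# 'linalg' underlined with '-' followed by its leaf 'norm'.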
|
adminq80/Interactive_estimation
|
game/round/migrations/0014_auto_20161102_2154.py
|
Python
|
mit
| 509
| 0.001965
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-11-02 21:54
from __future__ import unicode_literals
from decimal import Decimal
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('round', '0013_plot_batch'),
]
operations = [
migrations.AlterField(
model_name='round',
name='score',
            field=models.DecimalField(decimal_places=2, default=Decimal('0'), max_digits=4),
),
]
|
botswana-harvard/ba-namotswe
|
ba_namotswe/tests/test_enrollment_model.py
|
Python
|
gpl-3.0
| 3,230
| 0.002786
|
from django.test.testcases import TestCase
from django.utils import timezone
from edc_visit_schedule.site_visit_schedules import site_visit_schedules
from edc_visit_tracking.constants import SCHEDULED
from ba_namotswe.models import Appointment, SubjectVisit, RequisitionMetadata, CrfMetadata, SubjectConsent, RegisteredSubject
from .factories.enrollment_factory import EnrollmentFactory
class TestEnrollment(TestCase):
def test_create_enrollment(self):
"""Assert enrollment creates subject consent and appointments."""
enrollment = EnrollmentFactory()
schedule = site_visit_schedules.get_schedule(enrollment._meta.label_lower)
self.assertEqual(SubjectConsent.objects.all().count(), 1)
self.assertGreater(Appointment.objects.all().count(), 0)
self.assertEqual(Appointment.objects.all().count(), len(schedule.visits))
# def test_create_enrollment_bad_dob(self):
# """Assert enrollment creates subject consent and appointments."""
# EnrollmentFactory(dob=date(1900, 1, 1))
def test_subject_identifier(self):
"""Assert enrollment subject_identifier is updated after consent is created."""
enrollment = EnrollmentFactory()
self.assertIsNotNone(enrollment.subject_identifier)
SubjectConsent.objects.get(subject_identifier=enrollment.subject_identifier)
RegisteredSubject.objects.get(subject_identifier=enrollment.subject_identifier)
def test_subject_consent_attrs(self):
"""Assert attrs from enrollment match subject_consent."""
enrollment = EnrollmentFactory()
subject_consent = SubjectConsent.objects.get(subject_identifier=enrollment.subject_identifier)
        self.assertEqual(subject_consent.dob, enrollment.dob)
        self.assertEqual(subject_consent.initials, enrollment.initials)
        self.assertEqual(subject_consent.consent_datetime, enrollment.report_datetime)
        self.assertEqual(subject_consent.gender, enrollment.gender)
def test_registered_subject_attrs(self):
"""Assert attrs from enrollment match registered_subject."""
enrollment = EnrollmentFactory()
registered_subject = RegisteredSubject.objects.get(subject_identifier=enrollment.subject_identifier)
        self.assertEqual(registered_subject.dob, enrollment.dob)
        self.assertEqual(registered_subject.initials, enrollment.initials)
        self.assertEqual(registered_subject.consent_datetime, enrollment.report_datetime)
        self.assertEqual(registered_subject.gender, enrollment.gender)
def test_create_subject_visit(self):
"""Assert subject visit creates metadata."""
        EnrollmentFactory()
appointment = Appointment.objects.all().order_by('visit_code').first()
SubjectVisit.objects.create(
            appointment=appointment,
            report_datetime=timezone.now(),
reason=SCHEDULED,
)
schedule = site_visit_schedules.get_schedule(appointment.schedule_name)
visit = schedule.get_visit(appointment.visit_code)
self.assertGreater(CrfMetadata.objects.all().count(), 0)
self.assertEqual(CrfMetadata.objects.all().count(), len(visit.crfs))
self.assertGreater(RequisitionMetadata.objects.all().count(), 0)
self.assertEqual(RequisitionMetadata.objects.all().count(), len(visit.requisitions))
|
dilynfullerton/tr-A_dependence_plots
|
src/deprecated/nushellx_lpt/metafitter_abs.py
|
Python
|
cc0-1.0
| 4,879
| 0
|
"""nushellx_lpt/metafitter_abs.py
Function definitions for an abstract *.lpt metafitter
"""
from __future__ import print_function, division, unicode_literals
import numpy as np
from deprecated.int.metafitter_abs import single_particle_metafit_int
from constants import DPATH_SHELL_RESULTS, DPATH_PLOTS
from deprecated.nushellx_lpt.DataMapNushellxLpt import DataMapNushellxLpt
from plotting import map_to_arrays
from transforms import pzbt
# noinspection PyUnusedLocal
# TODO: Use of zbt_array as a constant in this function is a hack.
# TODO: There should be well-defined methods for accessing zero-body term data
def _get_plot_lpt(n, exp, me_map, mzbt_map, *args):
"""Gets the energy vs. mass plot (x, y, const_list, const_dict) based
on the given mass -> energy map, mass -> zero body map, etc
:param n: state index (beginning at 1) from first column of *.lpt file
:param exp: ExpNushellxLpt, which identifies the data being used for the
plot
:param me_map: mass number A -> enery map, where energy is that derived
from the *.lpt file (without addition of zero body term)
:param mzbt_map: mass number A -> zero body term map, derived from the
interaction files
:param args: allows extra args from compatibility (i.e. duck-typing).
These are not used here
    :return: (x, y, const_list, const_dict), where const_list and const_dict
contain exp, n, and zbt array
"""
x, y = map_to_arrays(me_map)
zbt_list = list()
x_arr, zbt_arr = map_to_arrays(mzbt_map)
    for xa, zbta, i in zip(x_arr, zbt_arr, range(len(x_arr))):
if xa in x:
zbt_list.append(zbta)
zbt_arr_fixed = np.array(zbt_list)
const_list = [exp, n, np.array(zbt_arr_fixed)]
const_dict = {'exp': exp, 'N': n, 'zbt_arr': zbt_arr_fixed}
return x, y, const_list, const_dict
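# Standalone sketch of the alignment step above (map_to_arrays is assumed to
# return parallel key/value arrays; the numbers are hypothetical):
#   x       = [16, 17]          # masses present in the *.lpt energies
#   x_arr   = [16, 17, 18]      # masses that have a zero body term
#   zbt_arr = [0.5, 0.6, 0.7]
# Only terms whose mass appears in x survive, so
#   zbt_arr_fixed == np.array([0.5, 0.6])
# lines up element-wise with x and y.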
# noinspection PyUnusedLocal
def _get_plots_lpt(exp_list, data_map, get_data_fn, get_plot_fn=_get_plot_lpt,
**kwargs):
"""Gets a list of plot based on the given exp_list
:param exp_list: list of exp values for which to get plots
:param data_map: DataMapNushellxLpt object containing data for all
of the exp in exp_list
:param get_data_fn: function to retrieve n -> mass -> energy map from
a value (DatumLpt) in data_map
:param get_plot_fn: function to use to make plot from n, exp,
mass -> energy map, and mass -> zero body term map
:param kwargs: other arguments for compatibility (duck-typing). These
are not used here.
:return: list of plot, where a plot is (x, y, const_list, const_dict)
"""
plots = list()
if exp_list is not None:
exps = exp_list
else:
exps = data_map.map.keys()
for exp in sorted(exps):
datum = data_map[exp]
nme_map = get_data_fn(datum)
mzbt_map = datum.mass_to_zbt_map()
for n, me_map in nme_map.items():
plots.append(get_plot_fn(n, exp, me_map, mzbt_map))
return plots
# noinspection PyUnusedLocal
# TODO: Get rid of the need for this function. Top level functions should
# TODO: deal with labels, etc. The metafitters should be as minimalistic as
# TODO: possible.
def _get_label_kwargs_lpt(plot, idx_key=None):
"""Function to get a dictionary for the label keyword arguments for
formatting
:param plot: (x, y, const_list, const_dict)
:param idx_key: I do not even remember what the point of this argument is.
"""
return {'exp': plot[3]['exp'], 'N': plot[3]['N']}
def metafit_nushellx_lpt(
fitfn, exp_list,
transform=pzbt,
exp_filter_fn=None,
xlabel='A', ylabel='Energy + Zero Body Term (MeV)',
show_fit=False,
_sourcedir=DPATH_SHELL_RESULTS, _savedir=DPATH_PLOTS,
_data_map=DataMapNushellxLpt,
_get_data=lambda dm: dm.n_to_mass_to_ex_energy_map(),
_get_plots=_get_plots_lpt, _get_plot=_get_plot_lpt,
_plot_sort_key=lambda p: p[3]['exp'],
_code_pref='LPT',
_title='Metafit {mfn} for shell calculation {tr} data using {fn}',
_label='{N}, {exp}', _get_label_fmt_kwargs=_get_label_kwargs_lpt,
_print_results=False,
_idx='N',
**kwargs
):
"""See the documentation for single_particle_metafit_int
(int/metafitter_abs.py)
"""
return single_particle_metafit_int(
fitfn=fitfn, exp_list=exp_list, exp_filter_fn=exp_filter_fn,
dpath_sources=_sourcedir, dpath_plots=_savedir,
transform=transform,
xlabel=xlabel, ylabel=ylabel,
show_fit=show_fit,
_data_map=_data_map, _get_data=_get_data, _get_plots=_get_plots,
_get_plot=_get_plot, _plot_sort_key=_plot_sort_key,
_title=_title, _label=_label, _idx=_idx,
print_results=_print_results,
_get_label_fmt_kwargs=_get_label_fmt_kwargs,
_code_pref=_code_pref,
**kwargs)
|
cindywang0728/adminset
|
setup/ansible.py
|
Python
|
apache-2.0
| 8,413
| 0.000713
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
from subprocess import Popen, PIPE
from cmdb.models import Host, HostGroup
from django.shortcuts import render
from django.http import HttpResponse
import os
from config.views import get_dir
from django.contrib.auth.decorators import login_required
from accounts.permission import permission_verify
import logging
from lib.log import log
from lib.setup import get_playbook, get_roles
# var info
ansible_dir = get_dir("a_path")
roles_dir = get_dir("r_path")
playbook_dir = get_dir("p_path")
level = get_dir("log_level")
log_path = get_dir("log_path")
log("setup.log", level, log_path)
def write_role_vars(roles, vargs):
r_vars = vargs.split('\r\n')
for r in roles:
if vargs:
if os.path.exists(roles_dir+r+"/vars"):
pass
else:
os.mkdir(roles_dir+r+"/vars")
with open(roles_dir+r+'/vars/main.yml', 'wb+') as role_file:
role_file.writelines("---\n")
for x in r_vars:
rs = x + '\n'
role_file.writelines(rs)
return True
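# Example (hypothetical inputs): write_role_vars(['nginx'], 'port: 80\r\nuser: www')
# creates roles_dir + 'nginx/vars/' if missing and writes vars/main.yml as:
#   ---
#   port: 80
#   user: www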
@login_required()
@permission_verify()
def index(request):
temp_name = "setup/setup-header.html"
all_host = Host.objects.all()
all_dir = get_roles(roles_dir)
all_pbook = get_playbook(playbook_dir)
all_group = HostGroup.objects.all()
return render(request, 'setup/ansible.html', locals())
@login_required()
@permission_verify()
def playbook(request):
ret = []
temp_name = "setup/setup-header.html"
if os.path.exists(ansible_dir + '/gexec.yml'):
os.remove(ansible_dir + '/gexec.yml')
else:
pass
if request.method == 'POST':
host = request.POST.getlist('mserver', [])
group = request.POST.getlist('mgroup', [])
pbook = request.POST.getlist('splaybook', [])
roles = request.POST.getlist('mroles', [])
role_vars = request.POST.get('mvars')
if host:
if roles:
if role_vars:
write_role_vars(roles, role_vars)
for h in host:
logging.info("==========ansible tasks start==========")
logging.info("User:"+request.user.username)
logging.info("host:"+h)
with open(ansible_dir + '/gexec.yml', 'w+') as f:
flist = ['- hosts: '+h+'\n', ' remote_user: root\n', ' gather_facts: true\n', ' roles:\n']
for r in roles:
rs = ' - ' + r + '\n'
flist.append(rs)
logging.info("Role:"+r)
f.writelines(flist)
cmd = "ansible-playbook"+" " + ansible_dir+'/gexec.yml'
p = Popen(cmd, stderr=PIPE, stdout=PIPE, shell=True)
data = p.communicate()
ret.append(data)
for d in data:
logging.info(d)
logging.info("==========ansible tasks end============")
else:
for h in host:
for p in pbook:
f = open(playbook_dir + p, 'r+')
flist = f.readlines()
flist[0] = '- hosts: '+h+'\n'
f = open(playbook_dir + p, 'w+')
f.writelines(flist)
f.close()
cmd = "ansible-playbook"+" " + playbook_dir + p
pcmd = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
data = pcmd.communicate()
ret.append(data)
logging.info("==========ansible tasks start==========")
logging.info("User:"+request.user.username)
logging.info("host:"+h)
logging.info("Playbook:"+p)
for d in data:
logging.info(d)
logging.info("==========ansible tasks end============")
return render(request, 'setup/result.html', locals())
if group:
if roles:
if role_vars:
write_role_vars(roles, role_vars)
for g in group:
logging.info("==========ansible tasks start==========")
logging.info("User:"+request.user.username)
logging.info("group:"+g)
f = open(ansible_dir + '/gexec.yml', 'w+')
flist = ['- hosts: '+g+'\n', ' remote_user: root\n', ' gather_facts: true\n', ' roles:\n']
for r in roles:
rs = ' - ' + r + '\n'
flist.append(rs)
logging.info("Role:"+r)
f.writelines(flist)
f.close()
cmd = "ansible-playbook"+" " + ansible_dir+'/gexec.yml'
p = Popen(cmd, stderr=PIPE, stdout=PIPE, shell=True)
data = p.communicate()
ret.append(data)
for d in data:
logging.info(d)
logging.info("==========ansible tasks end============")
else:
for g in group:
for p in pbook:
f = open(playbook_dir + p, 'r+')
flist = f.readlines()
flist[0] = '- hosts: '+g+'\n'
f = open(playbook_dir + p, 'w+')
f.writelines(flist)
f.close()
cmd = "ansible-playbook"+" " + playbook_dir + p
pcmd = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
data = pcmd.communicate()
ret.append(data)
logging.info("==========ansible tasks start==========")
logging.info("User:"+request.user.username)
logging.info("Group:"+g)
logging.info("Playbook:"+p)
for d in data:
logging.info(d)
logging.info("==========ansible tasks end============")
return render(request, 'setup/result.html', locals())
@login_required()
@permission_verify()
def ansible_command(request):
command_list = []
ret = []
count = 1
temp_name = "setup/setup-header.html"
if request.method == 'POST':
mcommand = request.POST.get('mcommand')
command_list = mcommand.split('\n')
for command in command_list:
if command.startswith("ansible"):
p = Popen(command, stdout=PIPE, stderr=PIPE,shell=True)
data = p.communicate()
ret.append(data)
else:
data = "your command " + str(count) + " is invalid!"
ret.append(data)
count += 1
logging.info("==========ansible tasks start==========")
logging.info("User:"+request.user.username)
logging.info("command:"+command)
for d in data:
logging.info(d)
logging.info("==========ansible tasks end============")
    return render(request, 'setup/result.html', locals())
@login_required()
@permission_verify()
def host_sync(request):
group = HostGroup.objects.all()
ansible_file = open(ansible_dir+"/hosts", "wb")
all_host = Host.objects.all()
for host in all_host:
#gitlab ansible_host=10.100.1.76 host_name=gitlab
host_item = host.hostname+" "+"ansible_host="+host.ip+" "+"host_name="+host.hostname+"\n"
        ansible_file.write(host_item)
for g in group:
group_name = "["+g.name+"]"+"\n"
ansible_file.write(group_name)
members = Host.objects.filter(group__name=g)
for m in members:
group_item = m.hostname+"\n"
ansible_file.write(group_item)
ansible_file.close()
logging.info("====
|
jrappen/sublime-distractionless
|
main.py
|
Python
|
isc
| 195
| 0
|
#!/usr/bin/env python
# coding: utf-8
from .src import *
def plugin_loaded():
distractionless.plugin_loaded(reload=False)
def plugin_unloaded():
distractionless.plugin_unloaded()
|
pvagner/orca
|
test/keystrokes/java/role_radio_button.py
|
Python
|
lgpl-2.1
| 16,318
| 0.003003
|
#!/usr/bin/python
"""Test of radio buttons in Java's SwingSet2."""
from macaroon.playback import *
import utils
sequence = MacroSequence()
##########################################################################
# We wait for the demo to come up and for focus to be on the toggle button
#
#sequence.append(WaitForWindowActivate("SwingSet2",None))
sequence.append(WaitForFocus("", acc_role=pyatspi.ROLE_TOGGLE_BUTTON))
# Wait for entire window to get populated.
sequence.append(PauseAction(5000))
##########################################################################
# Tab over to the button demo, and activate it.
#
sequence.append(KeyComboAction("Tab"))
sequence.append(WaitForFocus("", acc_role=pyatspi.ROLE_TOGGLE_BUTTON))
sequence.append(TypeAction(" "))
##########################################################################
# Tab all the way down to the button page tab.
#
sequence.append(KeyComboAction("Tab"))
sequence.append(WaitForFocus("", acc_role=pyatspi.ROLE_TOGGLE_BUTTON))
sequence.append(KeyComboAction("Tab"))
sequence.append(WaitForFocus("", acc_role=pyatspi.ROLE_TOGGLE_BUTTON))
sequence.append(KeyComboAction("Tab"))
sequence.append(WaitForFocus("", acc_role=pyatspi.ROLE_TOGGLE_BUTTON))
sequence.append(KeyComboAction("Tab"))
sequence.append(WaitForFocus("", acc_role=pyatspi.ROLE_TOGGLE_BUTTON))
sequence.append(KeyComboAction("Tab"))
sequence.append(WaitForFocus("", acc_role=pyatspi.ROLE_TOGGLE_BUTTON))
sequence.append(KeyComboAction("Tab"))
sequence.append(WaitForFocus("", acc_role=pyatspi.ROLE_TOGGLE_BUTTON))
sequence.append(KeyComboAction("Tab"))
sequence.append(WaitForFocus("", acc_role=pyatspi.ROLE_TOGGLE_BUTTON))
sequence.append(KeyComboAction("Tab"))
sequence.append(WaitForFocus("", acc_role=pyatspi.ROLE_TOGGLE_BUTTON))
sequence.append(KeyComboAction("Tab"))
sequence.append(WaitForFocus("", acc_role=pyatspi.ROLE_TOGGLE_BUTTON))
sequence.append(KeyComboAction("Tab"))
sequence.append(WaitForFocus("", acc_role=pyatspi.ROLE_TOGGLE_BUTTON))
sequence.append(KeyComboAction("Tab"))
sequence.append(WaitForFocus("", acc_role=pyatspi.ROLE_TOGGLE_BUTTON))
sequence.append(KeyComboAction("Tab"))
sequence.append(WaitForFocus("", acc_role=pyatspi.ROLE_TOGGLE_BUTTON))
sequence.append(KeyComboAction("Tab"))
sequence.append(WaitForFocus("", acc_role=pyatspi.ROLE_TOGGLE_BUTTON))
sequence.append(KeyComboAction("Tab"))
sequence.append(WaitForFocus("", acc_role=pyatspi.ROLE_TOGGLE_BUTTON))
sequence.append(KeyComboAction("Tab"))
sequence.append(WaitForFocus("Button Demo", acc_role=pyatspi.ROLE_PAGE_TAB))
sequence.append(KeyComboAction("Tab"))
##########################################################################
# Select Radio Buttons tab
#
sequence.append(WaitForFocus("Buttons", acc_role=pyatspi.ROLE_PAGE_TAB))
sequence.append(KeyComboAction("Right"))
sequence.append(WaitForFocus("Radio Buttons", acc_role=pyatspi.ROLE_PAGE_TAB))
sequence.append(PauseAction(5000))
##########################################################################
# Tab into radio buttons container
#
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Tab"))
sequence.append(WaitForFocus("Radio One ", acc_role=pyatspi.ROLE_RADIO_BUTTON))
sequence.append(utils.AssertPresentationAction(
"1. Move to Radio One radio button",
["BRAILLE LINE: 'SwingSet2 Application SwingSet2 Frame RootPane LayeredPane Bu
|
tton Demo TabList Button Demo Page Radio Buttons TabList Radio Buttons Page Text Radio Buttons Panel Text Radio Buttons & y Radio One RadioButton'",
" VISIBLE: '& y Radio One RadioButton', cursor=1",
"SPEECH OUTPUT: 'Text Radio Buttons panel Radio One not selected radio button'"]))
##########################################################################
# Expected output when radio button is selected.
#
# BRAILLE LINE: 'SwingSet2 Application SwingSet2 Frame RootPane LayeredPane Button Demo TabList Button Demo Radio Buttons TabList Radio Buttons Text Radio Buttons Panel &=y Radio One RadioButton'
# VISIBLE: '&=y Radio One RadioButton', cursor=1
#
# SPEECH OUTPUT: 'selected'
sequence.append(utils.StartRecordingAction())
sequence.append(TypeAction(" "))
sequence.append(WaitAction("object:property-change:accessible-value", None,
None, pyatspi.ROLE_RADIO_BUTTON, 5000))
sequence.append(utils.AssertPresentationAction(
"2. Select the focused radio button",
["BRAILLE LINE: 'SwingSet2 Application SwingSet2 Frame RootPane LayeredPane Button Demo TabList Button Demo Page Radio Buttons TabList Radio Buttons Page Text Radio Buttons Panel Text Radio Buttons &=y Radio One RadioButton'",
" VISIBLE: '&=y Radio One RadioButton', cursor=1",
"SPEECH OUTPUT: 'selected'"]))
########################################################################
# Expected output when radio button comes into focus.
#
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Tab"))
sequence.append(WaitForFocus("Radio Two", acc_role=pyatspi.ROLE_RADIO_BUTTON))
sequence.append(utils.AssertPresentationAction(
"3. Move to Radio Two radio button",
["BRAILLE LINE: 'SwingSet2 Application SwingSet2 Frame RootPane LayeredPane Button Demo TabList Button Demo Page Radio Buttons TabList Radio Buttons Page Text Radio Buttons Panel Text Radio Buttons & y Radio Two RadioButton'",
" VISIBLE: '& y Radio Two RadioButton', cursor=1",
"SPEECH OUTPUT: 'Radio Two not selected radio button'"]))
########################################################################
# Expected output when radio button is selected.
#
sequence.append(utils.StartRecordingAction())
sequence.append(TypeAction(" "))
sequence.append(WaitAction("object:property-change:accessible-value", None,
None, pyatspi.ROLE_RADIO_BUTTON, 5000))
sequence.append(utils.AssertPresentationAction(
"4. Select the focused radio button",
["BRAILLE LINE: 'SwingSet2 Application SwingSet2 Frame RootPane LayeredPane Button Demo TabList Button Demo Page Radio Buttons TabList Radio Buttons Page Text Radio Buttons Panel Text Radio Buttons &=y Radio Two RadioButton'",
" VISIBLE: '&=y Radio Two RadioButton', cursor=1",
"SPEECH OUTPUT: 'selected'"]))
########################################################################
# Expected output when radio button comes into focus.
#
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Tab"))
sequence.append(WaitForFocus("Radio Three", acc_role=pyatspi.ROLE_RADIO_BUTTON))
sequence.append(utils.AssertPresentationAction(
"5. Move to Radio Three radio button",
["BRAILLE LINE: 'SwingSet2 Application SwingSet2 Frame RootPane LayeredPane Button Demo TabList Button Demo Page Radio Buttons TabList Radio Buttons Page Text Radio Buttons Panel Text Radio Buttons & y Radio Three RadioButton'",
" VISIBLE: '& y Radio Three RadioButton', cursor=1",
"SPEECH OUTPUT: 'Radio Three not selected radio button'"]))
########################################################################
# Expected output when radio button is selected.
#
sequence.append(utils.StartRecordingAction())
sequence.append(TypeAction(" "))
sequence.append(WaitAction("object:property-change:accessible-value", None,
None, pyatspi.ROLE_RADIO_BUTTON, 5000))
sequence.append(utils.AssertPresentationAction(
"6. Select the focused radio button",
["BRAILLE LINE: 'SwingSet2 Application SwingSet2 Frame RootPane LayeredPane Button Demo TabList Button Demo Page Radio Buttons TabList Radio Buttons Page Text Radio Buttons Panel Text Radio Buttons &=y Radio Three RadioButton'",
" VISIBLE: '&=y Radio Three RadioButton', cursor=1",
"SPEECH OUTPUT: 'selected'"]))
########################################################################
# Expected output when radio button comes into focus.
#
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Tab"))
sequence.append(WaitForFocus("Radio One ", acc_role=pyatspi.ROLE_RADIO_BUTTON))
sequence.append(utils.AssertPresentationAction(
"7. Move to Radio One radio button",
["BRAILLE LINE: 'SwingSet2 A
|
grlee77/scipy
|
scipy/linalg/tests/test_interpolative.py
|
Python
|
bsd-3-clause
| 8,973
| 0.000223
|
#******************************************************************************
# Copyright (C) 2013 Kenneth L. Ho
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer. Redistributions in binary
# form must reproduce the above copyright notice, this list of conditions and
# the following disclaimer in the documentation and/or other materials
# provided with the distribution.
#
# None of the names of the copyright holders may be used to endorse or
# promote products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#******************************************************************************
import scipy.linalg.interpolative as pymatrixid
import numpy as np
from scipy.linalg import hilbert, svdvals, norm
from scipy.sparse.linalg import aslinearoperator
from scipy.linalg.interpolative import interp_decomp
import itertools
from numpy.testing import (assert_, assert_allclose, assert_equal,
assert_array_equal)
import pytest
from pytest import raises as assert_raises
import sys
_IS_32BIT = (sys.maxsize < 2**32)
@pytest.fixture()
def eps():
yield 1e-12
@pytest.fixture(params=[np.float64, np.complex128])
def A(request):
# construct Hilbert matrix
# set parameters
n = 300
yield hilbert(n).astype(request.param)
@pytest.fixture()
def L(A):
yield aslinearoperator(A)
@pytest.fixture()
def rank(A, eps):
S = np.linalg.svd(A, compute_uv=False)
try:
rank = np.nonzero(S < eps)[0][0]
except IndexError:
rank = A.shape[0]
return rank
class TestInterpolativeDecomposition:
@pytest.mark.parametrize(
"rand,lin_op",
[(False, False), (True, False), (True, True)])
def test_real_id_fixed_precision(self, A, L, eps, rand, lin_op):
if _IS_32BIT and A.dtype == np.complex_ and rand:
pytest.xfail("bug in external fortran code")
# Test ID routines on a Hilbert matrix.
A_or_L = A if not lin_op else L
k, idx, proj = pymatrixid.interp_decomp(A_or_L, eps, rand=rand)
B = pymatrixid.reconstruct_matrix_from_id(A[:, idx[:k]], idx, proj)
assert_allclose(A, B, rtol=eps, atol=1e-08)
@pytest.mark.parametrize(
"rand,lin_op",
[(False, False), (True, False), (True, True)])
def test_real_id_fixed_rank(self, A, L, eps, rank, rand, lin_op):
if _IS_32BIT and A.dtype == np.complex_ and rand:
pytest.xfail("bug in external fortran code")
k = rank
A_or_L = A if not lin_op else L
idx, proj = pymatrixid.interp_decomp(A_or_L, k, rand=rand)
B = pymatrixid.reconstruct_matrix_from_id(A[:, idx[:k]], idx, proj)
assert_allclose(A, B, rtol=eps, atol=1e-08)
@pytest.mark.parametrize("rand,lin_op", [(False, False)])
def test_real_id_skel_and_interp_matrices(
self, A, L, eps, rank, rand, lin_op):
k = rank
A_or_L = A if not lin_op else L
idx, proj = pymatrixid.interp_decomp(A_or_L, k, rand=rand)
P = pymatrixid.reconstruct_interp_matrix(idx, proj)
B = pymatrixid.reconstruct_skel_matrix(A, k, idx)
assert_allclose(B, A[:, idx[:k]], rtol=eps, atol=1e-08)
assert_allclose(B @ P, A, rtol=eps, atol=1e-08)
@pytest.mark.parametrize(
"rand,lin_op",
[(False, False), (True, False), (True, True)])
    def test_svd_fixed_precision(self, A, L, eps, rand, lin_op):
if _IS_32BIT and A.dtype == np.complex_ and rand:
pytest.xfail("bug in external fortran code")
A_or_L = A if not lin_op else L
U, S, V = pymatrixid.svd(A_or_L, eps, rand=rand)
B = U * S @ V.T.conj()
assert_allclose(A, B, rtol=eps, atol=1e-08)
@pytest.mark.parametrize(
"rand,lin_op",
[(False, False), (True, False), (True, True)])
def test_svd_fixed_rank(self, A, L, eps, rank, rand, lin_op):
if _IS_32BIT and A.dtype == np.complex_ and rand:
pytest.xfail("bug in external fortran code")
k = rank
A_or_L = A if not lin_op else L
U, S, V = pymatrixid.svd(A_or_L, k, rand=rand)
B = U * S @ V.T.conj()
assert_allclose(A, B, rtol=eps, atol=1e-08)
def test_id_to_svd(self, A, eps, rank):
k = rank
idx, proj = pymatrixid.interp_decomp(A, k, rand=False)
U, S, V = pymatrixid.id_to_svd(A[:, idx[:k]], idx, proj)
B = U * S @ V.T.conj()
assert_allclose(A, B, rtol=eps, atol=1e-08)
def test_estimate_spectral_norm(self, A):
s = svdvals(A)
norm_2_est = pymatrixid.estimate_spectral_norm(A)
assert_allclose(norm_2_est, s[0], rtol=1e-6, atol=1e-8)
def test_estimate_spectral_norm_diff(self, A):
B = A.copy()
B[:, 0] *= 1.2
s = svdvals(A - B)
norm_2_est = pymatrixid.estimate_spectral_norm_diff(A, B)
assert_allclose(norm_2_est, s[0], rtol=1e-6, atol=1e-8)
def test_rank_estimates_array(self, A):
B = np.array([[1, 1, 0], [0, 0, 1], [0, 0, 1]], dtype=A.dtype)
for M in [A, B]:
rank_tol = 1e-9
rank_np = np.linalg.matrix_rank(M, norm(M, 2) * rank_tol)
            rank_est = pymatrixid.estimate_rank(M, rank_tol)
assert_(rank_est >= rank_np)
assert_(rank_est <= rank_np + 10)
def test_rank_estimates_lin_op(self, A):
B = np.array([[1, 1, 0], [0, 0, 1], [0, 0, 1]], dtype=A.dtype)
for M in [A, B]:
ML = aslinearoperator(M)
rank_tol = 1e-9
            rank_np = np.linalg.matrix_rank(M, norm(M, 2) * rank_tol)
            rank_est = pymatrixid.estimate_rank(ML, rank_tol)
assert_(rank_est >= rank_np - 4)
assert_(rank_est <= rank_np + 4)
def test_rand(self):
pymatrixid.seed('default')
assert_allclose(pymatrixid.rand(2), [0.8932059, 0.64500803],
rtol=1e-4, atol=1e-8)
pymatrixid.seed(1234)
x1 = pymatrixid.rand(2)
assert_allclose(x1, [0.7513823, 0.06861718], rtol=1e-4, atol=1e-8)
np.random.seed(1234)
pymatrixid.seed()
x2 = pymatrixid.rand(2)
np.random.seed(1234)
pymatrixid.seed(np.random.rand(55))
x3 = pymatrixid.rand(2)
assert_allclose(x1, x2)
assert_allclose(x1, x3)
def test_badcall(self):
A = hilbert(5).astype(np.float32)
with assert_raises(ValueError):
pymatrixid.interp_decomp(A, 1e-6, rand=False)
def test_rank_too_large(self):
# svd(array, k) should not segfault
a = np.ones((4, 3))
with assert_raises(ValueError):
pymatrixid.svd(a, 4)
def test_full_rank(self):
eps = 1.0e-12
# fixed precision
A = np.random.rand(16, 8)
k, idx, proj = pymatrixid.interp_decomp(A, eps)
assert_equal(k, A.shape[1])
P = pymatrixid.reconstruct_interp_matrix(idx, proj)
B = pymatrixid.reconstruct_skel_matrix(A, k, idx)
assert_allclose(A, B @ P)
# fixed rank
idx, proj = pymatrixid.interp_decomp(A, k)
        P = pymatrixid.reconstruct_interp_matrix(idx, proj)
|
JensRantil/cabot
|
app/celeryconfig.py
|
Python
|
mit
| 703
| 0
|
import os
from datetime import timedelta
BROKER_URL = os.environ['CELERY_BROKER_URL']
CELERY_IMPORTS = ('app.cabotapp.tasks', )
CELERYBEAT_SCHEDULER = "djcelery.schedulers.DatabaseScheduler"
CELERY_TASK_SERIALIZER = "json"
CELERY_ACCEPT_CONTENT = ['json', 'msgpack', 'yaml']
CELERYBEAT_SCHEDULE = {
'run-all-checks': {
'task': 'app.cabotapp.tasks.run_all_checks',
'schedule': timedelta(seconds=60),
},
'update-shifts': {
        'task': 'app.cabotapp.tasks.update_shifts',
'schedule': timedelta(seconds=1800),
},
'clean-db': {
'task': 'app.cabotapp.tasks.clean_db',
'schedule': timedelta(seconds=60*60*24),
},
}
CELERY_TIMEZONE = 'UTC'
|
germanop/sm
|
tests/test_SMBSR.py
|
Python
|
lgpl-2.1
| 3,181
| 0.006602
|
import unittest
import mock
import SMBSR
import xs_errors
import XenAPI
import vhdutil
import util
import errno
class FakeSMBSR(SMBSR.SMBSR):
uuid = None
sr_ref = None
mountpoint = None
linkpath = None
path = None
session = None
remoteserver = None
def __init__(self, srcmd, none):
self.dconf = srcmd.dconf
self.srcmd = srcmd
self.uuid = 'auuid'
self.sr_ref = 'asr_ref'
self.mountpoint = 'aMountpoint'
self.linkpath = 'aLinkpath'
self.path = 'aPath'
self.remoteserver = 'aRemoteserver'
class Test_SMBSR(unittest.TestCase):
    def create_smbsr(self, sr_uuid='asr_uuid', server='\\aServer', serverpath='/aServerpath',
                     username='aUsername', password='aPassword'):
srcmd = mock.Mock()
srcmd.dconf = {
'server': server,
'serverpath': serverpath,
'username': username,
'password': password
}
srcmd.params = {
'command': 'some_command',
'device_config': {}
}
        smbsr = FakeSMBSR(srcmd, None)
smbsr.load(sr_uuid)
return smbsr
#Attach
@mock.patch('SMBSR.SMBSR.checkmount')
@mock.patch('SMBSR.SMBSR.mount')
def test_attach_smbexception_raises_xenerror(self, mock_mount, mock_checkmount):
smbsr = self.create_smbsr()
        mock_mount.side_effect = SMBSR.SMBException("mount raised SMBException")
        mock_checkmount.return_value = False
try:
smbsr.attach('asr_uuid')
except Exception, exc:
self.assertTrue(isinstance(exc,xs_errors.XenError))
@mock.patch('SMBSR.SMBSR.checkmount')
def test_attach_if_mounted_then_attached(self, mock_checkmount):
smbsr = self.create_smbsr()
        mock_checkmount.return_value = True
smbsr.attach('asr_uuid')
self.assertTrue(smbsr.attached)
#Detach
@mock.patch('SMBSR.SMBSR.unmount')
def test_detach_smbexception_raises_xenerror(self,mock_unmount):
smbsr = self.create_smbsr()
        mock_unmount.side_effect = SMBSR.SMBException("unmount raised SMBException")
try:
smbsr.detach('asr_uuid')
except Exception, exc:
self.assertTrue(isinstance(exc,xs_errors.XenError))
@mock.patch('SMBSR.SMBSR.checkmount',return_value=False)
def test_detach_not_detached_if_not_mounted(self, mock_checkmount):
smbsr = self.create_smbsr()
smbsr.attached = True
        mock_checkmount.return_value = False
smbsr.detach('asr_uuid')
self.assertTrue(smbsr.attached)
#Mount
@mock.patch('util.isdir')
def test_mount_mountpoint_isdir(self, mock_isdir):
        mock_isdir.side_effect = util.CommandException(errno.EIO, "Not a directory")
smbsr = self.create_smbsr()
try:
smbsr.mount()
except Exception, exc:
self.assertTrue(isinstance(exc,SMBSR.SMBException))
def test_mount_mountpoint_empty_string(self):
smbsr = self.create_smbsr()
self.assertRaises(SMBSR.SMBException, smbsr.mount, "")
|
xrg/openerp-server
|
bin/tools/amount_to_text.py
|
Python
|
agpl-3.0
| 7,774
| 0.012223
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
#.apidoc title: Amount to Text
#-------------------------------------------------------------
# French
#-------------------------------------------------------------
to_19_fr = ( 'zéro', 'un', 'deux', 'trois', 'quatre', 'cinq', 'six',
'sept', 'huit', 'neuf', 'dix', 'onze', 'douze', 'treize',
'quatorze', 'quinze', 'seize', 'dix-sept', 'dix-huit', 'dix-neuf' )
tens_fr = ( 'vingt', 'trente', 'quarante', 'Cinquante', 'Soixante', 'Soixante-dix', 'Quatre-vingts', 'Quatre-vingt Dix')
denom_fr = ( '',
'Mille', 'Millions', 'Milliards', 'Billions', 'Quadrillions',
'Quintillion', 'Sextillion', 'Septillion', 'Octillion', 'Nonillion',
'Décillion', 'Undecillion', 'Duodecillion', 'Tredecillion', 'Quattuordecillion',
'Sexdecillion', 'Septendecillion', 'Octodecillion', 'Icosillion', 'Vigintillion' )
def _convert_nn_fr(val):
""" convert a value < 100 to French
"""
if val < 20:
return to_19_fr[val]
for (dcap, dval) in ((k, 20 + (10 * v)) for (v, k) in enumerate(tens_fr)):
if dval + 10 > val:
if val % 10:
return dcap + '-' + to_19_fr[val % 10]
return dcap
def _convert_nnn_fr(val):
""" convert a value < 1000 to french
special cased because it is the level that kicks
off the < 100 special case. The rest are more general. This also allows you to
get strings in the form of 'forty-five hundred' if called directly.
"""
word = ''
(mod, rem) = (val % 100, val // 100)
if rem > 0:
word = to_19_fr[rem] + ' Cent'
if mod > 0:
word = word + ' '
if mod > 0:
        word = word + _convert_nn_fr(mod)
return word
def french_number(val):
if val < 100:
return _convert_nn_fr(val)
if val < 1000:
return _convert_nnn_fr(val)
for (didx, dval) in ((v - 1, 1000 ** v) for v in range(len(denom_fr))):
if dval > val:
mod = 1000 ** didx
l = val // mod
r = val - (l * mod)
ret = _convert_nnn_fr(l) + ' ' + denom_fr[didx]
if r > 0:
ret = ret + ', ' + french_number(r)
            return ret
def amount_to_text_fr(number, currency):
number = '%.2f' % number
units_name = currency
list = str(number).split('.')
start_word = french_number(abs(int(list[0])))
end_word = french_number(int(list[1]))
cents_number = int(list[1])
cents_name = (cents_number > 1) and ' Cents' or ' Cent'
final_result = start_word +' '+units_name+' '+ end_word +' '+cents_name
return final_result
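# Example: amount_to_text_fr(1654.00, 'euros') returns
# 'un Mille, six Cent Cinquante-quatre euros zéro  Cent'
# (the double space comes from the leading blank already in cents_name).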
#-------------------------------------------------------------
# Dutch
#-------------------------------------------------------------
to_19_nl = ( 'Nul', 'Een', 'Twee', 'Drie', 'Vier', 'Vijf', 'Zes',
'Zeven', 'Acht', 'Negen', 'Tien', 'Elf', 'Twaalf', 'Dertien',
'Veertien', 'Vijftien', 'Zestien', 'Zeventien', 'Achttien', 'Negentien' )
tens_nl = ( 'Twintig', 'Dertig', 'Veertig', 'Vijftig', 'Zestig', 'Zeventig', 'Tachtig', 'Negentig')
denom_nl = ( '',
'Duizend', 'Miljoen', 'Miljard', 'Triljoen', 'Quadriljoen',
'Quintillion', 'Sextiljoen', 'Septillion', 'Octillion', 'Nonillion',
'Decillion', 'Undecillion', 'Duodecillion', 'Tredecillion', 'Quattuordecillion',
'Sexdecillion', 'Septendecillion', 'Octodecillion', 'Novemdecillion', 'Vigintillion' )
def _convert_nn_nl(val):
""" convert a value < 100 to Dutch
"""
if val < 20:
return to_19_nl[val]
for (dcap, dval) in ((k, 20 + (10 * v)) for (v, k) in enumerate(tens_nl)):
if dval + 10 > val:
if val % 10:
return dcap + '-' + to_19_nl[val % 10]
return dcap
def _convert_nnn_nl(val):
""" convert a value < 1000 to Dutch
special cased because it is the level that kicks
off the < 100 special case. The rest are more general. This also allows you to
get strings in the form of 'forty-five hundred' if called directly.
"""
word = ''
(mod, rem) = (val % 100, val // 100)
if rem > 0:
word = to_19_nl[rem] + ' Honderd'
if mod > 0:
word = word + ' '
if mod > 0:
word = word + _convert_nn_nl(mod)
return word
def dutch_number(val):
if val < 100:
return _convert_nn_nl(val)
if val < 1000:
return _convert_nnn_nl(val)
for (didx, dval) in ((v - 1, 1000 ** v) for v in range(len(denom_nl))):
if dval > val:
mod = 1000 ** didx
l = val // mod
r = val - (l * mod)
ret = _convert_nnn_nl(l) + ' ' + denom_nl[didx]
if r > 0:
ret = ret + ', ' + dutch_number(r)
return ret
def amount_to_text_nl(number, currency):
number = '%.2f' % number
units_name = currency
list = str(number).split('.')
start_word = dutch_number(int(list[0]))
end_word = dutch_number(int(list[1]))
cents_number = int(list[1])
cents_name = (cents_number > 1) and 'cent' or 'cent'
final_result = start_word +' '+units_name+' '+ end_word +' '+cents_name
return final_result
#-------------------------------------------------------------
# Generic functions
#-------------------------------------------------------------
_translate_funcs = {'fr' : amount_to_text_fr, 'nl' : amount_to_text_nl}
def add_amount_to_text_function(lang, func):
_translate_funcs[lang] = func
#TODO: we should use the country AND language (ex: septante VS soixante dix)
#TODO: we should use en by default, but the translation func is yet to be implemented
def amount_to_text(nbr, lang='fr', currency='euro'):
""" Converts an integer to its textual representation, using the language set in the context if any.
Example::
1654: mille six cent cinquante-quatre.
"""
# if nbr > 1000000:
##TODO: use logger
# print "WARNING: number too large '%d', can't translate it!" % (nbr,)
# return str(nbr)
if not _translate_funcs.has_key(lang):
#TODO: use logger
print "WARNING: no translation function found for lang: '%s'" % (lang,)
#TODO: (default should be en) same as above
lang = 'fr'
return _translate_funcs[lang](abs(nbr), currency)
if __name__=='__main__':
from sys import argv
lang = 'nl'
if len(argv) < 2:
for i in range(1,200):
print i, ">>", amount_to_text(i, lang)
for i in range(200,999999,139):
print i, ">>", amount_to_text(i, lang)
else:
print amount_to_text(int(argv[1]), lang)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
nicolaszein/chat
|
chat/models.py
|
Python
|
bsd-3-clause
| 2,028
| 0.00642
|
# -*- coding:utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
from django.utils.text import slugify
from django.contrib.auth.models import User
ROOM_TYPE = (
(1, u'Privado'),
(2, u'Grupo'),
)
class Room(models.Model):
name = models.TextField(u'Nome')
label = models.SlugField(u'Label', unique=True, blank=True, null=True)
type = models.SmallIntegerField(u'Tipo', choices=ROOM_TYPE)
users = models.ManyToManyField(User, verbose_name=u'Usuários', related_name='room_users')
@models.permalink
def get_absolute_url(self):
return 'chat_room', (self.label, )
def save(self, *args, **kwargs):
self.label = slugify(self.name)
return super(Room, self).save(*args, **kwargs)
def __unicode__(self):
return self.label
class Message(models.Model):
room = models.ForeignKey(Room, verbose_name=u'Sala', related_name='messages')
    handle = models.ForeignKey(User, verbose_name=u'Usuário', related_name='message_user')
message = models.TextField()
timestamp = models.DateTimeField(default=timezone.now, db_index=True)
def __unicode__(self):
return '[{timestamp}] {handle}: {message}'.format(**self.as_dict())
@property
def formatted_timestamp(self):
return self.timestamp.strftime('%d/%m/%Y %-I:%M')
def as_dict(self):
        return {'handle': "%s - %s" % (self.handle.id, self.handle.get_full_name()), 'message': self.message, 'timestamp': self.formatted_timestamp}
SEXO = (
(1, u'Masculino'),
(2, u'Feminino'),
)
class UserProfile(models.Model):
user = models.ForeignKey(User, verbose_name=u'Usuário', related_name='profile_user')
avatar = models.ImageField(u'Foto', max_length=255, upload_to='user_profile', blank=True, null=True)
data_nascimento = models.DateField(u'Data Nascimento')
sexo = models.IntegerField(u'Sexo', choices=SEXO)
def __unicode__(self):
return unicode(self.user.first_name)
|
maximeolivier/pyCAF
|
pycaf/architecture/devices/__init__.py
|
Python
|
gpl-3.0
| 378
| 0
|
"""
Import all modules and packages in the serverFeatures package
['account', 'connection', 'interface', 'package', 'process']
"""
from pycaf.architecture.devices.server import Server
from pycaf.architecture.devices.lists.serverList import ServerList
from pycaf.architecture.devices.server_windows import ServerWindows
import pycaf.architecture.devices.server_features.lists
|
BrainTech/openbci
|
obci/control/peer/configured_client.py
|
Python
|
gpl-3.0
| 3,186
| 0.003766
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from multiplexer.clients import connect_client
from obci.control.peer.peer_control import PeerControl, ConfigNotReadyError
import obci.control.common.config_message as cmsg
from obci.utils.openbci_logging import get_logger, log_crash
import sys
class ConfiguredClient(object):
@log_crash
def __init__(self, addresses, type, external_config_file=None):
self.conn = connect_client(addresses=addresses, type=type)
self.ready_to_work = False
self.external_config_file = external_config_file
self.config = PeerControl(peer=self)
self.logger = get_logger(self.config.peer_id,
file_level=self.get_param('file_log_level'),
stream_level=self.get_param('console_log_level'),
mx_level=self.get_param('mx_log_level'),
sentry_level=self.get_param('sentry_log_level'),
conn=self.conn,
log_dir=self.get_param('log_dir'),
obci_peer=self)
self.config.logger = self.logger
self.config.connection = self.conn
self.config.peer_validate_params = self.validate_params
self.config.peer_params_change = self.params_changed
result, details = self.config.initialize_config(self.conn)
if not result:
self.logger.critical(
'Config initialisation FAILED: {0}'.format(details))
sys.exit(1)
else:
            self.validate_params(self.config.param_values())
@log_crash
def get_param(self, param_name):
return self.config.get_param(param_name)
@log_crash
def set_param(self, param_name, param_value):
        self.config.set_param(param_name, param_value)
@log_crash
def ready(self):
self.ready_to_work = True
self.config.register_config(self.conn)
self.config.send_peer_ready(self.conn)
def validate_params(self, params):
self.logger.info("VALIDATE PARAMS, {0}".format(params))
return True
@log_crash
def params_changed(self, params):
self.logger.info("PARAMS CHAnGED, {0}".format(params))
return True
def _param_vals(self):
vals = self.config.param_values()
if 'channels_info' in vals:
vals['channels_info'] = '[...truncated...]'
return vals
def _crash_extra_description(self, exc=None):
return "peer %s config params: %s" % (self.config.peer_id,
self._param_vals())
def _crash_extra_data(self, exc=None):
"""This method is called when the peer crashes, to provide additional
peer data to the crash report.
Should return a dictionary."""
return {
"config_params" : self._param_vals(),
"peer_id": self.config.peer_id,
"experiment_uuid": self.get_param("experiment_uuid")
}
def _crash_extra_tags(self, exception=None):
return {'obci_part' : 'obci',
"experiment_uuid": self.get_param("experiment_uuid")}
|
matteoredaelli/scrapy_tyres
|
scrapy_tyres/spiders/auto-doc_it.py
|
Python
|
gpl-3.0
| 4,693
| 0.012785
|
# -*- coding: utf-8 -*-
# scrapy_web
# Copyright (C) 2016-2017 Matteo.Redaelli@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# usage:
# scrapy crawl auto-doc.it -t jsonlines -o data/a.json
import scrapy
import re
class AutoDocIt(scrapy.Spider):
name = "auto-doc.it"
def __init__(self, width="195", height="65", diameter="15", *args, **kwargs):
super(AutoDocIt, self).__init__(*args, **kwargs)
self.allowed_domains = ["auto-doc.it"]
#self.start_urls = ['http://www.auto-doc.it/pneumatici?Width=%s&CrossSections=%s&Size=%s&Season=&page=1' % (width, height, diameter)]
self.start_urls = ['http://www.auto-doc.it/pneumatici/%d-pollici?page=1' % n for n in [10,12,13,14,15,16,17,18,19,20,21,22,23,24,40,365,390,415]]
def parse(self, response):
for entry in response.xpath('//li[@class="ovVisLi"]'):
url = entry.xpath('.//div[@class="image"]/a/@href').extract_first()
manufacturer_number = entry.xpath('.//div[@class="description"]//span[@style="font-size: 12px;"]/text()').extract_first().replace("MPN: ","")
##brand
brand = entry.xpath('.//img[@class="tires_item_brand"]/@src').extract_first()
match = re.match(".+/(.+)\.png$", brand)
if match:
brand = match.group(1)
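            # Fallback (comment added for clarity): when the brand image is a
            # generic placeholder whose filename contains "IMAGE", recover the
            # brand from the first segment of the product URL slug instead.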
            if re.search("IMAGE", brand, flags=re.IGNORECASE):
                m = re.match(".+/(.+)-.+-.+$", url)
                if m:
                    brand = m.group(1).replace("-", " ")
ean = entry.xpath('.//span[@class="article_number"]/text()').extract_first().replace("EAN: ","")
product = entry.xpath('.//div[@class="name"]/a/text()').extract_first()
            size = entry.xpath('.//div[@class="nr"]/text()').extract_first()
description = "%s %s" % (product, size)
p = re.compile(brand, re.IGNORECASE)
            # the IGNORECASE flag is already compiled into the pattern;
            # re.sub's fourth positional argument is count, not flags
            product = re.sub(p, "", product)
price = entry.xpath('.//p[@class="actual_price"]/text()').extract_first()
picture_url = entry.xpath('.//img[@class="tires_item_image "]/@src').extract_first()
## estract eu labels
            eu_fuel = entry.xpath('.//div[@class="eu_re"]//li[2]/img/@src').extract_first()
eu_wet = entry.xpath('.//div[@class="eu_re"]//li[4]/img/@src').extract_first()
eu_noise = entry.xpath('.//div[@class="eu_re"]//li[6]/text()').extract_first()
if eu_fuel:
m=re.match(".+-letter-(.+)\.png",eu_fuel)
if m:
eu_fuel = m.group(1)
else:
eu_fuel = None
if eu_wet:
m=re.match(".+-letter-(.+)\.png",eu_wet)
if m:
eu_wet = m.group(1)
else:
eu_wet = None
details = {
"description": description,
"ean": ean,
"manufacturer_number": manufacturer_number,
"price": price,
"brand": brand,
"product": product,
"size": size,
"picture_url": picture_url,
"url": url,
"label_fuel": eu_fuel,
"label_wet": eu_wet,
"label_noise": eu_noise
}
keys = entry.xpath('.//div[@class="description"]//div[@class="box"]//ul/li/span[@class="lc"]/text()').extract()
## removing : at the end
keys = map(lambda x: x.replace(":","").lower(), keys)
values = entry.xpath('.//div[@class="description"]//div[@class="box"]//ul/li/span[@class="rc"]/text()').extract()
details2 = zip(keys, values)
details.update(details2)
yield details
next_page = response.xpath('//span[@class="next"]/a/@href').extract_first()
        if next_page is not None:
yield scrapy.Request(next_page, callback=self.parse)
|
nasonfish/mammon
|
mammon/utility.py
|
Python
|
isc
| 7,875
| 0.002159
|
# mammon - utility/third-party stuff, each thing has it's own header and provenance
# information.
# CaseInsensitiveDict from requests.
#
# Copyright 2015 Kenneth Reitz
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
import collections
class CaseInsensitiveDict(collections.MutableMapping):
"""
A case-insensitive ``dict``-like object.
Implements all methods and operations of
``collections.MutableMapping`` as well as dict's ``copy``. Also
provides ``lower_items``.
All keys are expected to be strings. The structure remembers the
case of the last key to be set, and ``iter(instance)``,
``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()``
will contain case-sensitive keys. However, querying and contains
testing is case insensitive::
cid = CaseInsensitiveDict()
cid['Accept'] = 'application/json'
cid['aCCEPT'] == 'application/json' # True
list(cid) == ['Accept'] # True
For example, ``headers['content-encoding']`` will return the
value of a ``'Content-Encoding'`` response header, regardless
of how the header name was originally stored.
If the constructor, ``.update``, or equality comparison
operations are given keys that have equal ``.casefold()``s, the
behavior is undefined.
"""
def __init__(self, data=None, **kwargs):
self._store = dict()
if data is None:
data = {}
self.update(data, **kwargs)
def __setitem__(self, key, value):
# Use the lowercased key for lookups, but store the actual
# key alongside the value.
self._store[key.casefold()] = (key, value)
def __getitem__(self, key):
return self._store[key.casefold()][1]
def __delitem__(self, key):
del self._store[key.casefold()]
def __iter__(self):
return (casedkey for casedkey, mappedvalue in self._store.values())
def __len__(self):
return len(self._store)
def lower_items(self):
"""Like iteritems(), but with all lowercase keys."""
return (
(lowerkey, keyval[1])
for (lowerkey, keyval)
in self._store.items()
)
def __eq__(self, other):
if isinstance(other, collections.Mapping):
other = CaseInsensitiveDict(other)
else:
return NotImplemented
# Compare insensitively
return dict(self.lower_items()) == dict(other.lower_items())
# Copy is required
def copy(self):
return CaseInsensitiveDict(self._store.values())
def __repr__(self):
return str(dict(self.items()))
# a modified ExpiringDict implementation
#
# Copyright 2013-2015 Rackspace
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class ExpiringDict(collections.OrderedDict):
def __init__(self, max_len, max_age_seconds):
collections.OrderedDict.__init__(self)
self.max_len = max_len
        self.max_age = max_age_seconds
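    # Design note (comment added for clarity): each entry is stored as a
    # (value, timestamp) tuple under the casefold()ed key; expired entries
    # are removed lazily, when they are next looked up, and the oldest entry
    # is evicted once max_len is reached (see __setitem__ and put below).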
def __contains__(self, key):
try:
item = collections.OrderedDict.__getitem__(self, key.casefold())
if time.time() - item[1] < self.max_age:
return True
else:
del self[key.casefold()]
except KeyError:
pass
return False
    def __getitem__(self, key, with_age=False, max_age=None):
item = collections.OrderedDict.__getitem__(self, key.casefold())
item_age = time.time() - item[1]
if not max_age:
max_age = self.max_age
if item_age < max_age:
if with_age:
return item[0], item_age
else:
return item[0]
else:
del self[key.casefold()]
raise KeyError(key.casefold())
def __setitem__(self, key, value):
if len(self) == self.max_len:
self.popitem(last=False)
collections.OrderedDict.__setitem__(self, key.casefold(), (value, time.time()))
def pop(self, key, default=None):
try:
item = collections.OrderedDict.__getitem__(self, key.casefold())
del self[key.casefold()]
return item[0]
except KeyError:
return default
def get(self, key, default=None, with_age=False, max_age=None):
try:
return self.__getitem__(key.casefold(), with_age, max_age)
except KeyError:
if with_age:
return default, None
else:
return default
def put(self, key, value, ts=None):
if len(self) == self.max_len:
self.popitem(last=False)
if not ts:
ts = time.time()
collections.OrderedDict.__setitem__(self, key.casefold(), (value, ts))
def items(self):
r = []
for key in self:
try:
r.append((key, self[key]))
except KeyError:
pass
return r
def values(self):
r = []
for key in self:
try:
r.append(self[key])
except KeyError:
pass
return r
def fromkeys(self):
raise NotImplementedError()
def iteritems(self):
raise NotImplementedError()
def itervalues(self):
raise NotImplementedError()
def viewitems(self):
raise NotImplementedError()
def viewkeys(self):
raise NotImplementedError()
def viewvalues(self):
raise NotImplementedError()
# fast irc casemapping validation
# part of mammon, under mammon license.
import string
special = '_-|^{}[]`'
nick_allowed_chars = string.ascii_letters + string.digits + special
nick_allowed_chars_tbl = str.maketrans('', '', nick_allowed_chars)
first_nick_allowed_chars = string.ascii_letters + special
def validate_nick(nick):
if nick[0] not in first_nick_allowed_chars:
return False
remainder = nick[1:]
badchars = remainder.translate(nick_allowed_chars_tbl)
return badchars == ''
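# How the tables work (comment added for clarity): str.maketrans('', '', chars)
# builds a *deletion* table, so translate() strips every allowed character and
# leaves only the disallowed ones; an empty result means the name is valid, e.g.
#     'bad nick!'.translate(nick_allowed_chars_tbl)  ->  ' !'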
chan_allowed_chars = string.ascii_letters + string.digits + special + '`~!@#$%^&*()+=|\\<>/?'
chan_allowed_chars_tbl = str.maketrans('', '', chan_allowed_chars)
def validate_chan(chan_name):
if chan_name[0] != '#':
return False
badchars = chan_name[1:].translate(chan_allowed_chars_tbl)
return badchars == ''
def uniq(input):
output = []
for x in input:
if x not in output:
output.append(x)
return output
class UserHost:
def __init__(self, nuh):
self.nuh = nuh
# XXX - put try:except on these just in case doesn't exist
@property
def nickname(self):
return self.nuh.split('!')[0]
@property
def username(self):
return self.nuh.split('!')[1].split('@')[0]
@property
def hostname(self):
        return self.nuh.split('@')[1]
|
yaoyansi/mymagicbox
|
common/mymagicbox/AETemplateBase.py
|
Python
|
mit
| 2,176
| 0.011029
|
"""
To create an Attribute Editor template using python, do the following:
1. create a subclass of `uitypes.AETemplate`
2. set its ``_nodeType`` class attribute to the name of the desired node type, or name the class using the
convention ``AE<nodeType>Template``
3. import the module
AETemplates which do not meet one of the two requirements listed in step 2 will be ignored. To ensure that your
Template's node type is being detected correctly, use the ``AETemplate.nodeType()`` class method::
import AETemplates
AETemplates.AEmib_amb_occlusionTemplate.nodeType()
As a convenience, when pymel is imported it will automatically import the module ``AETemplates``, if it exists,
thereby causing any AETemplates within it or its sub-modules to be registered. Be sure to import pymel
or modules containing your ``AETemplate`` classes before opening the Attribute Editor for the node types in question.
To check which python templates are loaded::
from pymel.core.uitypes import AELoader
print AELoader.loadedTemplates()
The example below demonstrates the simplest case, which is the first. It provides a layout for the mib_amb_occlusion
mental ray shader.
"""
from pymel.core import *
class LocalizedTemplate(ui.AETemplate):
"automatically apply language localizations to template arguments"
def _applyLocalization(self, name):
if name is not None and len(name)>2 and name[0] == 'k' and name[1].isupper():
return mel.uiRes('m_' + self.__class__.__name__ + '.' + name)
return name
def addControl(self, control, label=None, **kwargs):
label = self._applyLocalization(label)
ui.AETemplate.addControl(self, control, label=label, **kwargs)
def beginLayout(self, name, collapse=True):
name = self._applyLocalization(name)
ui.AETemplate.beginLayout(self, name, collapse=collapse)
class mmbTemplateBase(LocalizedTemplate):
def __init__(self, nodeName):
LocalizedTemplate.__init__(self,nodeName)
self.beginScrollLayout()
self.buildBody(nodeName)
self.endScrollLayout()
def AEswatchDisplay(self, nodeName):
mel.AEswatchDisplay(nodeName)
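# Usage sketch (illustrative, not part of mymagicbox): following the steps in
# the module docstring, a template for a hypothetical node type 'myNode' could
# subclass the base and be discovered through its class name:
#
# class AEmyNodeTemplate(mmbTemplateBase):
#     def buildBody(self, nodeName):
#         self.beginLayout('Common Attributes', collapse=False)
#         self.addControl('myAttr', label='My Attribute')
#         self.endLayout()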
|
doublehou/BiliDan
|
bilidan.py
|
Python
|
mit
| 23,014
| 0.003825
|
#!/usr/bin/env python3
# Biligrab-Danmaku2ASS
#
# Author: Beining@ACICFG https://github.com/cnbeining
# Author: StarBrilliant https://github.com/m13253
#
# Biligrab is licensed under MIT licence
# Permission has been granted for the use of Danmaku2ASS in Biligrab
#
# Copyright (c) 2014
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the “Software”), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sys
if sys.version_info < (3, 0):
sys.stderr.write('ERROR: Python 3.0 or newer version is required.\n')
sys.exit(1)
import argparse
import gzip
import json
import hashlib
import io
import logging
import math
import os
import re
import subprocess
import tempfile
import urllib.parse
import urllib.request
import xml.dom.minidom
import zlib
USER_AGENT_PLAYER = 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:6.0.2) Gecko/20100101 Firefox/6.0.2 Fengfan/1.0'
USER_AGENT_API = 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:6.0.2) Gecko/20100101 Firefox/6.0.2 Fengfan/1.0'
APPKEY = '85eb6835b0a1034e' # The same key as in original Biligrab
APPSEC = '2ad42749773c441109bdc0191257a664' # Do not abuse please, get one yourself if you need
BILIGRAB_HEADER = {'User-Agent': USER_AGENT_API, 'Cache-Control': 'no-cache', 'Pragma': 'no-cache'}
def biligrab(url, *, debug=False, verbose=False, media=None, cookie=None, quality=None, source=None, keep_fps=False, mpvflags=[], d2aflags={}):
url_get_metadata = 'http://api.bilibili.com/view?'
    url_get_comment = 'http://comment.bilibili.com/%(cid)s.xml'
if source == 'overseas':
        url_get_media = 'http://interface.bilibili.com/v_cdn_play?'
else:
url_get_media = 'http://interface.bilibili.com/playurl?'
def parse_url(url):
'''Parse a bilibili.com URL
Return value: (aid, pid)
'''
regex = re.compile('http:/*[^/]+/video/av(\\d+)(/|/index.html|/index_(\\d+).html)?(\\?|#|$)')
regex_match = regex.match(url)
if not regex_match:
raise ValueError('Invalid URL: %s' % url)
aid = regex_match.group(1)
pid = regex_match.group(3) or '1'
return aid, pid
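    # e.g. parse_url('http://www.bilibili.com/video/av123/index_2.html')
    # returns ('123', '2'); pid defaults to '1' when the index suffix is absent.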
def fetch_video_metadata(aid, pid):
'''Fetch video metadata
Arguments: aid, pid
Return value: {'cid': cid, 'title': title}
'''
req_args = {'type': 'json', 'appkey': APPKEY, 'id': aid, 'page': pid}
req_args['sign'] = bilibili_hash(req_args)
_, response = fetch_url(url_get_metadata+urllib.parse.urlencode(req_args), user_agent=USER_AGENT_API, cookie=cookie)
try:
response = dict(json.loads(response.decode('utf-8', 'replace')))
except (TypeError, ValueError):
raise ValueError('Can not get \'cid\' from %s' % url)
if 'error' in response:
logging.error('Error message: %s' % response.get('error'))
if 'cid' not in response:
raise ValueError('Can not get \'cid\' from %s' % url)
return response
def get_media_urls(cid, *, fuck_you_bishi_mode=False):
'''Request the URLs of the video
Arguments: cid
Return value: [media_urls]
'''
if source in {None, 'overseas'}:
user_agent = USER_AGENT_API if not fuck_you_bishi_mode else USER_AGENT_PLAYER
req_args = {'appkey': APPKEY, 'cid': cid}
if quality is not None:
req_args['quality'] = quality
req_args['sign'] = bilibili_hash(req_args)
_, response = fetch_url(url_get_media+urllib.parse.urlencode(req_args), user_agent=user_agent, cookie=cookie)
media_urls = [str(k.wholeText).strip() for i in xml.dom.minidom.parseString(response.decode('utf-8', 'replace')).getElementsByTagName('durl') for j in i.getElementsByTagName('url')[:1] for k in j.childNodes if k.nodeType == 4]
if not fuck_you_bishi_mode and media_urls == ['http://static.hdslb.com/error.mp4']:
logging.error('Detected User-Agent block. Switching to fuck-you-bishi mode.')
return get_media_urls(cid, fuck_you_bishi_mode=True)
elif source == 'html5':
req_args = {'aid': aid, 'page': pid}
logging.warning('HTML5 video source is experimental and may not always work.')
_, response = fetch_url('http://www.bilibili.com/m/html5?'+urllib.parse.urlencode(req_args), user_agent=USER_AGENT_PLAYER)
response = json.loads(response.decode('utf-8', 'replace'))
media_urls = [dict.get(response, 'src')]
if not media_urls[0]:
media_urls = []
if not fuck_you_bishi_mode and media_urls == ['http://static.hdslb.com/error.mp4']:
logging.error('Failed to request HTML5 video source. Retrying.')
return get_media_urls(cid, fuck_you_bishi_mode=True)
elif source == 'flvcd':
req_args = {'kw': url}
if quality is not None:
if quality == 3:
req_args['quality'] = 'high'
elif quality >= 4:
req_args['quality'] = 'super'
_, response = fetch_url('http://www.flvcd.com/parse.php?'+urllib.parse.urlencode(req_args), user_agent=USER_AGENT_PLAYER)
resp_match = re.search('<input type="hidden" name="inf" value="([^"]+)"', response.decode('gbk', 'replace'))
if resp_match:
media_urls = resp_match.group(1).rstrip('|').split('|')
else:
media_urls = []
elif source == 'bilipr':
req_args = {'cid': cid}
quality_arg = '1080' if quality is not None and quality >= 4 else '720'
logging.warning('BilibiliPr video source is experimental and may not always work.')
resp_obj, response = fetch_url('http://pr.lolly.cc/P%s?%s' % (quality_arg, urllib.parse.urlencode(req_args)), user_agent=USER_AGENT_PLAYER)
if resp_obj.getheader('Content-Type', '').startswith('text/xml'):
media_urls = [str(k.wholeText).strip() for i in xml.dom.minidom.parseString(response.decode('utf-8', 'replace')).getElementsByTagName('durl') for j in i.getElementsByTagName('url')[:1] for k in j.childNodes if k.nodeType == 4]
else:
media_urls = []
else:
assert source in {None, 'overseas', 'html5', 'flvcd', 'bilipr'}
if len(media_urls) == 0 or media_urls == ['http://static.hdslb.com/error.mp4']:
raise ValueError('Can not get valid media URLs.')
return media_urls
def get_video_size(media_urls):
'''Determine the resolution of the video
Arguments: [media_urls]
Return value: (width, height)
'''
try:
if media_urls[0].startswith('http:') or media_urls[0].startswith('https:'):
ffprobe_command = ['ffprobe', '-icy', '0', '-loglevel', 'repeat+warning' if verbose else 'repeat+error', '-print_format', 'json', '-select_streams', 'v', '-show_streams', '-timeout', '60000000', '-user-agent', USER_AGENT_PLAYER, '--', media_urls[0]]
else:
ffprobe_command = ['ffprobe', '-loglevel', 'repeat+warning' if verbose else 'repeat+error', '-print_format', 'json', '-select_streams', 'v', '-s
|
pylanglois/uwsa
|
uwsas/commands/command_manager.py
|
Python
|
bsd-3-clause
| 619
| 0.004847
|
#!/usr/bin/env python
# coding=UTF-8
__author__ = "Pierre-Yves Langlois"
__copyright__ = "https://github.com/pylanglois/uwsa/blob/master/LICENCE"
__credits__ = ["Pierre-Yves Langlois"]
__license__ = "BSD"
__maintainer__ = "Pierre-Yves Langlois"
from uwsas.common import *
from uwsas.commands.abstract_command import AbstractCommand
class CommandManager(AbstractCommand):
NAME = 'CommandManager'
def __init__(self):
AbstractCommand.__init__(self)
self.help = t(""
|
"
Usage: uwsa cmd param
where cmd in %s
""")
def get_log_name(self):
return 'uwsas'
cmanager = CommandManager()
|
superberny70/plugin.video.pelisalacarta-3-9X
|
pelisalacarta/channels/nolomires.py
|
Python
|
gpl-3.0
| 24,186
| 0.027578
|
# -*- coding: iso-8859-1 -*-
#------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Canal para nolomires.com by Bandavi
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#------------------------------------------------------------
import urlparse,urllib2,urllib,re
import os
import sys
import xbmc
import xbmcgui
import xbmcplugin
from core import scrapertools
from core import config
from core import logger
from platformcode.xbmc import xbmctools
from core.item import Item
from servers import servertools
from servers import vk
from pelisalacarta import buscador
__channel__ = "nolomires"
__adult__ = "false"
__category__ = "F"
__type__ = "xbmc"
__title__ = "NoloMires"
__thumbnail__ = ""
__language__ = "ES"
DEBUG = config.get_setting("debug")
# This allows the channel to run in emulated mode
try:
pluginhandle = int( sys.argv[ 1 ] )
except:
pluginhandle = ""
# Log the channel startup
logger.info("[nolomires.py] init")
DEBUG = True
def mainlist(params,url,category):
logger.info("[nolomires.py] mainlist")
    # Add entries to the XBMC listing
xbmctools.addnewfolder( __channel__ , "search" , category , "Buscar","http://www.nolomires.com/","","")
xbmctools.addnewfolder( __channel__ , "LastSearch" , category , "Peliculas Buscadas Recientemente","http://www.nolomires.com/","","")
xbmctools.addnewfolder( __channel__ , "listvideosMirror" , category , "Ultimos Estrenos","http://www.nolomires.com/","","")
#xbmctools.addnewfolder( __channel__ , "TagList" , category , "Tag de Estrenos por año" ,"http://www.nolomires.com/","","")
xbmctools.addnewfolder( __channel__ , "MostWatched" , category , "Peliculas Mas Vistas","http://www.nolomires.com/","","")
xbmctools.addnewfolder( __channel__ , "ListaCat" , category , "Listado por Categorias" ,"http://www.nolomires.com/","","")
xbmctools.addnewfolder( __channel__ , "listvideos" , category , "Ultimas Películas Añadidas" ,"http://www.nolomires.com/category/peliculas-en-nolomires/","","")
# Label (top-right)...
xbmcplugin.setPluginCategory( handle=int( sys.argv[ 1 ] ), category=category )
# Disable sorting...
xbmcplugin.addSortMethod( handle=int( sys.argv[ 1 ] ), sortMethod=xbmcplugin.SORT_METHOD_NONE )
# End of directory...
xbmcplugin.endOfDirectory( handle=int( sys.argv[ 1 ] ), succeeded=True )
def searchresults(params,Url,category):
logger.info("[nolomires.py] s
|
earch")
buscador.salvar_busquedas(params,Url,category)
Url = Url.replace(" ", "+")
    searchUrl = "http://www.nolomires.com/?s="+Url+"&x=15&y=19"
listvideos(params,searchUrl,category)
def search(params,Url,category):
buscador.listar_busquedas(params,Url,category)
def ListaCat(params,url,category):
logger.info("[nolomires.py] ListaCat")
xbmctools.addnewfolder( __channel__ ,"listvideos", category , "Acción","http://www.nolomires.com/category/peliculas-de-accion/","","")
xbmctools.addnewfolder( __channel__ ,"listvideos", category , "Animado","http://www.nolomires.com/category/peliculas-de-animacion/","","")
xbmctools.addnewfolder( __channel__ ,"listvideos", category , "Anime","http://www.nolomires.com/category/anime/","","")
xbmctools.addnewfolder( __channel__ ,"listvideos", category , "Aventura","http://www.nolomires.com/category/peliculas-de-aventura/","","")
xbmctools.addnewfolder( __channel__ ,"listvideos", category , "Bèlico Guerra ","http://www.nolomires.com/category/peliculas-de-guerra/","","")
xbmctools.addnewfolder( __channel__ ,"listvideos", category , "Ciencia-Ficción","http://www.nolomires.com/category/peliculas-de-ciencia-ficcion/","","")
#xbmctools.addnewfolder( __channel__ ,"listvideosMirror", category , "Clásicos","http://www.nolomires.com/category/peliculasclasicos/","","")
xbmctools.addnewfolder( __channel__ ,"listvideos", category , "Comedia","http://www.nolomires.com/category/peliculas-de-comedia/","","")
xbmctools.addnewfolder( __channel__ ,"listvideos", category , "Documentales","http://www.nolomires.com/category/peliculas-sobre-documentales/","","")
#xbmctools.addnewfolder( __channel__ ,"listvideosMirror", category , "Destacado","http://www.nolomires.com/category/peliculasdestacado/","","")
xbmctools.addnewfolder( __channel__ ,"listvideos", category , "Documentales Online","http://www.nolomires.com/category/documentales-online-completos/","","")
xbmctools.addnewfolder( __channel__ ,"listvideos", category , "Drama","http://www.nolomires.com/category/peliculas-de-drama/","","")
xbmctools.addnewfolder( __channel__ ,"listvideos", category , "Entretenimiento","http://www.nolomires.com/category/entretenimiento/","","")
xbmctools.addnewfolder( __channel__ ,"listvideos", category , "Estrenos","http://www.nolomires.com/category/ultimos-extrenos/","","")
xbmctools.addnewfolder( __channel__ ,"listvideos", category , "General","http://www.nolomires.com/category/general/","","")
xbmctools.addnewfolder( __channel__ ,"listvideos", category , "Intriga","http://www.nolomires.com/category/peliculas-de-intriga/","","")
xbmctools.addnewfolder( __channel__ ,"listvideos", category , "Musicales","http://www.nolomires.com/category/peliculas-musicales/","","")
#xbmctools.addnewfolder( __channel__ ,"listvideos", category , "Peliculas HD","http://www.nolomires.com/category/peliculaspeliculas-hd-categorias/","","")
xbmctools.addnewfolder( __channel__ ,"listvideos", category , "Romance","http://www.nolomires.com/category/peliculas-sobre-romance/","","")
xbmctools.addnewfolder( __channel__ ,"listvideos", category , "Suspenso","http://www.nolomires.com/category/peliculas-de-suspenso/","","")
xbmctools.addnewfolder( __channel__ ,"listvideos", category , "Terror","http://www.nolomires.com/category/peliculas-de-terror/","","")
xbmctools.addnewfolder( __channel__ ,"listvideos", category , "Thriller","http://www.nolomires.com/category/peliculas-de-thriller/","","")
xbmctools.addnewfolder( __channel__ ,"listvideos", category , "Todas las Peliculas","http://www.nolomires.com/category/peliculas-en-nolomires/","","")
    # Set the title, disable sorting, and close the directory
xbmcplugin.setPluginCategory( handle=pluginhandle, category=category )
xbmcplugin.addSortMethod( handle=pluginhandle, sortMethod=xbmcplugin.SORT_METHOD_NONE )
xbmcplugin.endOfDirectory( handle=pluginhandle, succeeded=True )
def TagList(params,url,category):
logger.info("[nolomires.py] TagList")
    # Download the page
data = scrapertools.cachePage(url)
#logger.info(data)
    # Pattern for the entries
patronvideos = "<a href='([^']+)' class='[^']+' title='[^']+' style='[^']+'" # URL
patronvideos += ">([^<]+)</a>" # TITULO
matches = re.compile(patronvideos,re.DOTALL).findall(data)
scrapertools.printMatches(matches)
    # Add the entries found
for match in matches:
        # Attributes
scrapedtitle = acentos(match[1])
scrapedurl = match[0]
scrapedthumbnail = ""
scrapedplot = ""
if (DEBUG): logger.info("title=["+scrapedtitle+"], url=["+scrapedurl+"], thumbnail=["+scrapedthumbnail+"]")
        # Add to the XBMC listing
xbmctools.addnewfolder( __channel__ , "listvideos" , category , scrapedtitle , scrapedurl , scrapedthumbnail, scrapedplot )
    # Set the title, disable sorting, and close the directory
xbmcplugin.setPluginCategory( handle=pluginhandle, category=category )
xbmcplugin.addSortMethod( handle=pluginhandle, sortMethod=xbmcplugin.SORT_METHOD_NONE )
xbmcplugin.endOfDirectory( handle=pluginhandle, succeeded=True )
def MostWatched(params,url,category):
logger.info("[nolomires.py] MostWatched")
    # Download the page
data = scrapertools.cachePage(url)
#logger.info(data)
    # Pattern for the entries
patronvideos = '<li><a href="([^"]+)" ' # URL
patronvideos += 'title="([^"]+)">[^<]+' # TITULO
patronvideos += '</a>([^<
|
abilian/abilian-core
|
src/abilian/web/admin/panel.py
|
Python
|
lgpl-2.1
| 1,333
| 0.0015
|
""""""
from __future__ import annotations
from typing import TYPE_CHECKING, Any, Callable, Dict
if TYPE_CHECKING:
from .extension import Admin
class AdminPanel:
"""Base classe for admin panels.
Currentl
|
y this class does nothing. It may be useful in the future
either as just a marker interface (for automatic plugin discovery /
registration), or to add some common functionnalities. Otherwise, it
will be removed.
"""
id: str = ""
label: str = ""
icon: str = ""
admin: Admin
def url_value_preprocess(self, endpoint: str, view_args: dict[Any, Any]):
"""Panel can pr
|
eprocess values for their views.
This method is called only if the endpoint is for `get()`, `post()`, or
one of the views installed with `install_additional_rules`.
This is also the right place to add items to the breadcrumbs.
"""
def install_additional_rules(self, add_url_rule: Callable):
"""This method can be redefined in subclasses to install custom url
rules.
All rules are relative to panel 'base' rule, don't prefix rules with panel
id, it will be done by `add_url_rule`.
:param add_url_rule: function to use to add url rules, same interface as
:meth:`flask.blueprint.Blueprint.add_url_rule`.
"""
|
shlomif/PySolFC
|
pysollib/games/klondike.py
|
Python
|
gpl-3.0
| 55,884
| 0
|
#!/usr/bin/env python
# -*- mode: python; coding: utf-8; -*-
# ---------------------------------------------------------------------------##
#
# Copyright (C) 1998-2003 Markus Franz Xaver Johannes Oberhumer
# Copyright (C) 2003 Mt. Hood Playing Card Co.
# Copyright (C) 2005-2009 Skomoroh
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# ---------------------------------------------------------------------------##
import pysollib.game
from pysollib.game import Game
from pysollib.gamedb import GI, GameInfo, registerGame
from pysollib.games.canfield import CanfieldRush_Talon
from pysollib.hint import CautiousDefaultHint
from pysollib.hint import FreeCellSolverWrapper
from pysollib.hint import KlondikeType_Hint
from pysollib.layout import Layout
from pysollib.mfxutil import Struct, kwdefault
from pysollib.mygettext import _
from pysollib.pysoltk import MfxCanvasText
from pysollib.stack import \
AC_RowStack, \
BO_RowStack, \
DealRowTalonStack, \
InitialDealTalonStack, \
KingAC_RowStack, \
KingSS_RowStack, \
OpenStack, \
OpenTalonStack, \
RK_FoundationStack, \
RK_RowStack, \
RedealTalonStack, \
ReserveStack, \
SC_RowStack, \
SS_FoundationStack, \
SS_RowStack, \
Stack, \
StackWrapper, \
SuperMoveAC_RowStack, \
UD_SS_RowStack, \
WasteStack, \
WasteTalonStack, \
isSameColorSequence
from pysollib.util import ACE, ANY_RANK, ANY_SUIT, KING, NO_RANK
# ************************************************************************
# * Klondike
# ************************************************************************
class Klondike(Game):
Layout_Method = staticmethod(Layout.klondikeLayout)
Talon_Class = WasteTalonStack
Foundation_Class = SS_FoundationStack
RowStack_Class = KingAC_RowStack
Hint_Class = KlondikeType_Hint
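    # These class attributes are the variant "knobs": the subclasses below
    # (Trigon, ThumbAndPouch, Whitehead, SmallHarp, ...) override Talon_Class,
    # RowStack_Class etc. to derive new games from the same createGame logic.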
def createGame(self, max_rounds=-1, num_deal=1, **layout):
# create layout
lay, s = Layout(self), self.s
kwdefault(layout, rows=7, waste=1, texts=1, playcards=16)
self.Layout_Method.__get__(lay, lay.__class__)(**layout)
# self.__class__.Layout_Method(lay, **layout)
self.setSize(lay.size[0], lay.size[1])
# create stacks
s.talon = self.Talon_Class(lay.s.talon.x, lay.s.talon.y, self,
max_rounds=max_rounds, num_deal=num_deal)
if lay.s.waste:
s.waste = WasteStack(lay.s.waste.x, lay.s.waste.y, self)
for r in lay.s.foundations:
s.foundations.append(
self.Foundation_Class(r.x, r.y, self, suit=r.suit))
for r in lay.s.rows:
s.rows.append(self.RowStack_Class(r.x, r.y, self))
# default
lay.defaultAll()
        return lay
def startGame(self, flip=0, reverse=1):
for i in range(1, len(self.s.rows)):
self.s.talon.dealRow(
rows=self.s.rows[i:], flip=flip, frames=0, reverse=reverse)
self.startDealSample()
self.s.talon.dealRow(reverse=reverse)
if self.s.waste:
self.s.talon.dealCards() # deal first card to WasteStack
shallHighlightMatch = Game._shallHighlightMatch_AC
# ************************************************************************
# * Vegas Klondike
# ************************************************************************
class VegasKlondike(Klondike):
getGameScore = Game.getGameScoreCasino
getGameBalance = Game.getGameScoreCasino
def createGame(self, max_rounds=1):
lay = Klondike.createGame(self, max_rounds=max_rounds)
self.texts.score = MfxCanvasText(self.canvas,
8, self.height - 8, anchor="sw",
font=self.app.getFont("canvas_large"))
return lay
def updateText(self):
if self.preview > 1:
return
b1, b2 = self.app.stats.gameid_balance, 0
if self.shallUpdateBalance():
b2 = self.getGameBalance()
t = _("Balance $%d") % (b1 + b2)
self.texts.score.config(text=t)
def getDemoInfoTextAttr(self, tinfo):
return tinfo[1] # "se" corner
# ************************************************************************
# * Casino Klondike
# ************************************************************************
class CasinoKlondike(VegasKlondike):
def createGame(self):
lay = VegasKlondike.createGame(self, max_rounds=3)
lay.createRoundText(self.s.talon, 'ne', dx=lay.XS)
# ************************************************************************
# * Klondike by Threes
# ************************************************************************
class KlondikeByThrees(Klondike):
def createGame(self):
Klondike.createGame(self, num_deal=3)
# ************************************************************************
# * Trigon
# ************************************************************************
class Trigon(Klondike):
RowStack_Class = KingSS_RowStack
# ************************************************************************
# * Thumb and Pouch
# * Chinaman
# ************************************************************************
class ThumbAndPouch(Klondike):
RowStack_Class = BO_RowStack
def createGame(self):
Klondike.createGame(self, max_rounds=1)
def shallHighlightMatch(self, stack1, card1, stack2, card2):
return (card1.suit != card2.suit and
(card1.rank + 1 == card2.rank or
card2.rank + 1 == card1.rank))
class Chinaman(ThumbAndPouch):
RowStack_Class = StackWrapper(BO_RowStack, base_rank=KING)
def createGame(self):
lay = Klondike.createGame(self, num_deal=3,
max_rounds=2, round_text=True)
lay.createRoundText(self.s.talon, 'ne', dx=lay.XS)
# ************************************************************************
# * Whitehead
# ************************************************************************
class Whitehead_RowStack(SS_RowStack):
def _isAcceptableSequence(self, cards):
return isSameColorSequence(cards, self.cap.mod, self.cap.dir)
def getHelp(self):
return _('Tableau. Build down by color. Sequences of cards '
'in the same suit can be moved as a unit.')
class Whitehead(Klondike):
RowStack_Class = Whitehead_RowStack
Hint_Class = CautiousDefaultHint
def createGame(self):
Klondike.createGame(self, max_rounds=1)
def startGame(self):
Klondike.startGame(self, flip=1)
shallHighlightMatch = Game._shallHighlightMatch_SS
getQuickPlayScore = Game._getSpiderQuickPlayScore
# ************************************************************************
# * Small Harp (Klondike in a different layout)
# ************************************************************************
class SmallHarp(Klondike):
Layout_Method = staticmethod(Layout.gypsyLayout)
def startGame(self):
for i in range(len(self.s.rows)):
self.s.talon.dealRow(rows=self.s.rows[:i], flip=0, frames=0)
self._startAndDealRowAndCards()
# ************************************************************************
# * Eastcliff
# * Easthaven
# ************************************************************************
class Eastcliff(Klondike):
RowStack_Class = AC_RowStack
def createGame(self):
Klondike.createGame(self, max_rounds=1)
def startGame(sel
|
cmouse/buildbot
|
master/buildbot/steps/source/svn.py
|
Python
|
gpl-2.0
| 17,366
| 0.000921
|
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import re
import xml.dom.minidom
import xml.parsers.expat
from urllib.parse import quote as urlquote
from urllib.parse import unquote as urlunquote
from urllib.parse import urlparse
from urllib.parse import urlunparse
from twisted.internet import defer
from twisted.internet import reactor
from twisted.python import log
from buildbot.config import ConfigErrors
from buildbot.interfaces import WorkerSetupError
from buildbot.process import buildstep
from buildbot.process import remotecommand
from buildbot.steps.source.base import Source
class SVN(Source):
"""I perform Subversion checkout/update operations."""
name = 'svn'
renderables = ['repourl', 'password']
possible_methods = ('clean', 'fresh', 'clobber', 'copy', 'export', None)
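    # Semantics of the full-mode methods, as implemented below: 'clobber'
    # removes the workdir and checks out from scratch, 'fresh' purges even
    # ignored files before updating, 'clean' purges only unversioned files,
    # and 'copy'/'export' check out into a 'source' directory and then copy
    # (or 'svn export') the tree into the build directory.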
def __init__(self, repourl=None, mode='incremental',
method=None, username=None,
password=None, extra_args=None, keep_on_purge=None,
depth=None, preferLastChangedRev=False, **kwargs):
self.repourl = repourl
self.username = username
self.password = password
self.extra_args = extra_args
self.keep_on_purge = keep_on_purge or []
self.depth = depth
self.method = method
self.mode = mode
self.preferLastChangedRev = preferLastChangedRev
super().__init__(**kwargs)
errors = []
if not self._hasAttrGroupMember('mode', self.mode):
errors.append("mode {} is not one of {}".format(self.mode,
self._listAttrGroupMembers('mode')))
if self.method not in self.possible_methods:
errors.append("method {} is not one of {}".format(self.method, self.possible_methods))
if repourl is None:
errors.append("you must provide repourl")
if errors:
            raise ConfigErrors(errors)
@defer.inlineCallbacks
def run_vc(self, branch, revision, patch):
self.revision = revision
self.method = self._getMethod()
self.stdio_log = yield self.addLogForRemoteCommands("stdio")
# if the version is new enough, and the password is set, then obfuscate
# it
if self.password is not None:
            if not self.workerVersionIsOlderThan('shell', '2.16'):
self.password = ('obfuscated', self.password, 'XXXXXX')
else:
log.msg("Worker does not understand obfuscation; "
"svn password will be logged")
installed = yield self.checkSvn()
if not installed:
raise WorkerSetupError("SVN is not installed on worker")
patched = yield self.sourcedirIsPatched()
if patched:
yield self.purge(False)
yield self._getAttrGroupMember('mode', self.mode)()
if patch:
yield self.patch(patch)
res = yield self.parseGotRevision()
return res
@defer.inlineCallbacks
def mode_full(self):
if self.method == 'clobber':
yield self.clobber()
return
elif self.method in ['copy', 'export']:
yield self.copy()
return
updatable = yield self._sourcedirIsUpdatable()
if not updatable:
# blow away the old (un-updatable) directory and checkout
yield self.clobber()
elif self.method == 'clean':
yield self.clean()
elif self.method == 'fresh':
yield self.fresh()
@defer.inlineCallbacks
def mode_incremental(self):
updatable = yield self._sourcedirIsUpdatable()
if not updatable:
# blow away the old (un-updatable) directory and checkout
yield self.clobber()
else:
# otherwise, do an update
command = ['update']
if self.revision:
command.extend(['--revision', str(self.revision)])
yield self._dovccmd(command)
@defer.inlineCallbacks
def clobber(self):
yield self.runRmdir(self.workdir, timeout=self.timeout)
yield self._checkout()
@defer.inlineCallbacks
def fresh(self):
yield self.purge(True)
cmd = ['update']
if self.revision:
cmd.extend(['--revision', str(self.revision)])
yield self._dovccmd(cmd)
@defer.inlineCallbacks
def clean(self):
yield self.purge(False)
cmd = ['update']
if self.revision:
cmd.extend(['--revision', str(self.revision)])
yield self._dovccmd(cmd)
@defer.inlineCallbacks
def copy(self):
yield self.runRmdir(self.workdir, timeout=self.timeout)
checkout_dir = 'source'
if self.codebase:
checkout_dir = self.build.path_module.join(
checkout_dir, self.codebase)
# temporarily set workdir = checkout_dir and do an incremental checkout
try:
old_workdir = self.workdir
self.workdir = checkout_dir
yield self.mode_incremental()
finally:
            self.workdir = old_workdir
# if we're copying, copy; otherwise, export from source to build
if self.method == 'copy':
cmd = remotecommand.RemoteCommand('cpdir',
{'fromdir': checkout_dir, 'todir': self.workdir,
'logEnviron': self.logEnviron})
else:
export_cmd = ['svn', 'export']
if self.revision:
export_cmd.extend(["--revision", str(self.revision)])
if self.username:
export_cmd.extend(['--username', self.username])
if self.password is not None:
export_cmd.extend(['--password', self.password])
if self.extra_args:
export_cmd.extend(self.extra_args)
export_cmd.extend([checkout_dir, self.workdir])
cmd = remotecommand.RemoteShellCommand('', export_cmd,
env=self.env, logEnviron=self.logEnviron,
timeout=self.timeout)
cmd.useLog(self.stdio_log, False)
yield self.runCommand(cmd)
if cmd.didFail():
raise buildstep.BuildStepFailed()
@defer.inlineCallbacks
def _dovccmd(self, command, collectStdout=False, collectStderr=False, abandonOnFailure=True):
assert command, "No command specified"
command.extend(['--non-interactive', '--no-auth-cache'])
if self.username:
command.extend(['--username', self.username])
if self.password is not None:
command.extend(['--password', self.password])
if self.depth:
command.extend(['--depth', self.depth])
if self.extra_args:
command.extend(self.extra_args)
cmd = remotecommand.RemoteShellCommand(self.workdir, ['svn'] + command,
env=self.env,
logEnviron=self.logEnviron,
timeout=self.timeout,
collectStdout=collectStdout,
collectStderr=collectStderr)
cmd.useLog(self.stdio_log, False)
yield self.runCommand(cmd)
if cmd.didFa
|
mfcloud/python-zvm-sdk
|
sample/vlan/createvm_vlan_v1s2.py
|
Python
|
apache-2.0
| 4,094
| 0.005374
|
# Copyright 2017 IBM Corp.
from zvmconnector import connector
import os
import time
print("Setup client: client=connector.ZVMConnector('9.60.18.170', 8080)\n")
client=connector.ZVMConnector('9.60.18.170', 8080)
print("Test: send_request('vswitch_get_list')")
vswitch_list = client.send_request('vswitch_get_list')
print("Result: %s\n" % vswitch_list)
GUEST_USERID = 'DEMOV1S2'
GUEST_PROFILE = 'osdflt'
GUEST_VCPUS = 1
GUEST_MEMORY = 2048
DISK_POOL = 'ECKD:POOL1'
IMAGE_PATH = '/tmp/rhel7eckd_IUCV_zvmguestconfigure.img'
IMAGE_OS_VERSION = 'rhel7.0'
GUEST_IP_ADDR = '192.168.100.3'
GATEWAY = '192.168.100.1'
CIDR = '192.168.100.1/24'
VLANID = 100
VSWITCH_NAME = 'Datanet1'
network_info = [{'ip_addr': GUEST_IP_ADDR, 'gateway_addr': GATEWAY, 'cidr': CIDR}]
image_name = os.path.basename(IMAGE_PATH)
url = 'file://' + IMAGE_PATH
print("Parameter list:")
print("GUEST_USERID: %s" % GUEST_USERID)
print("GUEST_PRO
|
FILE: %s" % GUEST_PROFILE)
print("GUEST_VCPUS: %s" % GUEST_VCPUS)
print("GUEST_MEMORY: %s" % GUEST_MEMORY)
print("DISK_POOL: %s" % DISK_POOL)
print("IMAGE_PATH: %s" % IMAGE_PATH)
print("IMAGE_OS_VERSION: %s" % IMAGE_OS_VERSION)
print("image_name: %s" % image_name)
print("url: %s" % url)
print("network_info: %s" % network_info)
print("-----------------------------------------------------------------------------------------------------------\n")
print("Import image: send_request('image_import', '%s', url, {'os_version': '%s'})" % (image_name, IMAGE_OS_VERSION))
info = client.send_request('image_import', image_name, url, {'os_version': IMAGE_OS_VERSION})
print('Result: %s\n' % info)
print("Get image size: send_request('image_get_root_disk_size', '%s')" % image_name)
info = client.send_request('image_get_root_disk_size', image_name)
print('Result: %s\n' % info)
size=info['output']
disks_list = [{'size': size, 'is_boot_disk': True, 'disk_pool': DISK_POOL}]
print("set disks_list: %s\n" % disks_list)
print("Create guest: send_request('guest_create', '%s', '%s', '%s', disk_list='%s', user_profile='%s')" %
(GUEST_USERID, GUEST_VCPUS, GUEST_MEMORY, disks_list, GUEST_PROFILE))
info = client.send_request('guest_create', GUEST_USERID, GUEST_VCPUS, GUEST_MEMORY, disk_list=disks_list, user_profile=GUEST_PROFILE)
print('Result: %s\n' % info)
print("Guest deploy: send_request('guest_deploy', '%s', '%s')" % (GUEST_USERID, image_name))
info = client.send_request('guest_deploy', GUEST_USERID, image_name)
print('Result: %s\n' % info)
print("Set network: send_request('guest_create_network_interface', '%s', '%s', '%s')" %(GUEST_USERID, IMAGE_OS_VERSION, network_info))
info = client.send_request('guest_create_network_interface', GUEST_USERID, IMAGE_OS_VERSION, network_info)
print('Result: %s\n' % info)
nic = info['output'][0]['nic_vdev']
print("Couple network: send_request('guest_nic_couple_to_vswitch', '%s', '%s', '%s')" % (GUEST_USERID, nic, VSWITCH_NAME))
info = client.send_request('guest_nic_couple_to_vswitch', GUEST_USERID, nic, VSWITCH_NAME)
print('Result: %s\n' % info)
print("Set VLAN ID")
info = client.send_request('vswitch_set_vlan_id_for_user', VSWITCH_NAME, GUEST_USERID, VLANID)
print('Result: %s\n' % info)
print("Grant user: send_request('vswitch_grant_user', '%s', '%s')" % (VSWITCH_NAME, GUEST_USERID))
info = client.send_request('vswitch_grant_user', VSWITCH_NAME, GUEST_USERID)
print('Result: %s\n' % info)
print("Check power state: send_request('guest_get_power_state', '%s')" % GUEST_USERID)
info = client.send_request('guest_get_power_state', GUEST_USERID)
print('Result: %s\n' % info)
print("Start guest: send_request('guest_start', '%s')" % GUEST_USERID)
info = client.send_request('guest_start', GUEST_USERID)
print('Result: %s\n' % info)
print("Check power state: send_request('guest_get_power_state', '%s')" % GUEST_USERID)
info = client.send_request('guest_get_power_state', GUEST_USERID)
print('Result: %s\n' % info)
print("Get user direct: send_request('guest_get_definition_info', '%s')" % GUEST_USERID)
info = client.send_request('guest_get_definition_info', GUEST_USERID)
print('Result: %s\n' % info)
print('Completed\n')
|
alexquick/dokku
|
contrib/dokku-installer.py
|
Python
|
mit
| 13,390
| 0.002315
|
#!/usr/bin/env python2.7
import cgi
import json
import os
import re
import SimpleHTTPServer
import SocketServer
import subprocess
import sys
import threading
VERSION = 'v0.14.6'
hostname = ''
try:
command = "bash -c '[[ $(dig +short $HOSTNAME) ]] && echo $HOSTNAME || wget -q -O - icanhazip.com'"
hostname = subprocess.check_output(command, shell=True)
if ':' in hostname:
hostname = ''
except subprocess.CalledProcessError:
pass
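# The dig check above keeps $HOSTNAME only when it resolves in DNS; otherwise
# icanhazip.com supplies the public IP. The ':' test discards IPv6 answers,
# leaving the hostname field of the installer page blank.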
key_file = os.getenv('KEY_FILE', None)
if os.path.isfile('/home/ec2-user/.ssh/authorized_keys'):
key_file = '/home/ec2-user/.ssh/authorized_keys'
elif os.path.isfile('/home/ubuntu/.ssh/authorized_keys'):
key_file = '/home/ubuntu/.ssh/authorized_keys'
else:
key_file = '/root/.ssh/authorized_keys'
admin_keys = []
if os.path.isfile(key_file):
try:
command = "cat {0}".format(key_file)
admin_keys = subprocess.check_output(command, shell=True).strip().split("\n")
except subprocess.CalledProcessError:
pass
def check_boot():
if 'onboot' not in sys.argv:
return
init_dir = os.getenv('INIT_DIR', '/etc/init')
systemd_dir = os.getenv('SYSTEMD_DIR', '/etc/systemd/system')
nginx_dir = os.getenv('NGINX_DIR', '/etc/nginx/conf.d')
if os.path.exists(init_dir):
with open('{0}/dokku-installer.conf'.format(init_dir), 'w') as f:
f.write("start on runlevel [2345]\n")
f.write("exec {0} selfdestruct\n".format(os.path.abspath(__file__)))
if os.path.exists(systemd_dir):
with open('{0}/dokku-installer.service'.format(systemd_dir), 'w') as f:
f.write("[Unit]\n")
f.write("Description=Dokku web-installer\n")
f.write("\n")
f.write("[Service]\n")
f.write("ExecStart={0} selfdestruct\n".format(os.path.abspath(__file__)))
f.write("\n")
f.write("[Install]\n")
f.write("WantedBy=multi-user.target\n")
f.write("WantedBy=graphical.target\n")
if os.path.exists(nginx_dir):
with open('{0}/dokku-installer.conf'.format(nginx_dir), 'w') as f:
f.write("upstream dokku-installer { server 127.0.0.1:2000; }\n")
f.write("server {\n")
f.write(" listen 80;\n")
f.write(" location / {\n")
f.write(" proxy_pass http://dokku-installer;\n")
f.write(" }\n")
f.write("}\n")
subprocess.call('rm -f /etc/nginx/sites-enabled/*', shell=True)
sys.exit(0)
class GetHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
def do_GET(self):
content = PAGE.replace('{VERSION}', VERSION)
content = content.replace('{HOSTNAME}', hostname)
content = content.replace('{AUTHORIZED_KEYS_LOCATION}', key_file)
content = content.replace('{ADMIN_KEYS}', "\n".join(admin_keys))
self.send_response(200)
self.end_headers()
self.wfile.write(content)
def do_POST(self):
if self.path not in ['/setup', '/setup/']:
return
params = cgi.FieldStorage(fp=self.rfile,
headers=self.headers,
environ={
'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': self.headers['Content-Type']})
vhost_enable = 'false'
dokku_root = os.getenv('DOKKU_ROOT', '/home/dokku')
if 'vhost' in params and params['vhost'].value == 'true':
vhost_enable = 'true'
with open('{0}/VHOST'.format(dokku_root), 'w') as f:
f.write(params['hostname'].value)
else:
try:
os.remove('{0}/VHOST'.format(dokku_root))
except OSError:
pass
with open('{0}/HOSTNAME'.format(dokku_root), 'w') as f:
f.write(params['hostname'].value)
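        # Register each submitted key via `sshcommand acl-add` under a unique
        # name: adminN while no admin user exists yet, switching to web-adminN
        # (numbered past the highest existing index) once one does.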
for (index, key) in enumerate(params['keys'].value.splitlines(), 1):
user = 'admin'
if self.admin_user_exists() is not None:
user = 'web-admin'
if self.web_admin_user_exists() is not None:
index = int(self.web_admin_user_exists()) + 1
elif self.web_admin_user_exists() is None:
index = 1
elif self.admin_user_exists() is None:
pass
else:
index = int(self.admin_user_exists()) + 1
user = user + str(index)
command = ['sshcommand', 'acl-add', 'dokku', user]
proc = subprocess.Popen(command, stdin=subprocess.PIPE)
proc.stdin.write(key)
proc.stdin.close()
proc.wait()
set_debconf_selection('boolean', 'nginx_enable', 'true')
set_debconf_selection('boolean', 'skip_key_file', 'true')
set_debconf_selection('boolean', 'vhost_enable', vhost_enable)
set_debconf_selection('boolean', 'web_config', 'false')
set_debconf_selection('string', 'hostname', params['hostname'].value)
if 'selfdestruct' in sys.argv:
DeleteInstallerThread()
self.send_response(200)
self.end_headers()
self.wfile.write(json.dumps({'status': 'ok'}))
def web_admin_user_exists(self):
return self.user_exists('web-admin(\d+)')
def admin_user_exists(self):
return self.user_exists('admin(\d+)')
def user_exists(self, name):
command = 'dokku ssh-keys:list'
pattern = re.compile(r'NAME="' + name + '"')
        proc = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
max_num = 0
exists = False
for line in proc.stdout:
m = pattern.search(line)
if m:
# User of the form `user` or `user#` exists
exists = True
                # compare indices numerically; comparing the raw match strings
                # would order '10' before '3'
                max_num = max(max_num, int(m.group(1)))
if exists:
return max_num
else:
            return None
def set_debconf_selection(debconf_type, key, value):
found = False
with open('/etc/os-release', 'r') as f:
for line in f:
if 'debian' in line:
found = True
if not found:
return
ps = subprocess.Popen(['echo', 'dokku dokku/{0} {1} {2}'.format(
key, debconf_type, value
)], stdout=subprocess.PIPE)
try:
subprocess.check_output(['debconf-set-selections'], stdin=ps.stdout)
except subprocess.CalledProcessError:
pass
ps.wait()
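# debconf-set-selections preseeds dokku's debconf answers so later package
# reconfigurations run non-interactively; the /etc/os-release check makes the
# function a no-op on non-Debian systems.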
class DeleteInstallerThread(object):
def __init__(self, interval=1):
thread = threading.Thread(target=self.run, args=())
thread.daemon = True
thread.start()
def run(self):
command = "rm /etc/nginx/conf.d/dokku-installer.conf && /etc/init.d/nginx stop && /etc/init.d/nginx start"
try:
subprocess.call(command, shell=True)
except:
pass
command = "rm -f /etc/init/dokku-installer.conf /etc/systemd/system/dokku-installer.service && (stop dokku-installer || systemctl stop dokku-installer.service)"
try:
subprocess.call(command, shell=True)
except:
pass
def main():
check_boot()
port = int(os.getenv('PORT', 2000))
httpd = SocketServer.TCPServer(("", port), GetHandler)
print "Listening on 0.0.0.0:{0}, CTRL+C to stop".format(port)
httpd.serve_forever()
PAGE = """
<html>
<head>
<meta charset="utf-8" />
<title>Dokku Setup</title>
<link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.1.3/css/bootstrap.min.css" integrity="sha384-MCw98/SFnGE8fJT3GXwEOngsV7Zt27NXFoaoApmYm81iuXoPkFOJwJ8ERdknLPMO" crossorigin="anonymous">
<style>
.bd-callout {
padding: 1.25rem;
margin-top: 1.25rem;
margin-bottom: 1.25rem;
border: 1px solid #eee;
border-left-width: .25rem;
border-radius: .25rem;
}
.bd-callout p:last-child {
margin-bottom: 0;
}
.bd-callout-info {
border-left-color: #5bc0de;
}
pre {
font-size: 80%;
margin-bottom: 0;
}
h1 small {
font-size: 50%;
}
h5 {
font-size: 1rem;
|
cstipkovic/spidermonkey-research
|
python/mozbuild/mozbuild/test/test_mozconfig.py
|
Python
|
mpl-2.0
| 17,780
| 0.000675
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import unicode_literals
import os
import unittest
from shutil import rmtree
from tempfile import (
gettempdir,
mkdtemp,
)
from mozfile.mozfile import NamedTemporaryFile
from mozunit import main
from mozbuild.mozconfig import (
MozconfigFindException,
MozconfigLoadException,
MozconfigLoader,
)
class TestMozconfigLoader(unittest.TestCase):
def setUp(self):
self._old_env = dict(os.environ)
os.environ.pop('MOZCONFIG', None)
os.environ.pop('MOZ_OBJDIR', None)
os.environ.pop('CC', None)
os.environ.pop('CXX', None)
self._temp_dirs = set()
def tearDown(self):
os.environ.clear()
os.environ.update(self._old_env)
for d in self._temp_dirs:
rmtree(d)
def get_loader(self):
return MozconfigLoader(self.get_temp_dir())
def get_temp_dir(self):
d = mkdtemp()
self._temp_dirs.add(d)
return d
def test_find_legacy_env(self):
"""Ensure legacy mozconfig path definitions result in error."""
os.environ[b'MOZ_MYCONFIG'] = '/foo'
with self.assertRaises(MozconfigFindException) as e:
self.get_loader().find_mozconfig()
self.assertTrue(e.exception.message.startswith('The MOZ_MYCONFIG'))
def test_find_multiple_configs(self):
"""Ensure multiple relative-path MOZCONFIGs result in error."""
relative_mozconfig = '.mconfig'
os.environ[b'MOZCONFIG'] = relative_mozconfig
srcdir = self.get_temp_dir()
curdir = self.get_temp_dir()
dirs = [srcdir, curdir]
loader = MozconfigLoader(srcdir)
for d in dirs:
path = os.path.join(d, relative_mozconfig)
with open(path, 'wb') as f:
f.write(path)
orig_dir = os.getcwd()
try:
os.chdir(curdir)
with self.assertRaises(MozconfigFindException) as e:
loader.find_mozconfig()
finally:
os.chdir(orig_dir)
self.assertIn('exists in more than one of', e.exception.message)
for d in dirs:
self.assertIn(d, e.exception.message)
def test_find_multiple_but_identical_configs(self):
"""Ensure multiple relative-path MOZCONFIGs pointing at the same file are OK."""
relative_mozconfig = '../src/.mconfig'
os.environ[b'MOZCONFIG'] = relative_mozconfig
topdir = self.get_temp_dir()
srcdir = os.path.join(topdir, 'src')
os.mkdir(srcdir)
curdir = os.path.join(topdir, 'obj')
os.mkdir(curdir)
loader = MozconfigLoader(srcdir)
        path = os.path.join(srcdir, relative_mozconfig)
with open(path, 'w'):
pass
orig_dir = os.getcwd()
try:
os.chdir(curdir)
self.assertEqual(os.path.realpath(loader.find_mozconfig()),
os.path.realpath(path))
finally:
os.chdir(orig_dir)
def test_find_no_relative_configs(self):
"""Ensure a missing relative-path MOZCONFIG is detected."""
relative_mozconfig = '.mconfig'
os.environ[b'MOZCONFIG'] = relative_mozconfig
srcdir = self.get_temp_dir()
curdir = self.get_temp_dir()
dirs = [srcdir, curdir]
loader = MozconfigLoader(srcdir)
orig_dir = os.getcwd()
try:
os.chdir(curdir)
with self.assertRaises(MozconfigFindException) as e:
loader.find_mozconfig()
finally:
os.chdir(orig_dir)
self.assertIn('does not exist in any of', e.exception.message)
for d in dirs:
self.assertIn(d, e.exception.message)
def test_find_relative_mozconfig(self):
"""Ensure a relative MOZCONFIG can be found in the srcdir."""
relative_mozconfig = '.mconfig'
os.environ[b'MOZCONFIG'] = relative_mozconfig
srcdir = self.get_temp_dir()
curdir = self.get_temp_dir()
dirs = [srcdir, curdir]
loader = MozconfigLoader(srcdir)
path = os.path.join(srcdir, relative_mozconfig)
with open(path, 'w'):
pass
orig_dir = os.getcwd()
try:
os.chdir(curdir)
self.assertEqual(os.path.normpath(loader.find_mozconfig()),
os.path.normpath(path))
finally:
os.chdir(orig_dir)
def test_find_abs_path_not_exist(self):
"""Ensure a missing absolute path is detected."""
os.environ[b'MOZCONFIG'] = '/foo/bar/does/not/exist'
with self.assertRaises(MozconfigFindException) as e:
self.get_loader().find_mozconfig()
self.assertIn('path that does not exist', e.exception.message)
self.assertTrue(e.exception.message.endswith('/foo/bar/does/not/exist'))
def test_find_path_not_file(self):
"""Ensure non-file paths are detected."""
os.environ[b'MOZCONFIG'] = gettempdir()
with self.assertRaises(MozconfigFindException) as e:
self.get_loader().find_mozconfig()
self.assertIn('refers to a non-file', e.exception.message)
self.assertTrue(e.exception.message.endswith(gettempdir()))
def test_find_default_files(self):
"""Ensure default paths are used when present."""
for p in MozconfigLoader.DEFAULT_TOPSRCDIR_PATHS:
d = self.get_temp_dir()
path = os.path.join(d, p)
with open(path, 'w'):
pass
self.assertEqual(MozconfigLoader(d).find_mozconfig(), path)
def test_find_multiple_defaults(self):
"""Ensure we error when multiple default files are present."""
self.assertGreater(len(MozconfigLoader.DEFAULT_TOPSRCDIR_PATHS), 1)
d = self.get_temp_dir()
for p in MozconfigLoader.DEFAULT_TOPSRCDIR_PATHS:
with open(os.path.join(d, p), 'w'):
pass
with self.assertRaises(MozconfigFindException) as e:
MozconfigLoader(d).find_mozconfig()
self.assertIn('Multiple default mozconfig files present',
e.exception.message)
def test_find_deprecated_path_srcdir(self):
"""Ensure we error when deprecated path locations are present."""
for p in MozconfigLoader.DEPRECATED_TOPSRCDIR_PATHS:
d = self.get_temp_dir()
with open(os.path.join(d, p), 'w'):
pass
with self.assertRaises(MozconfigFindException) as e:
MozconfigLoader(d).find_mozconfig()
self.assertIn('This implicit location is no longer',
e.exception.message)
self.assertIn(d, e.exception.message)
def test_find_deprecated_home_paths(self):
"""Ensure we error when deprecated home directory paths are present."""
for p in MozconfigLoader.DEPRECATED_HOME_PATHS:
home = self.get_temp_dir()
os.environ[b'HOME'] = home
path = os.path.join(home, p)
with open(path, 'w'):
pass
with self.assertRaises(MozconfigFindException) as e:
self.get_loader().find_mozconfig()
self.assertIn('This implicit location is no longer',
e.exception.message)
self.assertIn(path, e.exception.message)
def test_read_no_mozconfig(self):
# This is basically to ensure changes to defaults incur a test failure.
result = self.get_loader().read_mozconfig()
self.assertEqual(result, {
'path': None,
'topobjdir': None,
'configure_args': None,
'make_flags': None,
'make_extra': None,
'env': None,
'vars': None,
})
def test_read_empty_mozconfig(self):
with NamedTemporaryFile(mode='w') as mozconfig:
result = self.get_loader().read_mozconfig(mozconfig.name)
self.assertEqu
|
karllessard/tensorflow
|
tensorflow/tools/ci_build/linux/mkl/set-build-env.py
|
Python
|
apache-2.0
| 12,236
| 0.007682
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Configure build environment for certain Intel platforms."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import os
import subprocess
BASIC_BUILD_OPTS = ["--cxxopt=-D_GLIBCXX_USE_CXX11_ABI=0", "--copt=-O3"]
SECURE_BUILD_OPTS = [
"--copt=-Wformat", "--copt=-Wformat-security", "--copt=-fstack-protector",
"--copt=-fPIC", "--copt=-fpic", "--linkopt=-znoexecstack",
"--linkopt=-zrelro", "--linkopt=-znow", "--linkopt=-fstack-protector"
]
class IntelPlatform(object):
min_gcc_major_version_ = 0
min_gcc_minor_version_ = 0
host_gcc_major_version_ = 0
host_gcc_minor_version_ = 0
BAZEL_PREFIX_ = "--copt="
ARCH_PREFIX_ = "-march="
FLAG_PREFIX_ = "-m"
def __init__(self, min_gcc_major_version, min_gcc_minor_version):
self.min_gcc_minor_version_ = min_gcc_minor_version
self.min_gcc_major_version_ = min_gcc_major_version
  # Return True or False depending on whether the platform optimization
  # flags can be generated by the gcc version specified in the parameters.
def set_host_gcc_version(self, gcc_major_version, gcc_minor_version):
# True only if the gcc version in the tuple is >=
# min_gcc_major_version_, min_gcc_minor_version_
if gcc_major_version < self.min_gcc_major_version_:
print("Your MAJOR version of GCC is too old: {}; "
"it must be at least {}.{}".format(gcc_major_version,
self.min_gcc_major_version_,
self.min_gcc_minor_version_))
return False
elif gcc_major_version == self.min_gcc_major_version_ and \
gcc_minor_version < self.min_gcc_minor_version_:
print("Your MINOR version of GCC is too old: {}; "
"it must be at least {}.{}".format(gcc_minor_version,
self.min_gcc_major_version_,
self.min_gcc_minor_version_))
return False
print("gcc version OK: {}.{}".format(gcc_major_version, gcc_minor_version))
self.host_gcc_major_version_ = gcc_major_version
self.host_gcc_minor_version_ = gcc_minor_version
return True
# return a string with all the necessary bazel formatted flags for this
# platform in this gcc environment
def get_bazel_gcc_flags(self):
raise NotImplementedError(self)
# Returns True if the host gcc version is older than the gcc version in which
# the new march flag became available.
# Specify the version in which the new name usage began
def use_old_arch_names(self, gcc_new_march_major_version,
gcc_new_march_minor_version):
if self.host_gcc_major_version_ < gcc_new_march_major_version:
return True
elif self.host_gcc_major_version_ == gcc_new_march_major_version and \
self.host_gcc_minor_version_ < gcc_new_march_minor_version:
return True
return False
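# Illustrative sketch, not part of the original file: HaswellPlatform below
# requires gcc >= 4.8 while its -march name changed in gcc 4.9, so after
# set_host_gcc_version(4, 8) succeeds, use_old_arch_names(4, 9) is True;
# with a host gcc of 5.1 it would be False.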
class NehalemPlatform(IntelPlatform):
def __init__(self):
IntelPlatform.__init__(self, 4, 8)
def get_bazel_gcc_flags(self):
NEHALEM_ARCH_OLD = "corei7"
NEHALEM_ARCH_NEW = "nehalem"
if self.use_old_arch_names(4, 9):
return self.BAZEL_PREFIX_ + self.ARCH_PREFIX_ + \
NEHALEM_ARCH_OLD + " "
else:
return self.BAZEL_PREFIX_ + self.ARCH_PREFIX_ + \
NEHALEM_ARCH_NEW + " "
class SandyBridgePlatform(IntelPlatform):
def __init__(self):
IntelPlatform.__init__(self, 4, 8)
def get_bazel_gcc_flags(self):
SANDYBRIDGE_ARCH_OLD = "corei7-avx"
SANDYBRIDGE_ARCH_NEW = "sandybridge"
if self.use_old_arch_names(4, 9):
return self.BAZEL_PREFIX_ + self.ARCH_PREFIX_ + \
SANDYBRIDGE_ARCH_OLD + " "
else:
return self.BAZEL_PREFIX_ + self.ARCH_PREFIX_ + \
SANDYBRIDGE_ARCH_NEW + " "
class HaswellPlatform(IntelPlatform):
def __init__(self):
IntelPlatform.__init__(self, 4, 8)
def get_bazel_gcc_flags(self):
HASWELL_ARCH_OLD = "core-avx2" # Only missing the POPCNT instruction
HASWELL_ARCH_NEW = "haswell"
POPCNT_FLAG = "popcnt"
if self.use_old_arch_names(4, 9):
ret_val = self.BAZEL_PREFIX_ + self.ARCH_PREFIX_ + \
HASWELL_ARCH_OLD + " "
return ret_val + self.BAZEL_PREFIX_ + self.FLAG_PREFIX_ + \
POPCNT_FLAG + " "
else:
return self.BAZEL_PREFIX_ + self.ARCH_PREFIX_ + \
HASWELL_ARCH_NEW + " "
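# Illustrative sketch, not part of the original file: with host gcc 4.8,
# HaswellPlatform().get_bazel_gcc_flags() returns
# "--copt=-march=core-avx2 --copt=-mpopcnt ", whereas with gcc >= 4.9 it
# returns "--copt=-march=haswell ".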
class SkylakePlatform(IntelPlatform):
def __init__(self):
IntelPlatform.__init__(self, 4, 9)
def get_bazel_gcc_flags(self):
    SKYLAKE_ARCH_OLD = "broadwell"  # missing several AVX-512 flags; see below
SKYLAKE_ARCH_NEW = "skylake-avx512"
# the flags that broadwell is missing: pku, clflushopt, clwb, avx512vl,
# avx512bw, avx512dq. xsavec and xsaves are available in gcc 5.x
# but for now, just exclude them.
AVX512_FLAGS = ["avx512f", "avx512cd"]
if self.use_old_arch_names(6, 1):
ret_val = self.BAZEL_PREFIX_ + self.ARCH_PREFIX_ + \
SKYLAKE_ARCH_OLD + " "
for flag in AVX512_FLAGS:
ret_val += self.BAZEL_PREFIX_ + self.FLAG_PREFIX_ + flag + " "
return ret_val
else:
return self.BAZEL_PREFIX_ + self.ARCH_PREFIX_ + \
SKYLAKE_ARCH_NEW + " "
class CascadelakePlatform(IntelPlatform):
def __init__(self):
IntelPlatform.__init__(self, 8, 3)
def get_bazel_gcc_flags(self):
    CASCADELAKE_ARCH_OLD = "skylake-avx512"  # only missing the AVX512VNNI instruction
    CASCADELAKE_ARCH_NEW = "cascadelake"
    # the flag that skylake-avx512 is missing: avx512vnni
    VNNI_FLAG = "avx512vnni"
if IntelPlatform.use_old_arch_names(self, 9, 1):
ret_val = self.BAZEL_PREFIX_ + self.ARCH_PREFIX_ + \
CASCADELAKE_ARCH_OLD + " "
return ret_val + self.BAZEL_PREFIX_ + self.FLAG_PREFIX_ + \
VNNI_FLAG + " "
else:
return self.BAZEL_PREFIX_ + self.ARCH_PREFIX_ + \
CASCADELAKE_ARCH_NEW + " "
class BuildEnvSetter(object):
"""Prepares the proper environment settings for various Intel platforms."""
default_platform_ = "haswell"
PLATFORMS_ = {
"nehalem": NehalemPlatform(),
"sandybridge": SandyBridgePlatform(),
"haswell": HaswellPlatform(),
"skylake": SkylakePlatform(),
"cascadelake": CascadelakePlatform()
}
def __init__(self):
self.args = None
self.bazel_flags_ = "build "
self.target_platform_ = None
# Return a tuple of the current gcc version
def get_gcc_version(self):
gcc_major_version = 0
gcc_minor_version = 0
# check to see if gcc is present
gcc_path = ""
gcc_path_cmd = "command -v gcc"
try:
gcc_path = subprocess.check_output(gcc_path_cmd, shell=True,
stderr=subprocess.STDOUT).\
strip()
print("gcc located here: {}".format(gcc_path))
if not os.access(gcc_path, os.F_OK | os.X_OK):
raise ValueError(
"{} does not exist or is not executable.".format(gcc_path))
gcc_output = subprocess.check_output(
[gcc_path, "-dumpfullversion", "-dumpversion"],
stderr=subprocess.STDOUT).strip()
# handle python2 vs 3 (bytes vs str type)
if isinstance(gcc_output, bytes):
gcc_output = gcc_output.decode("utf-8")
print("gcc version: {}".format(gcc_output))
gcc_info = gcc_output.split(".")
      gcc_major_version = int(gcc_info[0])
|
step21/inkscape-osx-packaging-native
|
packaging/macosx/Inkscape.app/Contents/Resources/extensions/radiusrand.py
|
Python
|
lgpl-2.1
| 3,583
| 0.014792
|
#!/usr/bin/env python
'''
Copyright (C) 2005 Aaron Spike, aaron@ekips.org
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
'''
import random, math, inkex, cubicsuperpath
def randomize((x, y), rx, ry, norm):
if norm:
r = abs(random.normalvariate(0.0,0.5*max(rx, ry)))
else:
r = random.uniform(0.0,max(rx, ry))
a = random.uniform(0.0,2*math.pi)
    x += math.cos(a)*r
    y += math.sin(a)*r
return [x, y]
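# Illustrative sketch, not part of the original extension: with norm=False
# the displacement radius r is uniform in [0, max(rx, ry)] and the angle a
# is uniform in [0, 2*pi), so randomize((0, 0), 10.0, 10.0, False) returns
# a point at most 10 units from the origin.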
class RadiusRandomize(inkex.Effect):
def __init__(self):
inkex.Effect.__init__(self)
self.OptionParser.add_option("--title")
self.OptionParser.add_option("-x", "--radiusx",
action="store", type="float",
dest="radiusx", default=10.0,
help="Randomly move nodes and handles within this radius, X")
self.OptionParser.add_option("-y", "--radiusy",
action="store", type="float",
dest="radiusy", default=10.0,
help="Randomly move nodes and handles within this radius, Y")
self.OptionParser.add_option("-c", "--ctrl",
action="store", type="inkbool",
dest="ctrl", default=True,
help="Randomize control points")
self.OptionParser.add_option("-e", "--end",
action="store", type="inkbool",
dest="end", default=True,
help="Randomize nodes")
self.OptionParser.add_option("-n", "--norm",
action="store", type="inkbool",
dest="norm", default=True,
help="Use normal distribution")
def effect(self):
for id, node in self.selected.iteritems():
if node.tag == inkex.addNS('path','svg'):
d = node.get('d')
p = cubicsuperpath.parsePath(d)
for subpath in p:
for csp in subpath:
if self.options.end:
delta=randomize([0,0], self.options.radiusx, self.options.radiusy, self.options.norm)
csp[0][0]+=delta[0]
csp[0][1]+=delta[1]
csp[1][0]+=delta[0]
csp[1][1]+=delta[1]
csp[2][0]+=delta[0]
csp[2][1]+=delta[1]
if self.options.ctrl:
csp[0]=randomize(csp[0], self.options.radiusx, self.options.radiusy, self.options.norm)
csp[2]=randomize(csp[2], self.options.radiusx, self.options.radiusy, self.options.norm)
node.set('d',cubicsuperpath.formatPath(p))
if __name__ == '__main__':
e = RadiusRandomize()
e.affect()
# vim: expandtab shiftwidth=4 tabstop=8 softtabstop=4 encoding=utf-8 textwidth=99
|
andrey-yemelyanov/competitive-programming
|
cp-book/ch1/adhoc/time/12148_Electricity.py
|
Python
|
mit
| 1,212
| 0.005776
|
# Problem name: 12148 Electricity
# Problem url: https://uva.onlinejudge.org/external/121/12148.pdf
# Author: Andrey Yemelyanov
import sys
import math
import datetime
def readline():
return sys.stdin.readline().strip()
def main():
while True:
n_readings = int(readline())
if n_readings == 0:
break
meter_readings = []
        for i in range(n_readings):
reading = [int(x) for x in readline().split()]
date = datetime.date(reading[2], reading[1], reading[0])
consumption = reading[3]
meter_readings.append((date, consumption))
c = get_daily_consumption(meter_readings)
print(len(c), sum(c))
def get_daily_consumption(meter_readings):
c = []
for i in range(len(meter_readings)):
if i > 0:
current_date = meter_readings[i][0]
current_consumption = meter_readings[i][1]
prev_date = meter_readings[i - 1][0]
prev_consumption = meter_readings[i - 1][1]
if prev_date + datetime.timedelta(days = 1) == current_date:
c.append(current_consumption - prev_consumption)
return c
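# Illustrative sketch, not part of the original solution: readings of
# (2011-12-31, 100), (2012-01-01, 130) and (2012-01-03, 160) yield
# c == [30], because only the first pair of dates is consecutive, so
# main() would print "1 30" for that block.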
if __name__=="__main__":
main()
|
Ecotrust/forestplanner
|
idb_import/treelive_summary/unpivoted/local_util_test_dbh_flexibility.py
|
Python
|
bsd-3-clause
| 3,275
| 0.00855
|
import os
import sys
from django.core.management import setup_environ
thisdir = os.path.dirname(os.path.abspath(__file__))
appdir = os.path.realpath(os.path.join(thisdir, '..', '..', '..', 'lot'))
sys.path.append(appdir)
import settings
setup_environ(settings)
##############################
import pandas as pd
from django.db import connection
def dictfetchall(cursor):
"Returns all rows from a cursor as a dict"
desc = cursor.description
return [
dict(zip([col[0] for col in desc], row))
for row in cursor.fetchall()
]
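# Illustrative sketch, not part of the original script: after
# cursor.execute("SELECT 1 AS a, 2 AS b"), dictfetchall(cursor) returns
# [{'a': 1, 'b': 2}].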
def filter_stand_list(stand_list, min_candidates=3, tpa_factor=1.2, output="candidates_concat.csv"):
cursor = connection.cursor()
keep_going = True
    tpa_matches = []
    rows = []  # last known good result set; stays empty if the first query already fails
    remaining = stand_list[::-1]
while keep_going:
where_clause_template = """(fia_forest_type_name = '%s' AND calc_dbh_class = %d)"""
where_clause_tpa_template = """(fia_forest_type_name = '%s' AND calc_dbh_class = %d AND SumOfTPA > %f AND SumOfTPA < %f)"""
where_clauses = []
for sc in stand_list:
if sc in tpa_matches:
where_clauses.append(where_clause_tpa_template % (sc[0], sc[1], sc[2]/tpa_factor, sc[2]*tpa_factor))
else:
where_clauses.append(where_clause_template % (sc[0], sc[1]))
where_clause = " \n OR ".join(where_clauses)
sql = """
SELECT * FROM (
SELECT
COND_ID,
SUM(SumOfTPA) as "Total_TPA",
SUM(SumOfBA_FT2_AC) as "Total_BAA",
SUM(pct_of_totalba) as "PCT_BA",
COUNT(SumOfTPA) as "class_matches",
AVG(COUNT_SPECIESSIZECLASSES) as "class_total"
FROM treelive_summary
WHERE
%(where_clause)s
GROUP BY COND_ID
) as subselect
WHERE class_matches = %(num_specified_classes)s
ORDER BY "class_matches" DESC, "PCT_BA" DESC
""" % { 'where_clause': where_clause,
'num_specified_classes': len(stand_list)}
print sql
cursor.execute(sql)
local_rows = dictfetchall(cursor)
num_candidates = len(local_rows)
print num_candidates
if num_candidates < 10:
# bail, use last known good query (ie don't assign local_rows to rows)
break
        rows = local_rows
if num_candidates <= min_candidates or len(tpa_matches) == len(stand_list):
keep_going = False
else:
tpa_matches.append(remaining.pop())
    if rows:
        df = pd.DataFrame(rows)
        df.index = df['cond_id']
        del df['cond_id']
        print df[:25]
        df.to_csv(output)
    else:
        print "*** NADA"
if __name__ == "__main__":
# This guy matches condition 1332 almost exactly
stand_list = [
# species, sizeclass, tpa
('Douglas-fir', 6, 160),
('Douglas-fir', 10, 31),
('Douglas-fir', 14, 7),
('Western hemlock', 14, 5),
#('Western redcedar', 14, 5),
#('Red alder', 6, 40),
]
filter_stand_list(stand_list, )
|
silenceli/nova
|
nova/api/openstack/compute/limits.py
|
Python
|
apache-2.0
| 15,344
| 0.000065
|
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Module dedicated functions/classes dealing with rate limiting requests.
This module handles rate limiting at a per-user level, so it should not be used
to prevent intentional Denial of Service attacks, as we can assume a DOS can
easily come through multiple user accounts. DOS protection should be done at a
different layer. Instead this module should be used to protect against
unintentional user actions. With that in mind the limits set here should be
high enough as to not rate-limit any intentional actions.
To find good rate-limit values, check how long requests are taking (see logs)
in your environment to assess your capabilities and multiply out to get
figures.
NOTE: As the rate-limiting here is done in memory, this only works per
process (each process will have its own rate limiting counter).
"""
import collections
import copy
import httplib
import math
import re
import time
from oslo.serialization import jsonutils
from oslo.utils import importutils
import webob.dec
import webob.exc
from nova.api.openstack.compute.views import limits as limits_views
from nova.api.openstack import wsgi
from nova.i18n import _
from nova import quota
from nova import utils
from nova import wsgi as base_wsgi
QUOTAS = quota.QUOTAS
LIMITS_PREFIX = "limits."
class LimitsController(object):
"""Controller for accessing limits in the OpenStack API."""
def index(self, req):
"""Return all global and rate limit information."""
context = req.environ['nova.context']
project_id = req.params.get('tenant_id', context.project_id)
quotas = QUOTAS.get_project_quotas(context, project_id,
usages=False)
abs_limits = dict((k, v['limit']) for k, v in quotas.items())
rate_limits = req.environ.get("nova.limits", [])
builder = self._get_view_builder(req)
return builder.build(rate_limits, abs_limits)
def create(self, req, body):
"""Create a new limit."""
raise webob.exc.HTTPNotImplemented()
def delete(self, req, id):
"""Delete the limit."""
raise webob.exc.HTTPNotImplemented()
def detail(self, req):
"""Return limit details."""
raise webob.exc.HTTPNotImplemented()
def show(self, req, id):
"""Show limit information."""
raise webob.exc.HTTPNotImplemented()
def update(self, req, id, body):
"""Update existing limit."""
raise webob.exc.HTTPNotImplemented()
def _get_view_builder(self, req):
return limits_views.ViewBuilder()
def create_resource():
return wsgi.Resource(LimitsController())
class Limit(object):
"""Stores information about a limit for HTTP requests."""
UNITS = dict([(v, k) for k, v in utils.TIME_UNITS.items()])
def __init__(self, verb, uri, regex, value, unit):
"""Initialize a new `Limit`.
@param verb: HTTP verb (POST, PUT, etc.)
@param uri: Human-readable URI
@param regex: Regular expression format for this limit
@param value: Integer number of requests which can be made
@param unit: Unit of measure for the value parameter
"""
self.verb = verb
self.uri = uri
self.regex = regex
self.value = int(value)
self.unit = unit
self.unit_string = self.display_unit().lower()
self.remaining = int(value)
if value <= 0:
raise ValueError("Limit value must be > 0")
self.last_request = None
self.next_request = None
self.water_level = 0
self.capacity = self.unit
self.request_value = float(self.capacity) / float(self.value)
msg = (_("Only %(value)s %(verb)s request(s) can be "
"made to %(uri)s every %(unit_string)s.") %
{'value': self.value, 'verb': self.verb, 'uri': self.uri,
'unit_string': self.unit_string})
self.error_message = msg
def __call__(self, verb, url):
"""Represents a call to this limit from a relevant request.
@param verb: string http verb (POST, GET, etc.)
@param url: string URL
"""
if self.verb != verb or not re.match(self.regex, url):
return
now = self._get_time()
if self.last_request is None:
self.last_request = now
leak_value = now - self.last_request
self.water_level -= leak_value
self.water_level = max(self.water_level, 0)
self.water_level += self.request_value
difference = self.water_level - self.capacity
self.last_request = now
if difference > 0:
self.water_level -= self.request_value
self.next_request = now + difference
return difference
cap = self.capacity
water = self.water_level
val = self.value
self.remaining = math.floor(((cap - water) / cap) * val)
self.next_request = now
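    # Illustrative numbers, not part of the original module: for
    # Limit("POST", "*", ".*", 120, utils.TIME_UNITS['MINUTE']), capacity
    # is 60 and request_value is 0.5, so each call adds 0.5 to the water
    # level while one unit drains per elapsed second; __call__ therefore
    # only returns a positive delay once the sustained rate exceeds two
    # requests per second.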
def _get_time(self):
"""Retrieve the current time. Broken out for testability."""
return time.time()
def display_unit(self):
"""Display the string name of the unit."""
return self.UNITS.get(self.unit, "UNKNOWN")
def display(self):
"""Return a useful representation of this class."""
return {
"verb": self.verb,
"URI": self.uri,
"regex": self.regex,
"value": self.value,
"remaining": int(self.remaining),
"unit": self.display_unit(),
"resetTime": int(self.next_request or self._get_time()),
}
# "Limit" format is a dictionary with the HTTP verb, human-readable URI,
# a regular-expression to match, value and unit of measure (PER_DAY, etc.)
DEFAULT_LIMITS = [
Limit("POST", "*", ".*", 120, utils.TIME_UNITS['MINUTE']),
Limit("POST", "*/servers", "^/servers", 120, utils.TIME_UNITS['MINUTE']),
Limit("PUT", "*", ".*", 120, utils.TIME_UNITS['MINUTE']),
Limit("GET", "*changes-since*", ".*changes-since.*", 120,
utils.TIME_UNITS['MINUTE']),
Limit("DELETE", "*", ".*", 120, utils.TIME_UNITS['MINUTE']),
Limit("GET", "*/os-fping", "^/os-fping", 12, utils.TIME_UNITS['MINUTE']),
]
class RateLimitingMiddleware(base_wsgi.Middleware):
"""Rate-limits requests passing through this middleware. All limit
information is stored in memory for this implementation.
"""
def __init__(self, application, limits=None, limiter=None, **kwargs):
"""Initialize new `RateLimitingMiddleware`.
It wraps the given WSGI application and sets up the given limits.
@param application: WSGI application to wrap
@param limits: String describing limits
@param limiter: String identifying class for representing limits
Other parameters are passed to the constructor for the limiter.
"""
base_wsgi.Middleware.__init__(self, application)
# Select the limiter class
if limiter is None:
limiter = Limiter
else:
            limiter = importutils.import_class(limiter)
# Parse the limits, if any are provided
if limits is not None:
limits = limiter.parse_limits(limits)
        self._limiter = limiter(limits or DEFAULT_LIMITS, **kwargs)
@webob.dec.wsgify(RequestClass=wsgi.Request)
def __call__(self, req):
"""Represents a single call through this middleware.
We should record the request if we have a limit relevant to it.
        If no limit is relevant to the request, ignore it.
|
sio2project/oioioi
|
oioioi/suspendjudge/tests.py
|
Python
|
gpl-3.0
| 3,682
| 0.000543
|
from celery.exceptions import Ignore
from django.urls import reverse
from oioioi.base.tests import TestCase
from oioioi.contests.models import Contest, ProblemInstance, Submission
from oioioi.evalmgr.tasks import create_environ
from oioioi.programs.controllers import ProgrammingContestController
from oioioi.suspendjudge.handlers import check_problem_instance_state
class TestSuspendjudgeSuper(TestCase):
def _empty_post(self, login, view, problem_instance):
self.assertTrue(self.client.login(username=login))
url = reverse(
'oioioiadmin:suspendjudge_' + view,
kwargs={'problem_instance_id': problem_instance.id},
)
return self.client.post(url, {})
class TestViews(TestSuspendjudgeSuper):
fixtures = [
'test_users',
'test_permissions',
'test_contest',
'test_full_package',
'test_problem_instance',
]
def test_views_permissions(self):
problem_instance = ProblemInstance.objects.get()
login_codes = {'test_user': 403, 'test_admin': 302, 'test_contest_admin': 302}
views = [
'suspend_all',
'resume_and_rejudge',
'suspend_all_but_init',
'resume_and_clear',
]
self.client.get('/c/c/') # 'c' becomes the current contest
        for login in login_codes:
for view in views:
response = self._empty_post(login, view, problem_instance)
self.assertEqual(response.status_code, login_codes[login])
class TestSuspending(TestSuspendjudgeSuper):
fixtures = [
'test_users',
'test_contest',
'test_full_package',
'test_problem_instance',
'test_submission',
]
def test_handler_presence(self):
contest = Contest.objects.get()
submission = Submission.objects.get()
controller = ProgrammingContestController(contest)
env = create_environ()
env.setdefault('recipe', []).append(('dummy', 'dummy'))
env['extra_args'] = []
controller.fill_evaluation_environ(env, submission)
controller.finalize_evaluation_environment(env)
self.assertIn(
(
'check_problem_instance_state',
'oioioi.suspendjudge.handlers.check_problem_instance_state',
dict(suspend_init_tests=True),
),
env['recipe'],
)
self.assertIn(
(
'check_problem_instance_state',
'oioioi.suspendjudge.handlers.check_problem_instance_state',
),
env['recipe'],
)
def test_handler(self):
problem_instance = ProblemInstance.objects.get()
self.client.get('/c/c/') # 'c' becomes the current contest
self._empty_post('test_admin', 'suspend_all', problem_instance)
env = {
'problem_instance_id': problem_instance.id,
'job_id': 'dummy',
'celery_task_id': 'dummy',
'submission_id': 1,
'is_rejudge': False,
'report_kinds': ['INITIAL', 'NORMAL'],
}
with self.assertRaises(Ignore):
check_problem_instance_state(env, suspend_init_tests=True)
self._empty_post('test_admin', 'resume_and_clear', problem_instance)
self._empty_post('test_admin', 'suspend_all_but_init', problem_instance)
check_problem_instance_state(env, suspend_init_tests=True)
with self.assertRaises(Ignore):
check_problem_instance_state(env)
env['is_rejudge'] = True
env['report_kinds'] = ['HIDDEN']
check_problem_instance_state(env)
|
vlegoff/tsunami
|
src/secondaires/navigation/equipage/ordres/lever_ancre.py
|
Python
|
bsd-3-clause
| 2,720
| 0.000369
|
# -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant l'ordre LeverAncre."""
from secondaires.navigation.equipage.signaux import *
from ..ordre import *
class LeverAncre(Ordre):
"""Ordre lever_ancre.
Cet ordre est appelé pour demander à un matelot de lever l'ancre.
"""
cle = "lever_ancre"
etats_autorises = ("ancre", "")
def executer(self):
"""Exécute l'ordre : déplace le matelot."""
personnage = self.matelot.personnage
salle = personnage.salle
if not hasattr(salle, "ancre"):
return
        ancre = salle.get_element("ancre")
if not ancre:
return
if not ancre.jetee:
yield SignalInutile("l'ancre est déjà levée")
else:
ancre.lever(personnage)
i = 0
while "ancre" in personnage.etats:
i += 1
if i > 100:
yield SignalAbandonne("J'ai essayé trop longtemps.")
elif personnage.stats.endurance < 40:
yield SignalRelais("Je suis trop fatigué.")
else:
yield 2
yield SignalTermine()
|
googleinterns/lasertagger
|
tagging_converter_test.py
|
Python
|
apache-2.0
| 4,946
| 0.002431
|
# coding=utf-8
# Copyright 2019 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding=utf-8
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
import tagging
import tagging_converter
import tensorflow as tf
def tags_to_str(tags):
if not tags:
return ''
return '--'.join(map(str, tags))
class TaggingConverterTest(parameterized.TestCase):
@parameterized.parameters(
# A simple test.
{
'input_texts': ['Turing was born in 1912 .', 'Turing died in 1954 .'],
'target': 'Turing was born in 1912 and died in 1954 .',
'phrase_vocabulary': ['and'],
'target_tags': [
'KEEP', 'KEEP', 'KEEP', 'KEEP', 'KEEP', 'DELETE|and', 'DELETE',
'KEEP', 'KEEP', 'KEEP', 'KEEP'
],
},
# Test special characters.
{
'input_texts': ['Turing was born in 1912 .', 'Turing died in 1954 .'],
'target': 'Turing was born in 1912 ädåö died in 1954 .',
'phrase_vocabulary': ['ädåö'],
'target_tags': [
'KEEP', 'KEEP', 'KEEP', 'KEEP', 'KEEP', 'DELETE|ädåö', 'DELETE',
'KEEP', 'KEEP', 'KEEP', 'KEEP'
],
},
# Test swapping.
{
'input_texts': ['Turing was born in 1912 .', 'Turing died in 1954 .'],
'target': 'Turing died in 1954 and was born in 1912 .',
'phrase_vocabulary': ['and'],
'target_tags': [
'DELETE', 'KEEP', 'KEEP', 'KEEP', 'KEEP', 'SWAP', 'KEEP', 'KEEP',
'KEEP', 'KEEP', 'DELETE|and'
],
},
# Test complex swapping.
{
'input_texts': ['Turing was born in 1912 .',
'Turing was a pioneer in TCS .'],
'target': 'Turing , a pioneer in TCS , was born in 1912 .',
'phrase_vocabulary': [','],
'target_tags': [
'DELETE', 'KEEP', 'KEEP', 'KEEP', 'KEEP', 'SWAP', 'KEEP',
'DELETE|,', 'KEEP', 'KEEP', 'KEEP', 'KEEP', 'DELETE|,'
],
},
# Test that unnecessary phrases are not added.
{
'input_texts': ['A . And B .'],
'target': 'A , and B .',
'phrase_vocabulary': [',', 'and', ', and'],
# Although, it would be possible to add ", and" and delete "And", this
            # shouldn't happen so that the tag sequences are as simple as
# possible.
'target_tags': ['KEEP', 'DELETE|,', 'KEEP', 'KEEP', 'KEEP'],
},
# Test that necessary phrases are added.
{
'input_texts': ['A . And B .'],
'target': 'A , and B .',
'phrase_vocabulary': [', and'],
# Now we need to delete "And" since "," is not in the vocabulary
# anymore.
'target_tags': ['KEEP', 'DELETE|, and', 'DELETE', 'KEEP', 'KEEP'],
},
)
def test_matching_conversion(self, input_texts, target, phrase_vocabulary,
target_tags):
task = tagging.EditingTask(input_texts)
converter = tagging_converter.TaggingConverter(phrase_vocabulary)
tags = converter.compute_tags(task, target)
self.assertEqual(tags_to_str(tags), tags_to_str(target_tags))
def test_no_match(self):
input_texts = ['Turing was born in 1912 .', 'Turing died in 1954 .']
target = 'Turing was born in 1912 and died in 1954 .'
task = tagging.EditingTask(input_texts)
phrase_vocabulary = ['but']
converter = tagging_converter.TaggingConverter(phrase_vocabulary)
tags = converter.compute_tags(task, target)
# Vocabulary doesn't contain "and" so the inputs can't be converted to the
# target.
self.assertFalse(tags)
def test_first_deletion_idx_computation(self):
converter = tagging_converter.TaggingConverter([])
tag_strs = ['KEEP', 'DELETE', 'DELETE', 'KEEP']
tags = [tagging.Tag(s) for s in tag_strs]
source_token_idx = 3
idx = converter._find_first_deletion_idx(source_token_idx, tags)
self.assertEqual(idx, 1)
def test_phrase_vocabulary_extraction(self):
label_map = {'KEEP|, and': 0, 'DELETE|but': 1, 'DELETE': 2, 'KEEP|and': 3,
'DELETE|and': 4}
self.assertEqual(
tagging_converter.get_phrase_vocabulary_from_label_map(label_map),
{', and', 'but', 'and'})
if __name__ == '__main__':
tf.test.main()
|
blond-admin/BLonD
|
unittests/beams/test_beam_object.py
|
Python
|
gpl-3.0
| 14,105
| 0.001347
|
# coding: utf-8
# Copyright 2017 CERN. This software is distributed under the
# terms of the GNU General Public Licence version 3 (GPL Version 3),
# copied verbatim in the file LICENCE.md.
# In applying this licence, CERN does not waive the privileges and immunities
# granted to it by virtue of its status as an Intergovernmental Organization or
# submit itself to any jurisdiction.
# Project website: http://blond.web.cern.ch/
'''
Unit-tests for the Beam class.
Run as python testBeamObject.py in console or via travis
'''
# General imports
# -----------------
from __future__ import division, print_function
import unittest
import numpy
from scipy.constants import physical_constants
# BLonD imports
# --------------
from blond.beam.beam import Particle, Proton, Electron
from blond.input_parameters.ring import Ring
from blond.input_parameters.rf_parameters import RFStation
from blond.beam.beam import Beam
from blond.beam.distributions import matched_from_distribution_function
from blond.trackers.tracker import FullRingAndRF, RingAndRFTracker
import blond.utils.exceptions as blExcept
class testParticleClass(unittest.TestCase):
def setUp(self):
self.test_particle = Particle(1, 2)
def test_particle_attributes(self):
for attribute in ['mass', 'charge', 'radius_cl', 'C_gamma', 'C_q']:
self.assertTrue(hasattr(self.test_particle, attribute),
msg=f"Particle: no '{attribute}' attribute")
def test_attribute_types(self):
for attribute in ['mass', 'charge']:
self.assertIsInstance(getattr(self.test_particle, attribute), float,
msg=f"Particle: {attribute} is not a float")
def test_negative_restmass_exception(self):
with self.assertRaises(RuntimeError):
Particle(-1, 2)
class testElectron(unittest.TestCase):
def setUp(self):
self.electron = Electron()
def test_classical_electron_radius(self):
self.assertAlmostEqual(self.electron.radius_cl,
physical_constants['classical electron radius'][0], delta=1e-24,
            msg='Electron: wrong classical electron radius')
def test_Sand_radiation_constant(self):
# value from S. Lee: Accelerator Physics, 2nd ed., eq (4.5)
# convert from GeV^3 to eV^3
self.assertAlmostEqual(self.electron.C_gamma, 8.846e-5 / (1e9)**3, delta=1e-35,
msg='Electron: wrong radiation constant')
def test_quantum_radiation_constant(self):
# value from A. Wolski: Beam Dynamics in High Energy Accelerators, p. 233
self.assertAlmostEqual(self.electron.C_q, 3.832e-13, delta=1e-16,
msg='Electron: wrong quantum excitation constant')
class testProton(unittest.TestCase):
def setUp(self):
self.proton = Proton()
def test_classical_proton_radius(self):
# value from S. Lee: Accelerator Physics, 2nd ed., p. 560
self.assertAlmostEqual(self.proton.radius_cl, 1.5346986e-18, delta=1e-24,
msg='Proton: wrong classical proton radius')
def test_Sand_radiation_constant(self):
# value from S. Lee: Accelerator Physics, 2nd ed., eq (4.5)
# convert from GeV^3 to eV^3
self.assertAlmostEqual(self.proton.C_gamma, 7.783e-18 / (1e9)**3, delta=1e-48,
msg='Proton: wrong radiation constant')
class testBeamClass(unittest.TestCase):
# Run before every test
def setUp(self):
# Bunch parameters
# -----------------
N_turn = 200
N_b = 1e9 # Intensity
N_p = int(2e6) # Macro-particles
# Machine parameters
# --------------------
C = 6911.5038 # Machine circumference [m]
p = 450e9 # Synchronous momentum [eV/c]
gamma_t = 17.95142852 # Transition gamma
alpha = 1./gamma_t**2 # First order mom. comp. factor
# Define general parameters
# --------------------------
self.general_params = Ring(C, alpha, p, Proton(), N_turn)
# Define beam
# ------------
self.beam = Beam(self.general_params, N_p, N_b)
# Define RF section
# -----------------
self.rf_params = RFStation(self.general_params, [4620], [7e6], [0.])
# Run after every test
def tearDown(self):
del self.general_params
del self.beam
del self.rf_params
def test_variables_types(self):
self.assertIsInstance(self.beam.beta, float,
msg='Beam: beta is not a float')
self.assertIsInstance(self.beam.gamma, float,
msg='Beam: gamma is not a float')
self.assertIsInstance(self.beam.energy, float,
msg='Beam: energy is not a float')
self.assertIsInstance(self.beam.momentum, float,
msg='Beam: momentum is not a float')
self.assertIsInstance(self.beam.mean_dt, float,
msg='Beam: mean_dt is not a float')
self.assertIsInstance(self.beam.mean_dE, float,
msg='Beam: mean_dE is not a float')
self.assertIsInstance(self.beam.sigma_dt, float,
msg='Beam: sigma_dt is not a float')
self.assertIsInstance(self.beam.sigma_dE, float,
msg='Beam: sigma_dE is not a float')
self.assertIsInstance(self.beam.intensity, float,
msg='Beam: intensity is not a float')
self.assertIsInstance(self.beam.n_macroparticles, int,
msg='Beam: n_macroparticles is not an int')
self.assertIsInstance(self.beam.ratio, float,
msg='Beam: ratio is not a float')
self.assertIsInstance(self.beam.id, numpy.ndarray,
msg='Beam: id is not a numpy.array')
self.assertIn('int', type(self.beam.id[0]).__name__,
msg='Beam: id array does not contain int')
self.assertIsInstance(self.beam.n_macroparticles_lost, int,
msg='Beam: n_macroparticles_lost is not an int')
self.assertIsInstance(self.beam.n_macroparticles_alive, int,
msg='Beam: n_macroparticles_alive is not an int')
self.assertIsInstance(self.beam.dt, numpy.ndarray,
msg='Beam: dt is not a numpy.array')
self.assertIsInstance(self.beam.dE, numpy.ndarray,
msg='Beam: dE is not a numpy.array')
        self.assertIn('float', type(self.beam.dt[0]).__name__,
msg='Beam: dt does not contain float')
self.assertIn('float', type(self.beam.dE[0]).__name__,
msg='Beam: dE does not contain float')
def test_beam_statistic(self):
sigma_dt = 1.
sigma_dE = 1.
        self.beam.dt = sigma_dt*numpy.random.randn(self.beam.n_macroparticles)
self.beam.dE = sigma_dE*numpy.random.randn(self.beam.n_macroparticles)
self.beam.statistics()
self.assertAlmostEqual(self.beam.sigma_dt, sigma_dt, delta=1e-2,
msg='Beam: Failed statistic sigma_dt')
self.assertAlmostEqual(self.beam.sigma_dE, sigma_dE, delta=1e-2,
msg='Beam: Failed statistic sigma_dE')
self.assertAlmostEqual(self.beam.mean_dt, 0., delta=1e-2,
msg='Beam: Failed statistic mean_dt')
self.assertAlmostEqual(self.beam.mean_dE, 0., delta=1e-2,
msg='Beam: Failed statistic mean_dE')
def test_losses_separatrix(self):
longitudinal_tracker = RingAndRFTracker(self.rf_params, self.beam)
full_tracker = FullRingAndRF([longitudinal_tracker])
try:
matched_from_distribution_function(self.beam,
full_tracker,
distribution_exponent=1.5,