| text (stringlengths 4–1.02M) | meta (dict) |
|---|---|
"""
twtxt.cache
~~~~~~~~~~~
This module implements a caching system for storing tweets.
:copyright: (c) 2016-2022 by buckket.
:license: MIT, see LICENSE for more details.
"""
import logging
import os
import shelve
from time import time as timestamp
from click import get_app_dir
logger = logging.getLogger(__name__)
class Cache:
cache_dir = get_app_dir("twtxt")
cache_name = "cache"
def __init__(self, cache_file, cache, update_interval=0):
"""Initializes new :class:`Cache` object.
:param str cache_file: full path to the loaded cache file.
        :param ~shelve.Shelf cache: a Shelf object, with the cache loaded.
:param int update_interval: number of seconds the cache is considered to be
up-to-date without calling any external resources.
"""
self.cache_file = cache_file
self.cache = cache
self.update_interval = update_interval
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
        # Do not return close()'s result: returning True from __exit__ would
        # suppress exceptions raised inside the "with" block.
        self.close()
@classmethod
def from_file(cls, file, *args, **kwargs):
"""Try loading given cache file."""
try:
cache = shelve.open(file)
return cls(file, cache, *args, **kwargs)
except OSError as e:
logger.debug("Loading {0} failed".format(file))
raise e
@classmethod
def discover(cls, *args, **kwargs):
"""Make a guess about the cache file location and try loading it."""
file = os.path.join(Cache.cache_dir, Cache.cache_name)
return cls.from_file(file, *args, **kwargs)
@property
def last_updated(self):
"""Returns *NIX timestamp of last update of the cache."""
try:
return self.cache["last_update"]
except KeyError:
return 0
@property
def is_valid(self):
"""Checks if the cache is considered to be up-to-date."""
        return timestamp() - self.last_updated <= self.update_interval
def mark_updated(self):
"""Mark cache as updated at current *NIX timestamp"""
if not self.is_valid:
self.cache["last_update"] = timestamp()
def is_cached(self, url):
"""Checks if specified URL is cached."""
try:
            return url in self.cache
except TypeError:
return False
def last_modified(self, url):
"""Returns saved 'Last-Modified' header, if available."""
try:
return self.cache[url]["last_modified"]
except KeyError:
return None
def add_tweets(self, url, last_modified, tweets):
"""Adds new tweets to the cache."""
try:
self.cache[url] = {"last_modified": last_modified, "tweets": tweets}
self.mark_updated()
return True
except TypeError:
return False
def get_tweets(self, url, limit=None):
"""Retrieves tweets from the cache."""
try:
tweets = self.cache[url]["tweets"]
self.mark_updated()
return sorted(tweets, reverse=True)[:limit]
except KeyError:
return []
def remove_tweets(self, url):
"""Tries to remove cached tweets."""
try:
del self.cache[url]
self.mark_updated()
return True
except KeyError:
return False
def close(self):
"""Closes Shelve object."""
try:
self.cache.close()
return True
except AttributeError:
return False
def sync(self):
"""Syncs Shelve object."""
try:
self.cache.sync()
return True
except AttributeError:
return False
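# --- Usage sketch (not part of the original module) ---
# A minimal, hedged example of driving the Cache class above; the feed URL and
# update interval are placeholders, and the twtxt cache directory is assumed
# to already exist.
if __name__ == "__main__":
    with Cache.discover(update_interval=300) as cache:
        feed = "https://example.org/twtxt.txt"
        if not cache.is_cached(feed):
            cache.add_tweets(feed, last_modified=None, tweets=[])
        print(cache.get_tweets(feed, limit=20))
        print("cache up-to-date:", cache.is_valid)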
|
{
"content_hash": "b8f1656f9ffe4272569ddeaca7b68cb9",
"timestamp": "",
"source": "github",
"line_count": 135,
"max_line_length": 86,
"avg_line_length": 28.733333333333334,
"alnum_prop": 0.5638051044083526,
"repo_name": "buckket/twtxt",
"id": "4c90d0039a26416e435fda49a7fd584ffbff2c02",
"size": "3879",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "twtxt/cache.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "574"
},
{
"name": "Python",
"bytes": "64161"
}
],
"symlink_target": ""
}
|
from django.core.management.base import BaseCommand
from ...utils.lgUtils import *
from ...utils.earthquakesUtils import *
import os
import requests
import json
from datetime import timedelta
import simplekml
from polycircles import polycircles
import datetime
from json.decoder import JSONDecodeError
class Command(BaseCommand):
help = 'Generate Last Week Earthquakes KML'
today = datetime.datetime.today()
pastWeek = today - timedelta(days=7)
pastWeekFormatted = pastWeek.strftime("%Y-%m-%d")
def handle(self, *args, **options):
self.generateLastWeekEarthquakesKml()
self.stdout.write(self.style.SUCCESS('Generated Latest Week Earthquakes'))
def generateLastWeekEarthquakesKml(self):
self.stdout.write("Generating Latest Week Earthquakes KMZ... ")
self.stdout.write("Getting Earthquakes")
sparkIp = getSparkIp()
response = requests.get('http://' + sparkIp + ':5000/getEarthquakes?date=' + self.pastWeekFormatted)
jsonData = json.loads(response.json())
numberObtained = len(jsonData)
self.stdout.write("Obtained " + str(numberObtained) + " earthquakes")
self.createKml(jsonData, numberObtained)
def createKml(self, jsonData, numberObtained):
kml = simplekml.Kml()
tour = kml.newgxtour(name="LastWeekEarthquakesTour")
playlist = tour.newgxplaylist()
balloonDuration = 1
flyToDuration = 3
if numberObtained > 1000:
balloonDuration = numberObtained / 1000
self.stdout.write("Default duration: " + str(balloonDuration))
earthquakeNumber = 1
for row in jsonData:
if earthquakeNumber > 666:
break
self.stdout.write(str(earthquakeNumber))
place = row["place"]
latitude = row["latitude"]
longitude = row["longitude"]
magnitude = row["magnitude"]
try:
geoJson = replaceJsonString(str(row["geojson"]))
infowindow = self.populateInfoWindow(row, geoJson)
except JSONDecodeError:
self.stdout.write(self.style.ERROR('Error decoding json'))
self.stdout.write(str(row["geojson"]))
continue
try:
if magnitude is not None:
absMagnitude = abs(float(magnitude))
color = simplekml.Color.grey
if absMagnitude <= 2:
color = simplekml.Color.green
elif 2 < absMagnitude <= 5:
color = simplekml.Color.orange
elif absMagnitude > 5:
color = simplekml.Color.red
playlist.newgxwait(gxduration=3 * balloonDuration)
polycircle = polycircles.Polycircle(latitude=latitude, longitude=longitude,
radius=2000 * absMagnitude, number_of_vertices=100)
pol = kml.newpolygon(name="", description=infowindow, outerboundaryis=polycircle.to_kml())
pol.style.polystyle.color = color
pol.style.polystyle.fill = 0
pol.style.polystyle.outline = 1
pol.style.linestyle.color = color
pol.style.linestyle.width = 10
pol.style.balloonstyle.bgcolor = simplekml.Color.lightblue
pol.style.balloonstyle.text = "$[description]"
pol.visibility = 0
''' Fly To the atmosphere '''
flyto = playlist.newgxflyto(gxduration=flyToDuration,
gxflytomode=simplekml.GxFlyToMode.smooth)
flyto.camera.longitude = longitude
flyto.camera.latitude = latitude
flyto.camera.altitude = 15000000
flyto.camera.range = 15000000
flyto.camera.tilt = 0
playlist.newgxwait(gxduration=flyToDuration)
''' Go Back To the point '''
flyto = playlist.newgxflyto(gxduration=flyToDuration,
gxflytomode=simplekml.GxFlyToMode.smooth)
flyto.camera.longitude = longitude
flyto.camera.latitude = latitude
flyto.camera.altitude = 100000
flyto.camera.range = 100000
flyto.camera.tilt = 0
playlist.newgxwait(gxduration=flyToDuration)
self.simulateEarthquake(playlist, latitude, longitude, absMagnitude)
animatedupdateshow = playlist.newgxanimatedupdate(gxduration=balloonDuration / 10)
animatedupdateshow.update.change = '<Placemark targetId="{0}">' \
'<visibility>1</visibility></Placemark>' \
.format(pol.placemark.id)
for i in range(1, 11):
polycircleAux = polycircles.Polycircle(latitude=latitude, longitude=longitude,
radius=(200 * i) * absMagnitude,
number_of_vertices=100)
polAux = kml.newpolygon(name=place, outerboundaryis=polycircleAux.to_kml())
polAux.style.polystyle.color = color
polAux.style.polystyle.fill = 1
polAux.style.polystyle.outline = 1
polAux.style.linestyle.color = color
polAux.style.linestyle.width = 1
polAux.visibility = 0
polAux.style.balloonstyle.displaymode = simplekml.DisplayMode.hide
animatedupdateshow = playlist.newgxanimatedupdate(gxduration=balloonDuration / 10)
animatedupdateshow.update.change = '<Placemark targetId="{0}">' \
'<visibility>1</visibility></Placemark>' \
.format(polAux.placemark.id)
animatedupdatehide = playlist.newgxanimatedupdate(gxduration=balloonDuration / 10)
animatedupdatehide.update.change = '<Placemark targetId="{0}">' \
'<visibility>0</visibility></Placemark>' \
.format(polAux.placemark.id)
playlist.newgxwait(gxduration=balloonDuration / 10)
animatedupdateshow = playlist.newgxanimatedupdate(gxduration=balloonDuration * 2)
animatedupdateshow.update.change = '<Placemark targetId="{0}"><visibility>1</visibility>' \
'<gx:balloonVisibility>1</gx:balloonVisibility></Placemark>' \
.format(pol.placemark.id)
playlist.newgxwait(gxduration=10)
animatedupdatehide = playlist.newgxanimatedupdate(gxduration=balloonDuration * 2)
animatedupdatehide.update.change = '<Placemark targetId="{0}">' \
'<gx:balloonVisibility>0</gx:balloonVisibility></Placemark>' \
.format(pol.placemark.id)
animatedupdateshow = playlist.newgxanimatedupdate(gxduration=balloonDuration / 10)
animatedupdateshow.update.change = '<Placemark targetId="{0}">' \
'<visibility>1</visibility></Placemark>' \
.format(pol.placemark.id)
except ValueError:
kml.newpoint(name=place, description=infowindow, coords=[(longitude, latitude)])
                self.stdout.write(str(magnitude))
earthquakeNumber += 1
playlist.newgxwait(gxduration=3 * balloonDuration)
fileName = "lastWeekEarthquakes.kmz"
currentDir = os.getcwd()
dir1 = os.path.join(currentDir, "static/demos")
dirPath2 = os.path.join(dir1, fileName)
self.stdout.write("Saving kml: " + str(dirPath2))
if os.path.exists(dirPath2):
os.remove(dirPath2)
kml.savekmz(dirPath2, format=False)
@staticmethod
def simulateEarthquake(playlist, latitude, longitude, magnitude):
for i in range(0, int(10*magnitude)):
bounce = 5 if (i % 2 == 0) else 0
flyto = playlist.newgxflyto(gxduration=0.01)
flyto.camera.longitude = longitude
flyto.camera.latitude = latitude
flyto.camera.altitude = 150000
flyto.camera.range = 150000
flyto.camera.tilt = bounce
playlist.newgxwait(gxduration=0.01)
@staticmethod
def populateInfoWindow(row, jsonData):
latitude = row["latitude"]
longitude = row["longitude"]
magnitude = row["magnitude"]
depth = row["depth"]
fecha = row["fecha"]
datetimeStr = datetime.datetime.fromtimestamp(int(fecha) / 1000).strftime('%Y-%m-%d %H:%M:%S')
url = jsonData.get("properties").get("url")
contentString = '<link rel = "stylesheet" href = ' \
'"https://code.getmdl.io/1.3.0/material.blue_grey-red.min.css" / > ' + \
'<link rel="stylesheet" href="https://fonts.googleapis.com/css?' \
'family=Roboto:regular,bold,italic,thin,light,bolditalic,black,medium&lang=en"/>' + \
'<table style="font-family: Roboto;"><tr><td>' + \
'<div id="content">' + '<div id="siteNotice">' + '</div>' + \
'<h1 id="firstHeading" class="thirdHeading" style="text-align:center">' + \
str(row["place"]) + '</h1>' + \
'<h2 id="firstHeading" class="thirdHeading" style="text-align:center">Date: ' + \
str(datetimeStr) + '</h2>' + \
'<h3 id="firstHeading" class="thirdHeading" style="text-align:center">Magnitude: ' + \
str(magnitude) + '</h3>' + \
'<div id="bodyContent" style="text-align: center;">' + \
'<div class="demo-charts mdl-color--white mdl-shadow--2dp mdl-cell' \
' mdl-cell--6-col mdl-grid" style="width: 98%">' + \
'<div class="mdl-cell mdl-cell--3-col mdl-layout-spacer">' + \
'<p style="font-size:1.5em;color:#474747;line-height:0.5;"><b>Latitude</b>:</p>' + \
'</div>' + \
'<div class="mdl-cell mdl-cell--3-col mdl-layout-spacer">' + \
'<p style="font-size:1.5em;color:#474747;line-height:0.5;">' + str(latitude) + '</p>' + \
'</div>' + \
'<div class="mdl-cell mdl-cell--3-col mdl-layout-spacer">' + \
'<p style="font-size:1.5em;color:#474747;line-height:0.5;"><b>Longitude</b>:</p>' + \
'</div>' + \
'<div class="mdl-cell mdl-cell--3-col mdl-layout-spacer">' + \
'<p style="font-size:1.5em;color:#474747;line-height:0.5;">' + str(longitude) + '</p>' + \
'</div>' + \
'<div class="mdl-cell mdl-cell--3-col mdl-layout-spacer">' + \
'<p style="font-size:1.5em;color:#474747;line-height:0.5;"><b>Depth</b>:</p>' + \
'</div>' + \
'<div class="mdl-cell mdl-cell--3-col mdl-layout-spacer">' + \
'<p style="font-size:1.5em;color:#474747;line-height:0.5;">' + str(depth) + ' km</p>' + \
'</div>' + \
'<div class="mdl-cell mdl-cell--3-col mdl-layout-spacer">' + \
'<p style="font-size:1.5em;color:#474747;line-height:0.5;">More Info :</p>' + \
'</div>' + \
'<div class="mdl-cell mdl-cell--3-col mdl-layout-spacer">' + \
'<p style="font-size:1.5em;color:#474747;line-height:0.5;"><a href=' + str(url) + \
' target="_blank">USGS</a></p>' + \
'</div>' + \
'</div>' + \
'</div></div>' + \
'</td></tr></table>'
return contentString
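# --- Illustrative sketch (not part of the original command) ---
# The magnitude-to-style mapping used in createKml() above, extracted into a
# standalone helper for clarity; it reuses the simplekml/polycircles imports
# from the top of this file and the 2000 m-per-magnitude radius assumption.
def buildEarthquakeCircle(kml, latitude, longitude, magnitude):
    absMagnitude = abs(float(magnitude))
    if absMagnitude <= 2:
        color = simplekml.Color.green
    elif absMagnitude <= 5:
        color = simplekml.Color.orange
    else:
        color = simplekml.Color.red
    circle = polycircles.Polycircle(latitude=latitude, longitude=longitude,
                                    radius=2000 * absMagnitude,
                                    number_of_vertices=100)
    pol = kml.newpolygon(name="", outerboundaryis=circle.to_kml())
    pol.style.polystyle.color = color
    pol.style.linestyle.color = color
    return pol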
|
{
"content_hash": "78593edf2c02f1879028e93aed5e8261",
"timestamp": "",
"source": "github",
"line_count": 248,
"max_line_length": 117,
"avg_line_length": 51.16532258064516,
"alnum_prop": 0.5163527464733233,
"repo_name": "navijo/FlOYBD",
"id": "d2f86762c61287ed4fba1735983cad9a053810e9",
"size": "12689",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Django/mysite/floybd/management/commands/getLastWeekEarthquakes.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "18712"
},
{
"name": "HTML",
"bytes": "93263"
},
{
"name": "JavaScript",
"bytes": "507197"
},
{
"name": "PHP",
"bytes": "4606"
},
{
"name": "Python",
"bytes": "385150"
},
{
"name": "Shell",
"bytes": "8966"
}
],
"symlink_target": ""
}
|
from PySide import QtCore, QtGui
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName("Dialog")
Dialog.resize(606, 443)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(Dialog.sizePolicy().hasHeightForWidth())
Dialog.setSizePolicy(sizePolicy)
self.horizontalLayout_4 = QtGui.QHBoxLayout(Dialog)
self.horizontalLayout_4.setObjectName("horizontalLayout_4")
self.verticalLayout_2 = QtGui.QVBoxLayout()
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.groupBox_4 = QtGui.QGroupBox(Dialog)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.groupBox_4.sizePolicy().hasHeightForWidth())
self.groupBox_4.setSizePolicy(sizePolicy)
self.groupBox_4.setObjectName("groupBox_4")
self.verticalLayout_3 = QtGui.QVBoxLayout(self.groupBox_4)
self.verticalLayout_3.setObjectName("verticalLayout_3")
self.txt_remove = QtGui.QLineEdit(self.groupBox_4)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.txt_remove.sizePolicy().hasHeightForWidth())
self.txt_remove.setSizePolicy(sizePolicy)
self.txt_remove.setObjectName("txt_remove")
self.verticalLayout_3.addWidget(self.txt_remove)
self.verticalLayout_2.addWidget(self.groupBox_4)
self.groupBox_5 = QtGui.QGroupBox(Dialog)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.MinimumExpanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.groupBox_5.sizePolicy().hasHeightForWidth())
self.groupBox_5.setSizePolicy(sizePolicy)
self.groupBox_5.setObjectName("groupBox_5")
self.groupBox = QtGui.QGroupBox(self.groupBox_5)
self.groupBox.setGeometry(QtCore.QRect(17, 104, 392, 72))
self.groupBox.setObjectName("groupBox")
self.rb_nr_beginning = QtGui.QRadioButton(self.groupBox)
self.rb_nr_beginning.setGeometry(QtCore.QRect(0, 20, 105, 21))
self.rb_nr_beginning.setChecked(True)
self.rb_nr_beginning.setObjectName("rb_nr_beginning")
self.rb_nr_ending = QtGui.QRadioButton(self.groupBox)
self.rb_nr_ending.setGeometry(QtCore.QRect(120, 20, 105, 21))
self.rb_nr_ending.setObjectName("rb_nr_ending")
self.layoutWidget = QtGui.QWidget(self.groupBox_5)
self.layoutWidget.setGeometry(QtCore.QRect(17, 25, 261, 63))
self.layoutWidget.setObjectName("layoutWidget")
self.horizontalLayout_2 = QtGui.QHBoxLayout(self.layoutWidget)
self.horizontalLayout_2.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.rb_addNmbrs = QtGui.QCheckBox(self.layoutWidget)
self.rb_addNmbrs.setChecked(True)
self.rb_addNmbrs.setObjectName("rb_addNmbrs")
self.horizontalLayout_2.addWidget(self.rb_addNmbrs)
self.groupBox_6 = QtGui.QGroupBox(self.layoutWidget)
self.groupBox_6.setObjectName("groupBox_6")
self.verticalLayout_4 = QtGui.QVBoxLayout(self.groupBox_6)
self.verticalLayout_4.setObjectName("verticalLayout_4")
self.txt_startnmbr = QtGui.QLineEdit(self.groupBox_6)
self.txt_startnmbr.setObjectName("txt_startnmbr")
self.verticalLayout_4.addWidget(self.txt_startnmbr)
self.horizontalLayout_2.addWidget(self.groupBox_6)
self.verticalLayout_2.addWidget(self.groupBox_5)
self.groupBox_2 = QtGui.QGroupBox(Dialog)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.groupBox_2.sizePolicy().hasHeightForWidth())
self.groupBox_2.setSizePolicy(sizePolicy)
self.groupBox_2.setObjectName("groupBox_2")
self.verticalLayout = QtGui.QVBoxLayout(self.groupBox_2)
self.verticalLayout.setObjectName("verticalLayout")
self.txt_custom = QtGui.QLineEdit(self.groupBox_2)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.txt_custom.sizePolicy().hasHeightForWidth())
self.txt_custom.setSizePolicy(sizePolicy)
self.txt_custom.setObjectName("txt_custom")
self.verticalLayout.addWidget(self.txt_custom)
self.groupBox_3 = QtGui.QGroupBox(self.groupBox_2)
self.groupBox_3.setObjectName("groupBox_3")
self.horizontalLayout = QtGui.QHBoxLayout(self.groupBox_3)
self.horizontalLayout.setObjectName("horizontalLayout")
self.rb_txt_beginning = QtGui.QRadioButton(self.groupBox_3)
self.rb_txt_beginning.setChecked(True)
self.rb_txt_beginning.setObjectName("rb_txt_beginning")
self.horizontalLayout.addWidget(self.rb_txt_beginning)
self.rb_txt_ending = QtGui.QRadioButton(self.groupBox_3)
self.rb_txt_ending.setObjectName("rb_txt_ending")
self.horizontalLayout.addWidget(self.rb_txt_ending)
self.verticalLayout.addWidget(self.groupBox_3)
self.verticalLayout_2.addWidget(self.groupBox_2)
self.buttonBox = QtGui.QDialogButtonBox(Dialog)
self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Cancel|QtGui.QDialogButtonBox.Ok)
self.buttonBox.setObjectName("buttonBox")
self.verticalLayout_2.addWidget(self.buttonBox)
self.horizontalLayout_4.addLayout(self.verticalLayout_2)
self.verticalLayout_5 = QtGui.QVBoxLayout()
self.verticalLayout_5.setObjectName("verticalLayout_5")
self.lW_source = QtGui.QListWidget(Dialog)
self.lW_source.setObjectName("lW_source")
self.verticalLayout_5.addWidget(self.lW_source)
self.horizontalLayout_3 = QtGui.QHBoxLayout()
self.horizontalLayout_3.setObjectName("horizontalLayout_3")
self.pB_preview = QtGui.QPushButton(Dialog)
self.pB_preview.setObjectName("pB_preview")
self.horizontalLayout_3.addWidget(self.pB_preview)
self.pB_iterate = QtGui.QPushButton(Dialog)
self.pB_iterate.setObjectName("pB_iterate")
self.horizontalLayout_3.addWidget(self.pB_iterate)
self.verticalLayout_5.addLayout(self.horizontalLayout_3)
self.lW_result = QtGui.QListWidget(Dialog)
self.lW_result.setObjectName("lW_result")
self.verticalLayout_5.addWidget(self.lW_result)
self.horizontalLayout_4.addLayout(self.verticalLayout_5)
self.retranslateUi(Dialog)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
Dialog.setWindowTitle(QtGui.QApplication.translate("Dialog", "Dialog", None, QtGui.QApplication.UnicodeUTF8))
        self.groupBox_4.setTitle(QtGui.QApplication.translate("Dialog", "remove Substrings (separator ;)", None, QtGui.QApplication.UnicodeUTF8))
self.txt_remove.setText(QtGui.QApplication.translate("Dialog", "<allNumbers>; ", None, QtGui.QApplication.UnicodeUTF8))
self.groupBox_5.setTitle(QtGui.QApplication.translate("Dialog", "add numbers", None, QtGui.QApplication.UnicodeUTF8))
self.groupBox.setTitle(QtGui.QApplication.translate("Dialog", "add to:", None, QtGui.QApplication.UnicodeUTF8))
self.rb_nr_beginning.setText(QtGui.QApplication.translate("Dialog", "beginning", None, QtGui.QApplication.UnicodeUTF8))
self.rb_nr_ending.setText(QtGui.QApplication.translate("Dialog", "ending", None, QtGui.QApplication.UnicodeUTF8))
self.rb_addNmbrs.setText(QtGui.QApplication.translate("Dialog", "Add Nmbrs", None, QtGui.QApplication.UnicodeUTF8))
self.groupBox_6.setTitle(QtGui.QApplication.translate("Dialog", "startnumber", None, QtGui.QApplication.UnicodeUTF8))
self.txt_startnmbr.setText(QtGui.QApplication.translate("Dialog", "0", None, QtGui.QApplication.UnicodeUTF8))
self.groupBox_2.setTitle(QtGui.QApplication.translate("Dialog", "add custom txt:", None, QtGui.QApplication.UnicodeUTF8))
self.txt_custom.setText(QtGui.QApplication.translate("Dialog", "_", None, QtGui.QApplication.UnicodeUTF8))
self.groupBox_3.setTitle(QtGui.QApplication.translate("Dialog", "add to:", None, QtGui.QApplication.UnicodeUTF8))
self.rb_txt_beginning.setText(QtGui.QApplication.translate("Dialog", "beginning", None, QtGui.QApplication.UnicodeUTF8))
self.rb_txt_ending.setText(QtGui.QApplication.translate("Dialog", "ending", None, QtGui.QApplication.UnicodeUTF8))
self.pB_preview.setText(QtGui.QApplication.translate("Dialog", "Preview", None, QtGui.QApplication.UnicodeUTF8))
self.pB_iterate.setText(QtGui.QApplication.translate("Dialog", "Iterate!", None, QtGui.QApplication.UnicodeUTF8))
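# --- Usage sketch (not part of the generated file) ---
# Typical wiring for a uic-generated Ui_* class: create a plain QDialog, let
# setupUi() populate it, then run the Qt event loop.
if __name__ == "__main__":
    import sys
    app = QtGui.QApplication(sys.argv)
    dialog = QtGui.QDialog()
    ui = Ui_Dialog()
    ui.setupUi(dialog)
    dialog.show()
    sys.exit(app.exec_())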
|
{
"content_hash": "44fc09ffdde28b18f5e2201cb1a9c569",
"timestamp": "",
"source": "github",
"line_count": 145,
"max_line_length": 145,
"avg_line_length": 64.58620689655173,
"alnum_prop": 0.7245061398825414,
"repo_name": "tzimm/SeRe",
"id": "356004d5efe3479de9a543b6bd7a0bf663de09bd",
"size": "9612",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "SeRe_RenameBox.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "27543"
}
],
"symlink_target": ""
}
|
"""User interface for Win32 terminals."""
#
# (C) Pywikibot team, 2003-2015
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, unicode_literals
__version__ = '$Id$'
import re
from pywikibot.userinterfaces import (
terminal_interface_base,
win32_unicode,
)
try:
import ctypes
ctypes_found = True
except ImportError:
ctypes_found = False
windowsColors = {
'default': 7,
'black': 0,
'blue': 1,
'green': 2,
'aqua': 3,
'red': 4,
'purple': 5,
'yellow': 6,
'lightgray': 7,
'gray': 8,
'lightblue': 9,
'lightgreen': 10,
'lightaqua': 11,
'lightred': 12,
'lightpurple': 13,
'lightyellow': 14,
'white': 15,
}
colorTagR = re.compile('\03{(?P<name>%s)}' % '|'.join(list(windowsColors.keys())))
# Compat for python <= 2.5
class Win32BaseUI(terminal_interface_base.UI):
"""User interface for Win32 terminals without ctypes."""
def __init__(self):
"""Constructor."""
terminal_interface_base.UI.__init__(self)
self.encoding = 'ascii'
class Win32CtypesUI(Win32BaseUI):
"""User interface for Win32 terminals using ctypes."""
def __init__(self):
"""Constructor."""
Win32BaseUI.__init__(self)
(stdin, stdout, stderr, argv) = win32_unicode.get_unicode_console()
self.stdin = stdin
self.stdout = stdout
self.stderr = stderr
self.argv = argv
self.encoding = 'utf-8'
def support_color(self, target_stream):
"""Return whether the target stream supports actually color."""
return getattr(target_stream, '_hConsole', None) is not None
def encounter_color(self, color, target_stream):
"""Set the new color."""
ctypes.windll.kernel32.SetConsoleTextAttribute(
target_stream._hConsole, windowsColors[color])
def _raw_input(self):
data = self.stdin.readline()
        # In both Python versions data is a str, but '\x1a' is unicode in
        # Python 2, so explicitly convert it to str; otherwise Python would
        # try to decode data.
if str('\x1a') in data:
raise EOFError()
return data.strip()
if ctypes_found:
Win32UI = Win32CtypesUI
else:
Win32UI = Win32BaseUI
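# --- Illustrative sketch (not part of the original module) ---
# How a "\03{...}" colour tag can be matched with colorTagR above and mapped
# to the console attribute value that encounter_color() passes to
# SetConsoleTextAttribute(); the sample tag is a placeholder.
def _example_color_lookup(text='\03{lightred}'):
    match = colorTagR.match(text)
    name = match.group('name') if match else 'default'
    return windowsColors[name]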
|
{
"content_hash": "5f73f51994a3afbe9b6aac090066f726",
"timestamp": "",
"source": "github",
"line_count": 92,
"max_line_length": 82,
"avg_line_length": 25.41304347826087,
"alnum_prop": 0.5945252352437981,
"repo_name": "smalyshev/pywikibot-core",
"id": "43db049d0ea121cd998b7719e8f5a99d3cd4f623",
"size": "2362",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "pywikibot/userinterfaces/terminal_interface_win32.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "97"
},
{
"name": "Python",
"bytes": "4195506"
}
],
"symlink_target": ""
}
|
import npyscreen
class HelpForm(npyscreen.ActionFormWithMenus):
""" Help form for the Vent CLI """
@staticmethod
def switch(page):
def popup(page):
info_str = ''
if page == 'Menu':
info_str = """
Menu interactions are simple! Here is a quick guide to get you
familiar.
Navigation of a page: Up, Down, Left, Right, or TAB. Note that
SHIFT+TAB can be used to reverse cycle!
Editing a page: Simply navigating to an editable field and
                typing should be enough to edit most pages. ENTER can be
used to select or deselect options, or to open drop down menus.
CTRL+T: Will toggle between two pages.
CTRL+Q: Will take you back to main. Or from main, will exit the
application.
CTRL+X: Can be used to open up menus on certain pages.
"""
elif page == 'Plugins':
info_str = """
Plugins are user created software hosted on GitHub that Vent
can install and run. Plugins are developed following a hybrid
of requirements specified both by Docker and Vent. Vent uses
Docker to run all plugins so all plugins should be designed to
run as a system of containers. Knowledge of linking docker
containers may be necessary for more complex tasks that require
creating multiple containers for your plugin. For Help on
building Plugins, check out the Working with Plugins section in
our Help Menu."""
elif page == 'Tools':
info_str = """
Tools are the individual building blocks of a Plugin. Each tool
should follow S.R.P, and over the entirety of the Plugin should
                be able to accomplish any task desired! For Help on building
Tools, check out the Working with Plugins section in our Help
Menu."""
elif page == 'Filetypes':
info_str = """
The filetypes Vent can support are entirely based on the
installed Plugins. Each plugin is ultimately responsible for
doing some form of processing."""
elif page == 'Status':
info_str = """
You'll notice Vent offers several status types amongst
tools/plugins. Built means that each tool has a Docker image
successfully built based off the provided specs for that
tool/plugin. Enabled/Disabled correspond to user defined
settings to enable or disable a tool or set of tools (plugin).
Installed means simply that the plugin has been cloned from
GitHub and installed to the Vent filesystem. No Docker image
has been created yet. Running means that a Docker container has
successfully been created from the corresponding Docker image
for a specific tool in a Plugin."""
elif page == 'Plugin Adding':
info_str = """
To add a plugin that you've created, simply open up the Menu
from the main page using ^X. After, press "p" to open up the
Plugin menu and then "a" to drop down into our Plugin
installation screen. To add a Plugin, we require a valid
GitHub repository. If your repository is private, you will
need to enter a username and password. Once you have finished
that, select OK. If we are successfully able to connect, you
should see your repositories branches listed in our Plugin
options menu. From here, press TAB to cycle between the
options, and ENTER to select different branches to install and
build from. You can even choose a specific commit if you like!
Once you've selected those tools and selected OK, Vent will
notify you about all tools it has detected. For more
information about how Vent detects tools, see our "Building a
Plugin" section. You may select or deselect the tools you wish
to install as part of your Plugin. When you are done, select
OK. If everything works you should get a successful Add. Select
OK, to be returned to the main screen!"""
elif page == 'Plugin Building':
# !! TODO
info_str = """Stay tuned!"""
npyscreen.notify_confirm(info_str,
title='About Vent ' + page,
wide=True)
popup(page)
def create(self):
""" Override method for creating FormBaseNew form """
self.add_handlers({'^T': self.change_forms, '^Q': self.exit})
self.addfield = self.add(npyscreen.TitleFixedText, name='Vent',
labelColor='DEFAULT', editable=False)
self.multifield1 = self.add(npyscreen.MultiLineEdit, editable=False,
value="""
About Vent
Vent is a library that includes a CLI designed to serve as a general
platform for analyzing network traffic. Built with some basic
functionality, Vent serves as a user-friendly platform to build custom
plugins on to perform user-defined processing on incoming network data.
Vent supports any filetype, but only processes ones based on the types
of plugins installed for that instance of Vent. Simply create your
plugins, point vent to them & install them, and drop a file in vent to
begin processing!
For a detailed explanation of Vent Concepts, check out the General
section in our Help Menu. Topics include: Vent Plugins, Tools,
Filetypes, and Statuses! Use ^X to access the menu and ESC to
close it.
Select CANCEL or ^Q to return to the Main Menu. Select OK or ^T to
return to your previous menu.
PRO TIP: You can use TAB to cycle through options.
""")
self.m2 = self.add_menu(name='Vent Basics', shortcut='b')
self.m2.addItem(text='Menu Interactions', onSelect=HelpForm.switch,
arguments=['Menu'], shortcut='m')
self.m2.addItem(text='Plugins', onSelect=HelpForm.switch,
arguments=['Plugins'], shortcut='p')
self.m2.addItem(text='Tools', onSelect=HelpForm.switch,
arguments=['Tools'], shortcut='t')
self.m2.addItem(text='Filetypes', onSelect=HelpForm.switch,
arguments=['Filetypes'], shortcut='f')
self.m2.addItem(text='Statuses', onSelect=HelpForm.switch,
arguments=['Status'], shortcut='s')
self.m3 = self.add_menu(name='Working with Plugins', shortcut='p')
self.m3.addItem(text='Adding a Plugin', onSelect=HelpForm.switch,
arguments=['Plugin Adding'], shortcut='a')
self.m3.addItem(text='Building a Plugin', onSelect=HelpForm.switch,
arguments=['Plugin Building'], shortcut='b')
def exit(self, *args, **keywords):
self.parentApp.switchForm('MAIN')
def on_cancel(self):
self.exit()
def on_ok(self):
self.change_forms()
def change_forms(self, *args, **keywords):
"""
Checks which form is currently displayed and toggles to the other one
"""
# Returns to previous Form in history if there is a previous Form
try:
self.parentApp.switchFormPrevious()
except Exception as e: # pragma: no cover
self.parentApp.switchForm('MAIN')
|
{
"content_hash": "434ac253a2a6192cd5e278b59dc17591",
"timestamp": "",
"source": "github",
"line_count": 153,
"max_line_length": 79,
"avg_line_length": 52.00653594771242,
"alnum_prop": 0.5885383938670353,
"repo_name": "CyberReboot/vent",
"id": "906e3fac29396cb4347e43b944c2bb6b754ef167",
"size": "7957",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "vent/menus/help.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "8858"
},
{
"name": "Go",
"bytes": "227"
},
{
"name": "Makefile",
"bytes": "4286"
},
{
"name": "Python",
"bytes": "337477"
},
{
"name": "Shell",
"bytes": "4107"
}
],
"symlink_target": ""
}
|
import json
import logging
import os
import os.path
import pickle
import random
import resource
import signal
import socket
import subprocess
import sys
import threading
import time
import traceback
import addict
import psutil
from pymesos import Executor, MesosExecutorDriver, decode_data, encode_data
from urllib.request import urlopen
from toil.batchSystems.abstractBatchSystem import BatchSystemSupport
from toil.lib.expando import Expando
from toil.lib.threading import cpu_count
from toil.resource import Resource
from toil.statsAndLogging import configure_root_logger, set_log_level
log = logging.getLogger(__name__)
class MesosExecutor(Executor):
"""
Part of Toil's Mesos framework, runs on a Mesos agent. A Toil job is passed to it via the
task.data field, and launched via call(toil.command).
"""
def __init__(self):
super(MesosExecutor, self).__init__()
self.popenLock = threading.Lock()
self.runningTasks = {}
self.workerCleanupInfo = None
log.debug('Preparing system for resource download')
Resource.prepareSystem()
self.address = None
self.id = None
# Setting this value at this point will ensure that the toil workflow directory will go to
# the mesos sandbox if the user hasn't specified --workDir on the command line.
if not os.getenv('TOIL_WORKDIR'):
os.environ['TOIL_WORKDIR'] = os.getcwd()
def registered(self, driver, executorInfo, frameworkInfo, agentInfo):
"""
Invoked once the executor driver has been able to successfully connect with Mesos.
"""
# Get the ID we have been assigned, if we have it
self.id = executorInfo.executor_id.get('value', None)
log.debug("Registered executor %s with framework", self.id)
self.address = socket.gethostbyname(agentInfo.hostname)
nodeInfoThread = threading.Thread(target=self._sendFrameworkMessage, args=[driver], daemon=True)
nodeInfoThread.start()
def reregistered(self, driver, agentInfo):
"""
Invoked when the executor re-registers with a restarted agent.
"""
log.debug("Re-registered")
def disconnected(self, driver):
"""
Invoked when the executor becomes "disconnected" from the agent (e.g., the agent is being
restarted due to an upgrade).
"""
log.critical("Disconnected from agent")
def killTask(self, driver, taskId):
"""
Kill parent task process and all its spawned children
"""
try:
pid = self.runningTasks[taskId.value]
pgid = os.getpgid(pid)
except KeyError:
pass
else:
os.killpg(pgid, signal.SIGKILL)
def shutdown(self, driver):
log.critical('Shutting down executor ...')
for taskId in list(self.runningTasks.keys()):
self.killTask(driver, taskId)
Resource.cleanSystem()
BatchSystemSupport.workerCleanup(self.workerCleanupInfo)
log.critical('... executor shut down.')
def error(self, driver, message):
"""
Invoked when a fatal error has occurred with the executor and/or executor driver.
"""
log.critical("FATAL ERROR: " + message)
def _sendFrameworkMessage(self, driver):
message = None
while True:
# The psutil documentation recommends that we ignore the value returned by the first
# invocation of cpu_percent(). However, we do want to send a sign of life early after
# starting (e.g. to unblock the provisioner waiting for an instance to come up) so
# we call it once and discard the value.
if message is None:
message = Expando(address=self.address)
psutil.cpu_percent()
else:
message.nodeInfo = dict(coresUsed=float(psutil.cpu_percent()) * .01,
memoryUsed=float(psutil.virtual_memory().percent) * .01,
coresTotal=cpu_count(),
memoryTotal=psutil.virtual_memory().total,
workers=len(self.runningTasks))
log.debug("Send framework message: %s", message)
driver.sendFrameworkMessage(encode_data(repr(message).encode('utf-8')))
# Prevent workers launched together from repeatedly hitting the leader at the same time
time.sleep(random.randint(45, 75))
def launchTask(self, driver, task):
"""
Invoked by SchedulerDriver when a Mesos task should be launched by this executor
"""
log.debug("Asked to launch task %s", repr(task))
def runTask():
log.debug("Running task %s", task.task_id.value)
startTime = time.time()
sendUpdate(task, 'TASK_RUNNING', wallTime=0)
# try to unpickle the task
try:
taskData = pickle.loads(decode_data(task.data))
except:
exc_info = sys.exc_info()
log.error('Exception while unpickling task: ', exc_info=exc_info)
exc_type, exc_value, exc_trace = exc_info
sendUpdate(task, 'TASK_FAILED', wallTime=0, msg=''.join(traceback.format_exception_only(exc_type, exc_value)))
return
# This is where task.data is first invoked. Using this position to setup cleanupInfo
if self.workerCleanupInfo is not None:
assert self.workerCleanupInfo == taskData.workerCleanupInfo
else:
self.workerCleanupInfo = taskData.workerCleanupInfo
# try to invoke a run on the unpickled task
try:
process = runJob(taskData)
self.runningTasks[task.task_id.value] = process.pid
try:
exitStatus = process.wait()
wallTime = time.time() - startTime
if 0 == exitStatus:
sendUpdate(task, 'TASK_FINISHED', wallTime)
elif -9 == exitStatus:
sendUpdate(task, 'TASK_KILLED', wallTime)
else:
sendUpdate(task, 'TASK_FAILED', wallTime, msg=str(exitStatus))
finally:
del self.runningTasks[task.task_id.value]
except:
wallTime = time.time() - startTime
exc_info = sys.exc_info()
log.error('Exception while running task:', exc_info=exc_info)
exc_type, exc_value, exc_trace = exc_info
sendUpdate(task, 'TASK_FAILED', wallTime=wallTime, msg=''.join(traceback.format_exception_only(exc_type, exc_value)))
wallTime = time.time() - startTime
sendUpdate(task, 'TASK_FINISHED', wallTime)
def runJob(job):
"""
:type job: toil.batchSystems.mesos.ToilJob
:rtype: subprocess.Popen
"""
if job.userScript:
job.userScript.register()
log.debug("Invoking command: '%s'", job.command)
# Construct the job's environment
jobEnv = dict(os.environ, **job.environment)
log.debug('Using environment variables: %s', jobEnv.keys())
with self.popenLock:
return subprocess.Popen(job.command,
preexec_fn=lambda: os.setpgrp(),
shell=True, env=jobEnv)
def sendUpdate(task, taskState, wallTime, msg=''):
update = addict.Dict()
update.task_id.value = task.task_id.value
if self.id is not None:
# Sign our messages as from us, since the driver doesn't do it.
update.executor_id.value = self.id
update.state = taskState
update.message = msg
# Add wallTime as a label.
labels = addict.Dict()
labels.labels = [{'key': 'wallTime', 'value': str(wallTime)}]
update.labels = labels
driver.sendStatusUpdate(update)
thread = threading.Thread(target=runTask, daemon=True)
thread.start()
def frameworkMessage(self, driver, message):
"""
Invoked when a framework message has arrived for this executor.
"""
log.debug("Received message from framework: {}".format(message))
def main():
configure_root_logger()
set_log_level("INFO")
if not os.environ.get("MESOS_AGENT_ENDPOINT"):
# Some Mesos setups in our tests somehow lack this variable. Provide a
# fake one to maybe convince the executor driver to work.
os.environ["MESOS_AGENT_ENDPOINT"] = os.environ.get("MESOS_SLAVE_ENDPOINT", "127.0.0.1:5051")
log.warning("Had to fake MESOS_AGENT_ENDPOINT as %s" % os.environ["MESOS_AGENT_ENDPOINT"])
# must be set manually to enable toggling of the mesos log level for debugging jenkins
# may be useful: https://github.com/DataBiosphere/toil/pull/2338#discussion_r223854931
if False:
try:
urlopen("http://%s/logging/toggle?level=1&duration=15mins" % os.environ["MESOS_AGENT_ENDPOINT"]).read()
log.debug("Toggled agent log level")
except Exception:
log.debug("Failed to toggle agent log level")
# Parse the agent state
agent_state = json.loads(urlopen("http://%s/state" % os.environ["MESOS_AGENT_ENDPOINT"]).read())
if 'completed_frameworks' in agent_state:
# Drop the completed frameworks which grow over time
del agent_state['completed_frameworks']
log.debug("Agent state: %s", str(agent_state))
log.debug("Virtual memory info in executor: %s" % repr(psutil.virtual_memory()))
if os.path.exists('/sys/fs/cgroup/memory'):
# Mesos can limit memory with a cgroup, so we should report on that.
for (dirpath, dirnames, filenames) in os.walk('/sys/fs/cgroup/memory', followlinks=True):
for filename in filenames:
if 'limit_in_bytes' not in filename:
continue
log.debug('cgroup memory info from %s:' % os.path.join(dirpath, filename))
try:
for line in open(os.path.join(dirpath, filename)):
log.debug(line.rstrip())
except Exception:
log.debug("Failed to read file")
# Mesos can also impose rlimit limits, including on things that really
# ought to not be limited, like virtual address space size.
log.debug('DATA rlimit: %s', str(resource.getrlimit(resource.RLIMIT_DATA)))
log.debug('STACK rlimit: %s', str(resource.getrlimit(resource.RLIMIT_STACK)))
log.debug('RSS rlimit: %s', str(resource.getrlimit(resource.RLIMIT_RSS)))
log.debug('AS rlimit: %s', str(resource.getrlimit(resource.RLIMIT_AS)))
executor = MesosExecutor()
log.debug('Made executor')
driver = MesosExecutorDriver(executor, use_addict=True)
old_on_event = driver.on_event
def patched_on_event(event):
"""
Intercept and log all pymesos events.
"""
log.debug("Event: %s", repr(event))
old_on_event(event)
driver.on_event = patched_on_event
log.debug('Made driver')
driver.start()
log.debug('Started driver')
driver_result = driver.join()
log.debug('Joined driver')
# Tolerate a None in addition to the code the docs suggest we should receive from join()
exit_value = 0 if (driver_result is None or driver_result == 'DRIVER_STOPPED') else 1
assert len(executor.runningTasks) == 0
sys.exit(exit_value)
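# --- Illustrative sketch (not part of the original module) ---
# The shape of the status-update object that sendUpdate() inside launchTask()
# builds with addict before handing it to driver.sendStatusUpdate(); the task
# id, state and wall time below are placeholders.
def _example_status_update(task_id='toil-job-1', state='TASK_RUNNING', wall_time=0.0):
    update = addict.Dict()
    update.task_id.value = task_id
    update.state = state
    update.message = ''
    labels = addict.Dict()
    labels.labels = [{'key': 'wallTime', 'value': str(wall_time)}]
    update.labels = labels
    return update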
|
{
"content_hash": "9c7b212f8b82a8b634c7142fc181a529",
"timestamp": "",
"source": "github",
"line_count": 294,
"max_line_length": 133,
"avg_line_length": 40.105442176870746,
"alnum_prop": 0.6042744466118226,
"repo_name": "BD2KGenomics/slugflow",
"id": "bffa456061ca8ca3cefac43f2383e01684f5b17d",
"size": "12403",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/toil/batchSystems/mesos/executor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "4074"
},
{
"name": "Python",
"bytes": "618803"
},
{
"name": "Shell",
"bytes": "19115"
}
],
"symlink_target": ""
}
|
from django.contrib.syndication.views import Feed
from django.db.transaction import non_atomic_requests
from django.utils.decorators import method_decorator
class NonAtomicFeed(Feed):
"""
A feed that does not use transactions.
Feeds are special because they don't inherit from generic Django class
views so you can't decorate dispatch().
"""
@method_decorator(non_atomic_requests)
def __call__(self, *args, **kwargs):
return super(NonAtomicFeed, self).__call__(*args, **kwargs)
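# --- Illustrative sketch (not part of the original module) ---
# How a concrete feed might build on NonAtomicFeed; the title, link and items
# below are hypothetical stand-ins for a real model queryset.
class ExampleNonAtomicFeed(NonAtomicFeed):
    title = "Example feed"
    link = "/feed/"
    description = "Latest items, served outside of a wrapping transaction."

    def items(self):
        # A real feed would return a queryset here, e.g. Entry.objects.all().
        return []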
|
{
"content_hash": "5b3a2a7ea1d880ae2fb7227b64b846d9",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 74,
"avg_line_length": 32.375,
"alnum_prop": 0.7181467181467182,
"repo_name": "andymckay/addons-server",
"id": "1afb0863216192bc975b1187d1ded504360ace3d",
"size": "518",
"binary": false,
"copies": "11",
"ref": "refs/heads/master",
"path": "src/olympia/amo/feeds.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ApacheConf",
"bytes": "249"
},
{
"name": "CSS",
"bytes": "846032"
},
{
"name": "HTML",
"bytes": "1589366"
},
{
"name": "JavaScript",
"bytes": "1316196"
},
{
"name": "Makefile",
"bytes": "4442"
},
{
"name": "PLSQL",
"bytes": "74"
},
{
"name": "Python",
"bytes": "4128481"
},
{
"name": "Shell",
"bytes": "9112"
},
{
"name": "Smarty",
"bytes": "1930"
}
],
"symlink_target": ""
}
|
from .mod import InplusTraderBacktestMod
def load_mod():
return InplusTraderBacktestMod()
|
{
"content_hash": "860ce892c2241fa1d62f5889ae70dc3b",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 40,
"avg_line_length": 19.2,
"alnum_prop": 0.78125,
"repo_name": "zhengwsh/InplusTrader_Linux",
"id": "7c32a462f9ec4b67cf3f365c8455052a33571622",
"size": "701",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "InplusTrader/backtestEngine/mod/inplus_trader_backtest/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1727083"
},
{
"name": "C++",
"bytes": "3367509"
},
{
"name": "CMake",
"bytes": "3288"
},
{
"name": "Jupyter Notebook",
"bytes": "10948"
},
{
"name": "Objective-C",
"bytes": "1612"
},
{
"name": "Python",
"bytes": "3819836"
},
{
"name": "Shell",
"bytes": "397"
}
],
"symlink_target": ""
}
|
"""This module contains report plugin mocks used for testing."""
from grr_response_core.lib import rdfvalue
from grr_response_core.lib import utils
from grr_response_server.gui.api_plugins.report_plugins import rdf_report_plugins
from grr_response_server.gui.api_plugins.report_plugins import report_plugin_base
from grr_response_server.gui.api_plugins.report_plugins import report_plugins
class FooReportPlugin(report_plugin_base.ReportPluginBase):
TYPE = rdf_report_plugins.ApiReportDescriptor.ReportType.CLIENT
TITLE = "Foo"
SUMMARY = "Reports all foos."
class BarReportPlugin(report_plugin_base.ReportPluginBase):
TYPE = rdf_report_plugins.ApiReportDescriptor.ReportType.SERVER
TITLE = "Bar Activity"
SUMMARY = "Reports bars' activity in the given time range."
REQUIRES_TIME_RANGE = True
def GetReportData(self, get_report_args):
ret = rdf_report_plugins.ApiReportData(
representation_type=rdf_report_plugins.ApiReportData.RepresentationType.
STACK_CHART)
database = {
rdfvalue.RDFDatetime.FromHumanReadable("2012/12/11"): (1, 0),
rdfvalue.RDFDatetime.FromHumanReadable("2012/12/12"): (2, 1),
rdfvalue.RDFDatetime.FromHumanReadable("2012/12/13"): (3, 2),
rdfvalue.RDFDatetime.FromHumanReadable("2012/12/14"): (5, 3),
rdfvalue.RDFDatetime.FromHumanReadable("2012/12/15"): (8, 4),
rdfvalue.RDFDatetime.FromHumanReadable("2012/12/16"): (13, 5),
rdfvalue.RDFDatetime.FromHumanReadable("2012/12/17"): (21, 6),
rdfvalue.RDFDatetime.FromHumanReadable("2012/12/18"): (34, 7)
}
ret.stack_chart.data = [
rdf_report_plugins.ApiReportDataSeries2D(
label="Bar",
points=[
rdf_report_plugins.ApiReportDataPoint2D(x=x, y=y)
for (t, (x, y)) in sorted(database.items())
if get_report_args.start_time <= t and
t < get_report_args.start_time + get_report_args.duration
])
]
return ret
class MockedReportPlugins(object):
"""A context manager that swaps available reports with the mocked reports."""
def __init__(self):
self.stubber = utils.Stubber(report_plugins.REGISTRY, "plugins", {
"FooReportPlugin": FooReportPlugin,
"BarReportPlugin": BarReportPlugin
})
def __enter__(self):
self.Start()
def __exit__(self, *_):
self.Stop()
def Start(self):
self.stubber.Start()
def Stop(self):
self.stubber.Stop()
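# --- Illustrative sketch (not part of the original module) ---
# How a test might use the context manager above: while it is active, the
# report plugin registry contains only the two mocked plugins in this file.
def _example_usage():
  with MockedReportPlugins():
    return sorted(report_plugins.REGISTRY.plugins.keys())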
|
{
"content_hash": "efa561154bd5d5aa8d590a0d119eb95a",
"timestamp": "",
"source": "github",
"line_count": 72,
"max_line_length": 81,
"avg_line_length": 34.5,
"alnum_prop": 0.679549114331723,
"repo_name": "google/grr",
"id": "e9f952eea1170e6996e177c91953705994aac9f0",
"size": "2506",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "grr/server/grr_response_server/gui/api_plugins/report_plugins/report_plugins_test_mocks.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "12697"
},
{
"name": "C++",
"bytes": "54814"
},
{
"name": "Dockerfile",
"bytes": "1822"
},
{
"name": "HCL",
"bytes": "8451"
},
{
"name": "HTML",
"bytes": "366783"
},
{
"name": "JavaScript",
"bytes": "13088"
},
{
"name": "Jupyter Notebook",
"bytes": "199216"
},
{
"name": "Makefile",
"bytes": "3244"
},
{
"name": "PowerShell",
"bytes": "531"
},
{
"name": "Python",
"bytes": "8844725"
},
{
"name": "Roff",
"bytes": "444"
},
{
"name": "SCSS",
"bytes": "105120"
},
{
"name": "Shell",
"bytes": "48663"
},
{
"name": "Standard ML",
"bytes": "8172"
},
{
"name": "TypeScript",
"bytes": "2139377"
}
],
"symlink_target": ""
}
|
"""
plot_grid.py
Class instance used to make Display.
"""
# Load the needed packages
import numpy as np
import os
import pyart
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg
from matplotlib.backends.backend_qt4agg import NavigationToolbar2QT as \
NavigationToolbar
from matplotlib.figure import Figure
from matplotlib.colors import Normalize as mlabNormalize
from matplotlib.colorbar import ColorbarBase as mlabColorbarBase
from matplotlib.pyplot import cm
from ..core import Variable, Component, common, VariableChoose, QtGui, QtCore
from ..core.points import Points
# Save image file type and DPI (resolution)
IMAGE_EXT = 'png'
DPI = 200
# ========================================================================
class GridDisplay(Component):
'''
Class to create a display plot, using a Grid structure.
'''
Vgrid = None #: see :ref:`shared_variable`
Vfield = None #: see :ref:`shared_variable`
VlevelZ = None \
#: see :ref:`shared_variable`, only used if plot_type="gridZ"
VlevelY = None \
#: see :ref:`shared_variable`, only used if plot_type="gridY"
VlevelX = None \
#: see :ref:`shared_variable`, only used if plot_type="gridX"
Vcmap = None #: see :ref:`shared_variable`
VplotAxes = None #: see :ref:`shared_variable` (no internal use)
VpathInteriorFunc = None #: see :ref:`shared_variable` (no internal use)
@classmethod
def guiStart(self, parent=None):
'''Graphical interface for starting this class'''
args = _DisplayStart().startDisplay()
args['parent'] = parent
return self(**args), True
def __init__(self, Vgrid=None, Vfield=None, VlevelZ=None, VlevelY=None,
VlevelX=None, Vlims=None, Vcmap=None, plot_type="gridZ",
name="Display", parent=None):
'''
Initialize the class to create display.
Parameters
----------
[Optional]
Vgrid : :py:class:`~artview.core.core.Variable` instance
grid signal variable. If None start new one with None.
Vfield : :py:class:`~artview.core.core.Variable` instance
Field signal variable. If None start new one with empty string.
VlevelZ : :py:class:`~artview.core.core.Variable` instance
Signal variable for vertical level, only used if
plot_type="gridZ". If None start with value zero.
VlevelY : :py:class:`~artview.core.core.Variable` instance
Signal variable for latitudinal level, only used if
plot_type="gridY". If None start with value zero.
VlevelX : :py:class:`~artview.core.core.Variable` instance
Signal variable for longitudinal level, only used if
plot_type="gridX". If None start with value zero.
Vlims : :py:class:`~artview.core.core.Variable` instance
Limits signal variable.
A value of None will instantiate a limits variable.
Vcmap : :py:class:`~artview.core.core.Variable` instance
Colormap signal variable.
A value of None will instantiate a colormap variable.
plot_type : "gridZ", "gridY" or "gridX"
Define plot type, "gridZ" will plot a Z level, that is a XY
plane. Analog for "gridY" and "gridZ"
name : string
Display window name.
parent : PyQt instance
Parent instance to associate to Display window.
If None, then Qt owns, otherwise associated with parent PyQt
instance.
Notes
-----
This class records the selected button and passes the
change value back to variable.
'''
super(GridDisplay, self).__init__(name=name, parent=parent)
self.setFocusPolicy(QtCore.Qt.ClickFocus)
self.basemap = None
# Set up signal, so that DISPLAY can react to
# external (or internal) changes in grid, field,
# lims and level (expected to be Core.Variable instances)
# The capital V so people remember using ".value"
if Vgrid is None:
self.Vgrid = Variable(None)
else:
self.Vgrid = Vgrid
if Vfield is None:
self.Vfield = Variable('')
else:
self.Vfield = Vfield
if VlevelZ is None:
self.VlevelZ = Variable(0)
else:
self.VlevelZ = VlevelZ
if VlevelY is None:
self.VlevelY = Variable(0)
else:
self.VlevelY = VlevelY
if VlevelX is None:
self.VlevelX = Variable(0)
else:
self.VlevelX = VlevelX
if Vlims is None:
self.Vlims = Variable(None)
else:
self.Vlims = Vlims
if Vcmap is None:
self.Vcmap = Variable(None)
else:
self.Vcmap = Vcmap
self.VpathInteriorFunc = Variable(self.getPathInteriorValues)
self.VplotAxes = Variable(None)
self.sharedVariables = {"Vgrid": self.Newgrid,
"Vfield": self.NewField,
"Vlims": self.NewLims,
"Vcmap": self.NewCmap,
"VpathInteriorFunc": None,
"VplotAxes": None}
self.change_plot_type(plot_type)
# Connect the components
self.connectAllVariables()
# Set plot title and colorbar units to defaults
self.title = self._get_default_title()
self.units = self._get_default_units()
# set default latlon lines
self.lat_lines = np.linspace(-90, 90, num=181)
self.lon_lines = np.linspace(-180, 180, num=361)
# Find the PyArt colormap names
self.cm_names = ["pyart_" + m for m in pyart.graph.cm.datad
if not m.endswith("_r")]
self.cm_names.sort()
# Create tool dictionary
self.tools = {}
# Set up Default limits and cmap
if Vlims is None:
self._set_default_limits(strong=False)
if Vcmap is None:
self._set_default_cmap(strong=False)
# Create a figure for output
self._set_fig_ax()
# Launch the GUI interface
self.LaunchGUI()
# Initialize grid variable
self.Newgrid(None, None, True)
self._update_fig_ax()
self.show()
def keyPressEvent(self, event):
'''Allow level adjustment via the Up-Down arrow keys.'''
if event.key() == QtCore.Qt.Key_Up:
self.LevelSelectCmd(self.Vlevel.value + 1)
elif event.key() == QtCore.Qt.Key_Down:
self.LevelSelectCmd(self.Vlevel.value - 1)
else:
super(GridDisplay, self).keyPressEvent(event)
####################
# GUI methods #
####################
def LaunchGUI(self):
'''Launches a GUI interface.'''
# Create layout
self.layout = QtGui.QGridLayout()
self.layout.setSpacing(8)
# Create the widget
self.central_widget = QtGui.QWidget()
self.setCentralWidget(self.central_widget)
self._set_figure_canvas()
self.central_widget.setLayout(self.layout)
# Add buttons along display for user control
self.addButtons()
self.setUILayout()
# Set the status bar to display messages
self.statusbar = self.statusBar()
##################################
# User display interface methods #
##################################
def addButtons(self):
'''Add a series of buttons for user control over display.'''
# Create the Display controls
self._add_displayBoxUI()
# Create the Level controls
self._add_levelBoxUI()
# Create the Field controls
self._add_fieldBoxUI()
# Create the Tools controls
self._add_toolsBoxUI()
# Create the Informational label at top
self._add_infolabel()
def setUILayout(self):
'''Setup the button/display UI layout.'''
self.layout.addWidget(self.levelBox, 0, 0)
self.layout.addWidget(self.fieldBox, 0, 1)
self.layout.addWidget(self.dispButton, 0, 2)
self.layout.addWidget(self.toolsButton, 0, 3)
self.layout.addWidget(self.infolabel, 0, 4)
#############################
# Functionality methods #
#############################
def _open_LimsDialog(self):
'''Open a dialog box to change display limits.'''
from .limits import limits_dialog
limits, cmap, change = limits_dialog(self.Vlims.value,
self.Vcmap.value, self.name)
if change == 1:
self.Vcmap.change(cmap, False)
self.Vlims.change(limits)
def _fillLevelBox(self):
'''Fill in the Level Window Box with current levels.'''
self.levelBox.clear()
self.levelBox.addItem("Level Window")
# Loop through and create each level button
if self.plot_type == "gridZ":
levels = self.Vgrid.value.axes['z_disp']['data']
elif self.plot_type == "gridY":
levels = self.Vgrid.value.axes['y_disp']['data']
elif self.plot_type == "gridX":
levels = self.Vgrid.value.axes['x_disp']['data']
for nlevel in range(len(levels)):
btntxt = "%2.1f m (level %d)" % (levels[nlevel], nlevel+1)
self.levelBox.addItem(btntxt)
def _fillFieldBox(self):
'''Fill in the Field Window Box with current variable names.'''
self.fieldBox.clear()
self.fieldBox.addItem("Field Window")
# Loop through and create each field button
for field in self.fieldnames:
self.fieldBox.addItem(field)
def _levelAction(self, text):
'''Define action for Level Button selection.'''
if text == "Level Window":
self._open_levelbuttonwindow()
else:
nlevel = int(text.split("(level ")[1][:-1])-1
self.LevelSelectCmd(nlevel)
def _fieldAction(self, text):
'''Define action for Field Button selection.'''
if text == "Field Window":
self._open_fieldbuttonwindow()
else:
self.FieldSelectCmd(str(text))
def _title_input(self):
'''Retrieve new plot title.'''
val, entry = common.string_dialog_with_reset(
self.title, "Plot Title", "Title:", self._get_default_title())
if entry is True:
self.title = val
self._update_plot()
def _units_input(self):
'''Retrieve new plot units.'''
val, entry = common.string_dialog_with_reset(
self.units, "Plot Units", "Units:", self._get_default_units())
if entry is True:
self.units = val
self._update_plot()
def _open_levelbuttonwindow(self):
'''Open a LevelButtonWindow instance.'''
from .level import LevelButtonWindow
if self.plot_type == "gridZ":
self.levelbuttonwindow = LevelButtonWindow(
self.Vlevel, self.plot_type, Vcontainer=self.Vgrid,
controlType="radio", name=self.name+" Level Selection",
parent=self.parent)
else:
self.levelbuttonwindow = LevelButtonWindow(
self.Vlevel, self.plot_type, Vcontainer=self.Vgrid,
controlType="slider", name=self.name+" Level Selection",
parent=self.parent)
def _open_fieldbuttonwindow(self):
'''Open a FieldButtonWindow instance.'''
from .field import FieldButtonWindow
self.fieldbuttonwindow = FieldButtonWindow(
self.Vgrid, self.Vfield,
name=self.name+" Field Selection", parent=self.parent)
def _add_cmaps_to_button(self):
'''Add a menu to change colormap used for plot.'''
for cm_name in self.cm_names:
cmapAction = self.dispCmapmenu.addAction(cm_name)
cmapAction.setStatusTip("Use the %s colormap" % cm_name)
cmapAction.triggered[()].connect(
lambda cm_name=cm_name: self.cmapSelectCmd(cm_name))
self.dispCmap.setMenu(self.dispCmapmenu)
def _add_displayBoxUI(self):
'''Create the Display Options Button menu.'''
self.dispButton = QtGui.QPushButton("Display Options")
self.dispButton.setToolTip("Adjust display properties")
self.dispButton.setFocusPolicy(QtCore.Qt.NoFocus)
dispmenu = QtGui.QMenu(self)
dispLimits = dispmenu.addAction("Adjust Display Limits")
dispLimits.setToolTip("Set data, X, and Y range limits")
dispTitle = dispmenu.addAction("Change Title")
dispTitle.setToolTip("Change plot title")
dispUnit = dispmenu.addAction("Change Units")
dispUnit.setToolTip("Change units string")
self.dispCmap = dispmenu.addAction("Change Colormap")
self.dispCmapmenu = QtGui.QMenu("Change Cmap")
self.dispCmapmenu.setFocusPolicy(QtCore.Qt.NoFocus)
dispQuickSave = dispmenu.addAction("Quick Save Image")
dispQuickSave.setShortcut("Ctrl+D")
dispQuickSave.setToolTip(
"Save Image to local directory with default name")
dispSaveFile = dispmenu.addAction("Save Image")
dispSaveFile.setShortcut("Ctrl+S")
dispSaveFile.setStatusTip("Save Image using dialog")
dispLimits.triggered[()].connect(self._open_LimsDialog)
dispTitle.triggered[()].connect(self._title_input)
dispUnit.triggered[()].connect(self._units_input)
dispQuickSave.triggered[()].connect(self._quick_savefile)
dispSaveFile.triggered[()].connect(self._savefile)
self._add_cmaps_to_button()
self.dispButton.setMenu(dispmenu)
def _add_levelBoxUI(self):
'''Create the Level Selection ComboBox.'''
self.levelBox = QtGui.QComboBox()
self.levelBox.setFocusPolicy(QtCore.Qt.NoFocus)
self.levelBox.setToolTip(
"Select level slice to display.\n"
"'Level Window' will launch popup.\n"
"Up/Down arrow keys Increase/Decrease level.")
self.levelBox.activated[str].connect(self._levelAction)
def _add_fieldBoxUI(self):
'''Create the Field Selection ComboBox.'''
self.fieldBox = QtGui.QComboBox()
self.fieldBox.setFocusPolicy(QtCore.Qt.NoFocus)
self.fieldBox.setToolTip("Select variable/field in data file.\n"
"'Field Window' will launch popup.\n")
self.fieldBox.activated[str].connect(self._fieldAction)
def _add_toolsBoxUI(self):
'''Create the Tools Button menu.'''
self.toolsButton = QtGui.QPushButton("Toolbox")
self.toolsButton.setFocusPolicy(QtCore.Qt.NoFocus)
self.toolsButton.setToolTip("Choose a tool to apply")
toolmenu = QtGui.QMenu(self)
toolZoomPan = toolmenu.addAction("Zoom/Pan")
toolValueClick = toolmenu.addAction("Click for Value")
toolSelectRegion = toolmenu.addAction("Select a Region of Interest")
toolReset = toolmenu.addAction("Reset Tools")
toolDefault = toolmenu.addAction("Reset File Defaults")
toolZoomPan.triggered[()].connect(self.toolZoomPanCmd)
toolValueClick.triggered[()].connect(self.toolValueClickCmd)
toolSelectRegion.triggered[()].connect(self.toolSelectRegionCmd)
toolReset.triggered[()].connect(self.toolResetCmd)
toolDefault.triggered[()].connect(self.toolDefaultCmd)
self.toolsButton.setMenu(toolmenu)
def _add_infolabel(self):
'''Create an information label about the display'''
self.infolabel = QtGui.QLabel("Grid: \n"
"Field: \n"
"Level: ", self)
self.infolabel.setStyleSheet('color: red; font: italic 10px')
self.infolabel.setToolTip("Filename not loaded")
def _update_infolabel(self):
if self.Vgrid.value is None:
return
self.infolabel.setText(
"Grid: %s\n"
"Field: %s\n"
"Level: %d" % (self.Vgrid.value.metadata['instrument_name'],
self.Vfield.value,
self.Vlevel.value+1))
if hasattr(self.Vgrid.value, 'filename'):
self.infolabel.setToolTip(self.Vgrid.value.filename)
########################
    # Selection methods    #
########################
def Newgrid(self, variable, value, strong):
'''
Slot for 'ValueChanged' signal of
:py:class:`Vgrid <artview.core.core.Variable>`.
This will:
* Update fields and levels lists and MenuBoxes
* Check grid scan type and reset limits if needed
* Reset units and title
* If strong update: update plot
'''
# test for None
if self.Vgrid.value is None:
self.fieldBox.clear()
self.levelBox.clear()
return
# Get field names
self.fieldnames = self.Vgrid.value.fields.keys()
        # Check the file type and initialize limits
self._check_file_type()
# Update field and level MenuBox
self._fillLevelBox()
self._fillFieldBox()
self.units = self._get_default_units()
self.title = self._get_default_title()
if strong:
self._update_plot()
self._update_infolabel()
def NewField(self, variable, value, strong):
'''
Slot for 'ValueChanged' signal of
:py:class:`Vfield <artview.core.core.Variable>`.
This will:
* Reset colormap
* Reset units
* Update fields MenuBox
* If strong update: update plot
'''
self._set_default_cmap(strong=False)
self.units = self._get_default_units()
self.title = self._get_default_title()
idx = self.fieldBox.findText(value)
self.fieldBox.setCurrentIndex(idx)
if strong:
self._update_plot()
self._update_infolabel()
def NewLims(self, variable, value, strong):
'''
Slot for 'ValueChanged' signal of
:py:class:`Vlims <artview.core.core.Variable>`.
This will:
* If strong update: update axes
'''
if strong:
self._update_axes()
def NewCmap(self, variable, value, strong):
'''
Slot for 'ValueChanged' signal of
:py:class:`Vcmap <artview.core.core.Variable>`.
This will:
* If strong update: update plot
'''
if strong:
self._update_plot()
def NewLevel(self, variable, value, strong):
'''
Slot for 'ValueChanged' signal of
:py:class:`Vlevel* <artview.core.core.Variable>`.
This will:
* Update level MenuBox
* If strong update: update plot
'''
# +1 since the first one is "Level Window"
self.levelBox.setCurrentIndex(value+1)
if strong:
self._update_plot()
self._update_infolabel()
def LevelSelectCmd(self, nlevel):
'''
Captures Level selection and update Level
:py:class:`~artview.core.core.Variable`.
'''
if nlevel < 0:
nlevel = len(self.levels)-1
elif nlevel >= len(self.levels):
nlevel = 0
self.Vlevel.change(nlevel)
def FieldSelectCmd(self, name):
'''
Captures field selection and update field
:py:class:`~artview.core.core.Variable`.
'''
self.Vfield.change(name)
def cmapSelectCmd(self, cm_name):
'''Captures colormap selection and redraws.'''
self.Vcmap.value['cmap'] = cm_name
self.Vcmap.update()
def toolZoomPanCmd(self):
'''Creates and connects to a Zoom/Pan instance.'''
from .tools import ZoomPan
scale = 1.1
self.tools['zoompan'] = ZoomPan(
self.Vlims, self.ax,
base_scale=scale, parent=self.parent)
self.tools['zoompan'].connect()
def toolValueClickCmd(self):
'''Creates and connects to Point-and-click value retrieval'''
from .pick_value import ValueClick
self.tools['valueclick'] = ValueClick(
self, name=self.name + "ValueClick", parent=self)
def toolSelectRegionCmd(self):
'''Creates and connects to Region of Interest instance.'''
from .select_region_old import SelectRegion
self.tools['select_region'] = SelectRegion(
self.VplotAxes, self.VpathInteriorFunc, self.Vfield,
name=self.name + " SelectRegion", parent=self)
def toolResetCmd(self):
'''Reset tools via disconnect.'''
from . import tools
self.tools = tools.reset_tools(self.tools)
def toolDefaultCmd(self):
'''Restore the Display defaults.'''
for key in self.tools.keys():
if self.tools[key] is not None:
self.tools[key].disconnect()
self.tools[key] = None
self._set_default_limits()
self._set_default_cmap()
def getPathInteriorValues(self, paths):
'''
        Return the bin values inside the given path(s).
Parameters
----------
paths : list of :py:class:`matplotlib.path.Path` instances
Returns
-------
        points : :py:class:`artview.core.points.Points`
Points object containing all bins of the current grid
            and level inside the path. Axes: 'x_disp', 'y_disp', 'z_disp',
            'x_index', 'y_index', 'z_index'. Fields: just the current field.
Notes
-----
If Vgrid.value is None, returns None
'''
from .tools import interior_grid
grid = self.Vgrid.value
if grid is None:
return None
try:
iter(paths)
except:
paths = [paths]
xy = np.empty((0, 2))
idx = np.empty((0, 2), dtype=np.int)
for path in paths:
_xy, _idx = interior_grid(path, grid, self.basemap,
self.Vlevel.value, self.plot_type)
xy = np.concatenate((xy, _xy))
idx = np.concatenate((idx, _idx))
if self.plot_type == "gridZ":
x = xy[:, 0]
y = xy[:, 1]
z = np.ones_like(xy[:, 0]) * self.levels[self.VlevelZ.value]
x_idx = idx[:, 0]
y_idx = idx[:, 1]
z_idx = np.ones_like(idx[:, 0]) * self.VlevelZ.value
elif self.plot_type == "gridY":
x = xy[:, 0] * 1000.
z = xy[:, 1] * 1000.
y = np.ones_like(xy[:, 0]) * self.levels[self.VlevelY.value]
x_idx = idx[:, 0]
z_idx = idx[:, 1]
y_idx = np.ones_like(idx[:, 0]) * self.VlevelY.value
elif self.plot_type == "gridX":
z = xy[:, 0] * 1000.
y = xy[:, 1] * 1000.
x = np.ones_like(xy[:, 0]) * self.levels[self.VlevelX.value]
z_idx = idx[:, 0]
y_idx = idx[:, 1]
x_idx = np.ones_like(idx[:, 0]) * self.VlevelX.value
xaxis = {'data': x,
'long_name': 'X-coordinate in Cartesian system',
'axis': 'X',
'units': 'm'}
yaxis = {'data': y,
'long_name': 'Y-coordinate in Cartesian system',
'axis': 'Y',
'units': 'm'}
zaxis = {'data': z,
'long_name': 'Z-coordinate in Cartesian system',
'axis': 'Z',
'units': 'm'}
field = grid.fields[self.Vfield.value].copy()
field['data'] = grid.fields[self.Vfield.value]['data'][
z_idx, y_idx, x_idx]
x_idx = {'data': x_idx,
'long_name': 'index in nx dimension'}
y_idx = {'data': y_idx,
'long_name': 'index in ny dimension'}
z_idx = {'data': z_idx,
'long_name': 'index in nz dimension'}
axes = {'x_disp': xaxis,
'y_disp': yaxis,
'z_disp': zaxis,
'x_index': x_idx,
'y_index': y_idx,
'z_index': z_idx, }
fields = {self.Vfield.value: field}
points = Points(fields, axes, grid.metadata.copy(), xy.shape[0])
return points
def getNearestPoints(self, xdata, ydata):
'''
        Return the bin values nearest to the given point.
Parameters
----------
xdata, ydata : float
Returns
-------
x, y, z, value, x_idx, y_idx, z_idx: ndarray
            Tuple of 1-D arrays containing the x, y, z coordinates, the current
            field value, and the x, y and z indices.
Notes
-----
If Vgrid.value is None, returns None
'''
from .tools import nearest_point_grid
        grid = self.Vgrid.value
        if grid is None:
            return (np.array([]),)*7
        # map center
        lat0 = grid.axes['lat']['data'][0]
        lon0 = grid.axes['lon']['data'][0]
if self.plot_type == "gridZ":
idx = nearest_point_grid(
grid, self.basemap, self.levels[self.VlevelZ.value], ydata,
xdata)
elif self.plot_type == "gridY":
idx = nearest_point_grid(
grid, self.basemap, ydata * 1000.,
self.levels[self.VlevelY.value], xdata * 1000.)
elif self.plot_type == "gridX":
idx = nearest_point_grid(
grid, self.basemap, ydata * 1000., xdata * 1000.,
self.levels[self.VlevelX.value])
aux = (grid.axes['x_disp']['data'][idx[:, 2]],
grid.axes['y_disp']['data'][idx[:, 1]],
grid.axes['z_disp']['data'][idx[:, 0]],
grid.fields[self.Vfield.value]['data'][idx[:, 0], idx[:, 1],
idx[:, 2]],
idx[:, 2], idx[:, 1], idx[:, 0])
return aux
####################
# Plotting methods #
####################
def _set_fig_ax(self):
'''Set the figure and axis to plot.'''
self.XSIZE = 8
self.YSIZE = 8
self.fig = Figure(figsize=(self.XSIZE, self.YSIZE))
self.ax = self.fig.add_axes([0.2, 0.2, 0.7, 0.7])
self.cax = self.fig.add_axes([0.2, 0.10, 0.7, 0.02])
self.VplotAxes.change(self.ax)
# self._update_axes()
def _update_fig_ax(self):
'''Set the figure and axis to plot.'''
if self.plot_type in ("gridX", "gridY"):
self.YSIZE = 5
else:
self.YSIZE = 8
xwidth = 0.7
yheight = 0.7
self.ax.set_position([0.15, 0.15, xwidth, yheight])
self.cax.set_position([0.15+xwidth, 0.15, 0.02, yheight])
self._update_axes()
def _set_figure_canvas(self):
'''Set the figure canvas to draw in window area.'''
self.canvas = FigureCanvasQTAgg(self.fig)
# Add the widget to the canvas
self.layout.addWidget(self.canvas, 1, 0, 7, 6)
def _update_plot(self):
'''Draw/Redraw the plot.'''
if self.Vgrid.value is None:
return
# Create the plot with PyArt GridMapDisplay
self.ax.cla() # Clear the plot axes
self.cax.cla() # Clear the colorbar axes
if self.Vfield.value not in self.Vgrid.value.fields.keys():
self.canvas.draw()
self.statusbar.setStyleSheet("QStatusBar{padding-left:8px;" +
"background:rgba(255,0,0,255);" +
"color:black;font-weight:bold;}")
self.statusbar.showMessage("Field not Found in Radar", msecs=5000)
return
else:
self.statusbar.setStyleSheet("QStatusBar{padding-left:8px;" +
"background:rgba(0,0,0,0);" +
"color:black;font-weight:bold;}")
self.statusbar.clearMessage()
title = self.title
limits = self.Vlims.value
cmap = self.Vcmap.value
self.display = pyart.graph.GridMapDisplay(self.Vgrid.value)
# Create Plot
if self.plot_type == "gridZ":
self.display.plot_basemap(
self.lat_lines, self.lon_lines, ax=self.ax)
self.basemap = self.display.get_basemap()
self.plot = self.display.plot_grid(
self.Vfield.value, self.VlevelZ.value, vmin=cmap['vmin'],
vmax=cmap['vmax'], cmap=cmap['cmap'], colorbar_flag=False,
title=title, ax=self.ax, fig=self.fig)
elif self.plot_type == "gridY":
self.basemap = None
self.plot = self.display.plot_latitudinal_level(
self.Vfield.value, self.VlevelY.value, vmin=cmap['vmin'],
vmax=cmap['vmax'], cmap=cmap['cmap'], colorbar_flag=False,
title=title, ax=self.ax, fig=self.fig)
elif self.plot_type == "gridX":
self.basemap = None
self.plot = self.display.plot_longitudinal_level(
self.Vfield.value, self.VlevelX.value, vmin=cmap['vmin'],
vmax=cmap['vmax'], cmap=cmap['cmap'], colorbar_flag=False,
title=title, ax=self.ax, fig=self.fig)
limits = self.Vlims.value
x = self.ax.get_xlim()
y = self.ax.get_ylim()
limits['xmin'] = x[0]
limits['xmax'] = x[1]
limits['ymin'] = y[0]
limits['ymax'] = y[1]
self._update_axes()
norm = mlabNormalize(vmin=cmap['vmin'],
vmax=cmap['vmax'])
self.cbar = mlabColorbarBase(self.cax, cmap=cmap['cmap'],
norm=norm, orientation='vertical')
self.cbar.set_label(self.units)
if self.plot_type == "gridZ":
print("Plotting %s field, Z level %d in %s" % (
self.Vfield.value, self.VlevelZ.value+1, self.name))
elif self.plot_type == "gridY":
print("Plotting %s field, Y level %d in %s" % (
self.Vfield.value, self.VlevelY.value+1, self.name))
elif self.plot_type == "gridX":
print("Plotting %s field, X level %d in %s" % (
self.Vfield.value, self.VlevelX.value+1, self.name))
self.canvas.draw()
def _update_axes(self):
'''Change the Plot Axes.'''
limits = self.Vlims.value
self.ax.set_xlim(limits['xmin'], limits['xmax'])
self.ax.set_ylim(limits['ymin'], limits['ymax'])
self.ax.figure.canvas.draw()
#########################
# Check methods #
#########################
def _set_default_limits(self, strong=True):
'''Set limits to pre-defined default.'''
limits = self.Vlims.value
if limits is None:
limits = {}
if self.Vgrid.value is None:
limits['xmin'] = 0
limits['xmax'] = 1
limits['ymin'] = 0
limits['ymax'] = 1
elif self.plot_type == "gridZ":
if self.basemap is not None:
limits['xmin'] = self.basemap.llcrnrx
limits['xmax'] = self.basemap.urcrnrx
limits['ymin'] = self.basemap.llcrnry
limits['ymax'] = self.basemap.urcrnry
else:
limits['xmin'] = -150
limits['xmax'] = 150
limits['ymin'] = -150
limits['ymax'] = 150
elif self.plot_type == "gridY":
limits['xmin'] = (self.Vgrid.value.axes['x_disp']['data'][0] /
1000.)
limits['xmax'] = (self.Vgrid.value.axes['x_disp']['data'][-1] /
1000.)
limits['ymin'] = (self.Vgrid.value.axes['z_disp']['data'][0] /
1000.)
limits['ymax'] = (self.Vgrid.value.axes['z_disp']['data'][-1] /
1000.)
elif self.plot_type == "gridX":
limits['xmin'] = (self.Vgrid.value.axes['y_disp']['data'][0] /
1000.)
limits['xmax'] = (self.Vgrid.value.axes['y_disp']['data'][-1] /
1000.)
limits['ymin'] = (self.Vgrid.value.axes['z_disp']['data'][0] /
1000.)
limits['ymax'] = (self.Vgrid.value.axes['z_disp']['data'][-1] /
1000.)
self.Vlims.change(limits, strong)
def _set_default_cmap(self, strong=True):
'''Set colormap to pre-defined default.'''
cmap = pyart.config.get_field_colormap(self.Vfield.value)
d = {}
d['cmap'] = cmap
lims = pyart.config.get_field_limits(self.Vfield.value,
self.Vgrid.value)
if lims != (None, None):
d['vmin'] = lims[0]
d['vmax'] = lims[1]
else:
d['vmin'] = -10
d['vmax'] = 65
self.Vcmap.change(d, strong)
def _get_default_title(self):
'''Get default title from pyart.'''
if (self.Vgrid.value is None or
self.Vfield.value not in self.Vgrid.value.fields):
return ''
if self.plot_type == "gridZ":
return pyart.graph.common.generate_grid_title(self.Vgrid.value,
self.Vfield.value,
self.Vlevel.value)
elif self.plot_type == "gridY":
return pyart.graph.common.generate_latitudinal_level_title(
self.Vgrid.value, self.Vfield.value, self.Vlevel.value)
elif self.plot_type == "gridX":
return pyart.graph.common.generate_longitudinal_level_title(
self.Vgrid.value, self.Vfield.value, self.Vlevel.value)
def _get_default_units(self):
'''Get default units for current grid and field.'''
if self.Vgrid.value is not None:
try:
return self.Vgrid.value.fields[self.Vfield.value]['units']
except:
return ''
else:
return ''
def _check_file_type(self):
        '''Check the file type (currently a no-op for grids).'''
# self._update_fig_ax()
return
def change_plot_type(self, plot_type):
'''Change plot type.'''
# remove shared variables
for key in ("VlevelZ", "VlevelY", "VlevelX"):
if key in self.sharedVariables.keys():
del self.sharedVariables[key]
if plot_type == "gridZ":
self.sharedVariables["VlevelZ"] = self.NewLevel
elif plot_type == "gridY":
self.sharedVariables["VlevelY"] = self.NewLevel
elif plot_type == "gridX":
self.sharedVariables["VlevelX"] = self.NewLevel
else:
import warnings
            warnings.warn('Invalid Plot type %s, resetting to gridZ' %
plot_type)
self.sharedVariables["VlevelZ"] = self.NewLevel
plot_type = "gridZ"
self.plot_type = plot_type
########################
# Image save methods #
########################
def _quick_savefile(self, PTYPE=IMAGE_EXT):
'''Save the current display via PyArt interface.'''
imagename = self.display.generate_filename(
self.Vfield.value, self.Vlevel.value, ext=IMAGE_EXT)
self.canvas.print_figure(os.path.join(os.getcwd(), imagename),
dpi=DPI)
self.statusbar.showMessage('Saved to %s' % os.path.join(os.getcwd(),
imagename))
def _savefile(self, PTYPE=IMAGE_EXT):
'''Save the current display using PyQt dialog interface.'''
imagename = self.display.generate_filename(
self.Vfield.value, self.Vlevel.value, ext=IMAGE_EXT)
file_choices = "PNG (*.png)|*.png"
path = unicode(QtGui.QFileDialog.getSaveFileName(
self, 'Save file', imagename, file_choices))
if path:
self.canvas.print_figure(path, dpi=DPI)
self.statusbar.showMessage('Saved to %s' % path)
########################
# get methods #
########################
def getPlotAxis(self):
''' get :py:class:`matplotlib.axes.Axes` instance of main plot '''
return self.ax
def getStatusBar(self):
''' get :py:class:`PyQt4.QtGui.QStatusBar` instance'''
return self.statusbar
def getField(self):
''' get current field '''
return self.Vfield.value
def getUnits(self):
''' get current units '''
return self.units
########################
# Properties #
########################
@property
def Vlevel(self):
'''Alias to VlevelZ, VlevelY or VlevelX depending on plot_type.'''
if self.plot_type == "gridZ":
return self.VlevelZ
elif self.plot_type == "gridY":
return self.VlevelY
elif self.plot_type == "gridX":
return self.VlevelX
else:
return None
@property
def levels(self):
'''Values from the axes of grid, depending on plot_type.'''
if self.plot_type == "gridZ":
return self.Vgrid.value.axes['z_disp']['data'][:]
elif self.plot_type == "gridY":
return self.Vgrid.value.axes['y_disp']['data'][:]
elif self.plot_type == "gridX":
return self.Vgrid.value.axes['x_disp']['data'][:]
else:
return None
class _DisplayStart(QtGui.QDialog):
'''
Dialog Class for graphical start of display, to be used in guiStart.
'''
def __init__(self):
'''Initialize the class to create the interface.'''
super(_DisplayStart, self).__init__()
self.result = {}
self.layout = QtGui.QGridLayout(self)
# set window as modal
self.setWindowModality(QtCore.Qt.ApplicationModal)
self.setupUi()
def chooseGrid(self):
item = VariableChoose().chooseVariable()
if item is None:
return
else:
self.result["Vgrid"] = getattr(item[1], item[2])
def chooseField(self):
item = VariableChoose().chooseVariable()
if item is None:
return
else:
self.result["Vfield"] = getattr(item[1], item[2])
def chooseLevel(self):
item = VariableChoose().chooseVariable()
if item is None:
return
else:
self.result["VlevelZ"] = getattr(item[1], item[2])
def chooseLims(self):
item = VariableChoose().chooseVariable()
if item is None:
return
else:
self.result["Vlims"] = getattr(item[1], item[2])
def setupUi(self):
self.gridButton = QtGui.QPushButton("Find Variable")
self.gridButton.clicked.connect(self.chooseGrid)
self.layout.addWidget(QtGui.QLabel("Vgrid"), 0, 0)
self.layout.addWidget(self.gridButton, 0, 1, 1, 3)
self.plot_type = QtGui.QLineEdit("gridZ")
self.layout.addWidget(QtGui.QLabel("plot_type"), 1, 0)
self.layout.addWidget(self.plot_type, 1, 1, 1, 3)
self.fieldButton = QtGui.QPushButton("Find Variable")
self.fieldButton.clicked.connect(self.chooseField)
self.layout.addWidget(QtGui.QLabel("Vfield"), 2, 0)
self.field = QtGui.QLineEdit("")
self.layout.addWidget(self.field, 2, 1)
self.layout.addWidget(QtGui.QLabel("or"), 2, 2)
self.layout.addWidget(self.fieldButton, 2, 3)
self.levelButton = QtGui.QPushButton("Find Variable")
self.levelButton.clicked.connect(self.chooseLevel)
self.layout.addWidget(QtGui.QLabel("Vlevel"), 3, 0)
self.level = QtGui.QSpinBox()
self.layout.addWidget(self.level, 3, 1)
self.layout.addWidget(QtGui.QLabel("or"), 3, 2)
self.layout.addWidget(self.levelButton, 3, 3)
self.limsButton = QtGui.QPushButton("Find Variable")
self.limsButton.clicked.connect(self.chooseLims)
self.layout.addWidget(QtGui.QLabel("Vlims"), 4, 0)
self.layout.addWidget(self.limsButton, 4, 1, 1, 3)
self.name = QtGui.QLineEdit("GridDisplay")
self.layout.addWidget(QtGui.QLabel("name"), 5, 0)
self.layout.addWidget(self.name, 5, 1, 1, 3)
self.closeButton = QtGui.QPushButton("Start")
self.closeButton.clicked.connect(self.closeDialog)
self.layout.addWidget(self.closeButton, 6, 0, 1, 5)
def closeDialog(self):
self.done(QtGui.QDialog.Accepted)
def startDisplay(self):
self.exec_()
# if no Vgrid abort
if 'Vgrid' not in self.result:
self.result['Vgrid'] = Variable(None)
# common.ShowWarning("Must select a variable for Vgrid.")
            # I'm allowing this to continue, but it will result in an error later.
        # If Vfield, Vlevel, Vlims were not selected, create new ones
field = str(self.field.text())
level = self.level.value()
if 'Vfield' not in self.result:
self.result['Vfield'] = Variable(field)
if 'VlevelZ' not in self.result:
self.result['VlevelZ'] = Variable(level)
self.result['name'] = str(self.name.text())
self.result['plot_type'] = str(self.plot_type.text())
return self.result
|
{
"content_hash": "8ef244fd84a0176b2e0eeb73b5a5f957",
"timestamp": "",
"source": "github",
"line_count": 1138,
"max_line_length": 78,
"avg_line_length": 36.63796133567663,
"alnum_prop": 0.5508226603348204,
"repo_name": "jjhelmus/artview",
"id": "ff329630712f06fddc6a13313263ea75047fd60b",
"size": "41694",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "artview/components/plot_grid_legacy.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "447864"
}
],
"symlink_target": ""
}
|
"""IPython Test Suite Runner.
This module provides a main entry point to a user script to test IPython
itself from the command line. There are two ways of running this script:
1. With the syntax `iptest all`. This runs our entire test suite by
calling this script (with different arguments) recursively. This
   causes modules and packages to be tested in different processes, using nose
or trial where appropriate.
2. With the regular nose syntax, like `iptest -vvs IPython`. In this form
the script simply calls nose, but with special command line flags and
plugins loaded.
"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from __future__ import print_function
import glob
from io import BytesIO
import os
import os.path as path
import sys
from threading import Thread, Lock, Event
import warnings
import nose.plugins.builtin
from nose.plugins.xunit import Xunit
from nose import SkipTest
from nose.core import TestProgram
from nose.plugins import Plugin
from nose.util import safe_str
from IPython.utils.process import is_cmd_found
from IPython.utils.py3compat import bytes_to_str
from IPython.utils.importstring import import_item
from IPython.testing.plugin.ipdoctest import IPythonDoctest
from IPython.external.decorators import KnownFailure, knownfailureif
pjoin = path.join
#-----------------------------------------------------------------------------
# Globals
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Warnings control
#-----------------------------------------------------------------------------
# Twisted generates annoying warnings with Python 2.6, as does other code
# that imports the deprecated 'sets' module
warnings.filterwarnings('ignore', 'the sets module is deprecated',
DeprecationWarning )
# This one also comes from Twisted
warnings.filterwarnings('ignore', 'the sha module is deprecated',
DeprecationWarning)
# Wx on Fedora11 spits these out
warnings.filterwarnings('ignore', 'wxPython/wxWidgets release number mismatch',
UserWarning)
# ------------------------------------------------------------------------------
# Monkeypatch Xunit to count known failures as skipped.
# ------------------------------------------------------------------------------
def monkeypatch_xunit():
try:
knownfailureif(True)(lambda: None)()
except Exception as e:
KnownFailureTest = type(e)
def addError(self, test, err, capt=None):
if issubclass(err[0], KnownFailureTest):
err = (SkipTest,) + err[1:]
return self.orig_addError(test, err, capt)
Xunit.orig_addError = Xunit.addError
Xunit.addError = addError
#-----------------------------------------------------------------------------
# Check which dependencies are installed and greater than minimum version.
#-----------------------------------------------------------------------------
def extract_version(mod):
return mod.__version__
def test_for(item, min_version=None, callback=extract_version):
"""Test to see if item is importable, and optionally check against a minimum
version.
If min_version is given, the default behavior is to check against the
`__version__` attribute of the item, but specifying `callback` allows you to
extract the value you are interested in. e.g::
In [1]: import sys
In [2]: from IPython.testing.iptest import test_for
In [3]: test_for('sys', (2,6), callback=lambda sys: sys.version_info)
Out[3]: True
"""
try:
check = import_item(item)
except (ImportError, RuntimeError):
# GTK reports Runtime error if it can't be initialized even if it's
# importable.
return False
else:
if min_version:
if callback:
# extra processing step to get version to compare
check = callback(check)
return check >= min_version
else:
return True
# Global dict where we can store information on what we have and what we don't
# have available at test run time
have = {}
have['curses'] = test_for('_curses')
have['matplotlib'] = test_for('matplotlib')
have['numpy'] = test_for('numpy')
have['pexpect'] = test_for('IPython.external.pexpect')
have['pymongo'] = test_for('pymongo')
have['pygments'] = test_for('pygments')
have['qt'] = test_for('IPython.external.qt')
have['sqlite3'] = test_for('sqlite3')
have['tornado'] = test_for('tornado.version_info', (4,0), callback=None)
have['jinja2'] = test_for('jinja2')
have['mistune'] = test_for('mistune')
have['requests'] = test_for('requests')
have['sphinx'] = test_for('sphinx')
have['jsonschema'] = test_for('jsonschema')
have['terminado'] = test_for('terminado')
have['casperjs'] = is_cmd_found('casperjs')
have['phantomjs'] = is_cmd_found('phantomjs')
have['slimerjs'] = is_cmd_found('slimerjs')
min_zmq = (13,)
have['zmq'] = test_for('zmq.pyzmq_version_info', min_zmq, callback=lambda x: x())
#-----------------------------------------------------------------------------
# Test suite definitions
#-----------------------------------------------------------------------------
test_group_names = ['parallel', 'kernel', 'kernel.inprocess', 'config', 'core',
'extensions', 'lib', 'terminal', 'testing', 'utils',
'nbformat', 'qt', 'html', 'nbconvert'
]
class TestSection(object):
def __init__(self, name, includes):
self.name = name
self.includes = includes
self.excludes = []
self.dependencies = []
self.enabled = True
def exclude(self, module):
if not module.startswith('IPython'):
module = self.includes[0] + "." + module
self.excludes.append(module.replace('.', os.sep))
def requires(self, *packages):
self.dependencies.extend(packages)
@property
def will_run(self):
return self.enabled and all(have[p] for p in self.dependencies)
# Name -> (include, exclude, dependencies_met)
test_sections = {n:TestSection(n, ['IPython.%s' % n]) for n in test_group_names}
# Exclusions and dependencies
# ---------------------------
# core:
sec = test_sections['core']
if not have['sqlite3']:
sec.exclude('tests.test_history')
sec.exclude('history')
if not have['matplotlib']:
    sec.exclude('pylabtools')
sec.exclude('tests.test_pylabtools')
# lib:
sec = test_sections['lib']
if not have['zmq']:
sec.exclude('kernel')
# We do this unconditionally, so that the test suite doesn't import
# gtk, changing the default encoding and masking some unicode bugs.
sec.exclude('inputhookgtk')
# We also do this unconditionally, because wx can interfere with Unix signals.
# There are currently no tests for it anyway.
sec.exclude('inputhookwx')
# Testing inputhook will need a lot of thought, to figure out
# how to have tests that don't lock up with the gui event
# loops in the picture
sec.exclude('inputhook')
# testing:
sec = test_sections['testing']
# These have to be skipped on win32 because they use echo, rm, cd, etc.
# See ticket https://github.com/ipython/ipython/issues/87
if sys.platform == 'win32':
sec.exclude('plugin.test_exampleip')
sec.exclude('plugin.dtexample')
# terminal:
if (not have['pexpect']) or (not have['zmq']):
test_sections['terminal'].exclude('console')
# parallel
sec = test_sections['parallel']
sec.requires('zmq')
if not have['pymongo']:
sec.exclude('controller.mongodb')
sec.exclude('tests.test_mongodb')
# kernel:
sec = test_sections['kernel']
sec.requires('zmq')
# The in-process kernel tests are done in a separate section
sec.exclude('inprocess')
# importing gtk sets the default encoding, which we want to avoid
sec.exclude('zmq.gui.gtkembed')
sec.exclude('zmq.gui.gtk3embed')
if not have['matplotlib']:
sec.exclude('zmq.pylab')
# kernel.inprocess:
test_sections['kernel.inprocess'].requires('zmq')
# extensions:
sec = test_sections['extensions']
# This is deprecated in favour of rpy2
sec.exclude('rmagic')
# autoreload does some strange stuff, so move it to its own test section
sec.exclude('autoreload')
sec.exclude('tests.test_autoreload')
test_sections['autoreload'] = TestSection('autoreload',
['IPython.extensions.autoreload', 'IPython.extensions.tests.test_autoreload'])
test_group_names.append('autoreload')
# qt:
test_sections['qt'].requires('zmq', 'qt', 'pygments')
# html:
sec = test_sections['html']
sec.requires('zmq', 'tornado', 'requests', 'sqlite3', 'jsonschema')
# The notebook 'static' directory contains JS, css and other
# files for web serving. Occasionally projects may put a .py
# file in there (MathJax ships a conf.py), so we might as
# well play it safe and skip the whole thing.
sec.exclude('static')
sec.exclude('tasks')
if not have['jinja2']:
sec.exclude('notebookapp')
if not have['pygments'] or not have['jinja2']:
sec.exclude('nbconvert')
if not have['terminado']:
sec.exclude('terminal')
# config:
# Config files aren't really importable stand-alone
test_sections['config'].exclude('profile')
# nbconvert:
sec = test_sections['nbconvert']
sec.requires('pygments', 'jinja2', 'jsonschema', 'mistune')
# Exclude nbconvert directories containing config files used to test.
# Executing the config files with iptest would cause an exception.
sec.exclude('tests.files')
sec.exclude('exporters.tests.files')
if not have['tornado']:
sec.exclude('nbconvert.post_processors.serve')
sec.exclude('nbconvert.post_processors.tests.test_serve')
# nbformat:
test_sections['nbformat'].requires('jsonschema')
#-----------------------------------------------------------------------------
# Functions and classes
#-----------------------------------------------------------------------------
def check_exclusions_exist():
from IPython.utils.path import get_ipython_package_dir
from IPython.utils.warn import warn
parent = os.path.dirname(get_ipython_package_dir())
    for sec in test_sections.values():
        for pattern in sec.excludes:
fullpath = pjoin(parent, pattern)
if not os.path.exists(fullpath) and not glob.glob(fullpath + '.*'):
warn("Excluding nonexistent file: %r" % pattern)
class ExclusionPlugin(Plugin):
"""A nose plugin to effect our exclusions of files and directories.
"""
name = 'exclusions'
score = 3000 # Should come before any other plugins
def __init__(self, exclude_patterns=None):
"""
Parameters
----------
exclude_patterns : sequence of strings, optional
Filenames containing these patterns (as raw strings, not as regular
expressions) are excluded from the tests.
"""
self.exclude_patterns = exclude_patterns or []
super(ExclusionPlugin, self).__init__()
def options(self, parser, env=os.environ):
Plugin.options(self, parser, env)
def configure(self, options, config):
Plugin.configure(self, options, config)
# Override nose trying to disable plugin.
self.enabled = True
def wantFile(self, filename):
"""Return whether the given filename should be scanned for tests.
"""
if any(pat in filename for pat in self.exclude_patterns):
return False
return None
def wantDirectory(self, directory):
"""Return whether the given directory should be scanned for tests.
"""
if any(pat in directory for pat in self.exclude_patterns):
return False
return None
class StreamCapturer(Thread):
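    """Capture output written to a pipe on a background thread.
    Subprocesses write to ``writefd`` (handed out via
    ``SubprocessStreamCapturePlugin.get_write_fileno``); ``run`` drains
    ``readfd`` into an in-memory buffer, optionally echoing it to stdout,
    until ``halt`` is called.
    """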
daemon = True # Don't hang if main thread crashes
started = False
def __init__(self, echo=False):
super(StreamCapturer, self).__init__()
self.echo = echo
self.streams = []
self.buffer = BytesIO()
self.readfd, self.writefd = os.pipe()
self.buffer_lock = Lock()
self.stop = Event()
def run(self):
self.started = True
while not self.stop.is_set():
chunk = os.read(self.readfd, 1024)
with self.buffer_lock:
self.buffer.write(chunk)
if self.echo:
sys.stdout.write(bytes_to_str(chunk))
os.close(self.readfd)
os.close(self.writefd)
def reset_buffer(self):
with self.buffer_lock:
self.buffer.truncate(0)
self.buffer.seek(0)
def get_buffer(self):
with self.buffer_lock:
return self.buffer.getvalue()
def ensure_started(self):
if not self.started:
self.start()
def halt(self):
"""Safely stop the thread."""
if not self.started:
return
self.stop.set()
os.write(self.writefd, b'\0') # Ensure we're not locked in a read()
self.join()
class SubprocessStreamCapturePlugin(Plugin):
name='subprocstreams'
def __init__(self):
Plugin.__init__(self)
self.stream_capturer = StreamCapturer()
self.destination = os.environ.get('IPTEST_SUBPROC_STREAMS', 'capture')
# This is ugly, but distant parts of the test machinery need to be able
# to redirect streams, so we make the object globally accessible.
nose.iptest_stdstreams_fileno = self.get_write_fileno
def get_write_fileno(self):
if self.destination == 'capture':
self.stream_capturer.ensure_started()
return self.stream_capturer.writefd
elif self.destination == 'discard':
return os.open(os.devnull, os.O_WRONLY)
else:
return sys.__stdout__.fileno()
def configure(self, options, config):
Plugin.configure(self, options, config)
# Override nose trying to disable plugin.
if self.destination == 'capture':
self.enabled = True
def startTest(self, test):
# Reset log capture
self.stream_capturer.reset_buffer()
def formatFailure(self, test, err):
# Show output
ec, ev, tb = err
captured = self.stream_capturer.get_buffer().decode('utf-8', 'replace')
if captured.strip():
ev = safe_str(ev)
out = [ev, '>> begin captured subprocess output <<',
captured,
'>> end captured subprocess output <<']
return ec, '\n'.join(out), tb
return err
formatError = formatFailure
def finalize(self, result):
self.stream_capturer.halt()
def run_iptest():
"""Run the IPython test suite using nose.
This function is called when this script is **not** called with the form
`iptest all`. It simply calls nose with appropriate command line flags
and accepts all of the standard nose arguments.
"""
# Apply our monkeypatch to Xunit
if '--with-xunit' in sys.argv and not hasattr(Xunit, 'orig_addError'):
monkeypatch_xunit()
warnings.filterwarnings('ignore',
'This will be removed soon. Use IPython.testing.util instead')
arg1 = sys.argv[1]
if arg1 in test_sections:
section = test_sections[arg1]
sys.argv[1:2] = section.includes
elif arg1.startswith('IPython.') and arg1[8:] in test_sections:
section = test_sections[arg1[8:]]
sys.argv[1:2] = section.includes
else:
section = TestSection(arg1, includes=[arg1])
argv = sys.argv + [ '--detailed-errors', # extra info in tracebacks
'--with-ipdoctest',
'--ipdoctest-tests','--ipdoctest-extension=txt',
# We add --exe because of setuptools' imbecility (it
# blindly does chmod +x on ALL files). Nose does the
# right thing and it tries to avoid executables,
# setuptools unfortunately forces our hand here. This
# has been discussed on the distutils list and the
# setuptools devs refuse to fix this problem!
'--exe',
]
if '-a' not in argv and '-A' not in argv:
argv = argv + ['-a', '!crash']
if nose.__version__ >= '0.11':
# I don't fully understand why we need this one, but depending on what
# directory the test suite is run from, if we don't give it, 0 tests
# get run. Specifically, if the test suite is run from the source dir
# with an argument (like 'iptest.py IPython.core', 0 tests are run,
# even if the same call done in this directory works fine). It appears
# that if the requested package is in the current dir, nose bails early
# by default. Since it's otherwise harmless, leave it in by default
# for nose >= 0.11, though unfortunately nose 0.10 doesn't support it.
argv.append('--traverse-namespace')
# use our plugin for doctesting. It will remove the standard doctest plugin
# if it finds it enabled
plugins = [ExclusionPlugin(section.excludes), IPythonDoctest(), KnownFailure(),
SubprocessStreamCapturePlugin() ]
# Use working directory set by parent process (see iptestcontroller)
if 'IPTEST_WORKING_DIR' in os.environ:
os.chdir(os.environ['IPTEST_WORKING_DIR'])
# We need a global ipython running in this process, but the special
# in-process group spawns its own IPython kernels, so for *that* group we
# must avoid also opening the global one (otherwise there's a conflict of
# singletons). Ultimately the solution to this problem is to refactor our
# assumptions about what needs to be a singleton and what doesn't (app
# objects should, individual shells shouldn't). But for now, this
# workaround allows the test suite for the inprocess module to complete.
if 'kernel.inprocess' not in section.name:
from IPython.testing import globalipapp
globalipapp.start_ipython()
# Now nose can run
TestProgram(argv=argv, addplugins=plugins)
if __name__ == '__main__':
run_iptest()
|
{
"content_hash": "c122fa92f8aca52d36320475dfad74ff",
"timestamp": "",
"source": "github",
"line_count": 518,
"max_line_length": 86,
"avg_line_length": 35.285714285714285,
"alnum_prop": 0.6167523799102747,
"repo_name": "wolfram74/numerical_methods_iserles_notes",
"id": "ecc1ee457cc6614acc088f0931649d54b07332d2",
"size": "18302",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "venv/lib/python2.7/site-packages/IPython/testing/iptest.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "282435"
},
{
"name": "C++",
"bytes": "59801"
},
{
"name": "CSS",
"bytes": "2038"
},
{
"name": "FORTRAN",
"bytes": "3707"
},
{
"name": "Groff",
"bytes": "6753"
},
{
"name": "HTML",
"bytes": "37522"
},
{
"name": "JavaScript",
"bytes": "1368241"
},
{
"name": "Python",
"bytes": "31296026"
},
{
"name": "Shell",
"bytes": "3869"
},
{
"name": "Smarty",
"bytes": "21425"
},
{
"name": "XSLT",
"bytes": "366202"
}
],
"symlink_target": ""
}
|
from swgpy.object import *
def create(kernel):
result = Creature()
result.template = "object/mobile/shared_gurrcat.iff"
result.attribute_template_id = 9
result.stfName("monster_name","tusk_cat")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
{
"content_hash": "6109028f296ed5f6414d47595411efa1",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 53,
"avg_line_length": 21.846153846153847,
"alnum_prop": 0.6866197183098591,
"repo_name": "obi-two/Rebelion",
"id": "63648c0ff557a4dd835e06dfcfc06f81a55d081c",
"size": "429",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "data/scripts/templates/object/mobile/shared_gurrcat.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11818"
},
{
"name": "C",
"bytes": "7699"
},
{
"name": "C++",
"bytes": "2293610"
},
{
"name": "CMake",
"bytes": "39727"
},
{
"name": "PLSQL",
"bytes": "42065"
},
{
"name": "Python",
"bytes": "7499185"
},
{
"name": "SQLPL",
"bytes": "41864"
}
],
"symlink_target": ""
}
|
"""Generates user-friendly single-band IPHAS DR2 detection catalogues.
Summary
-------
This module is used to transform the IPHAS detection tables that are kindly
provided by the ``imcore`` source detection and measurement tool of the
Cambridge Astronomical Survey Unit (CASU). Its main purpose is to convert the
columns in the CASU detection tables from instrumental units into more
user-friendly astronomical units (e.g. converting fluxes into magnitudes,
converting pixel coordinates into celestial coordinates, and adding
user-friendly warning flags to signal quality problems.) Whilst doing so,
the script corrects for a range of known issues present in the data,
e.g. it will apply a custom fix to the World Coordinate System (WCS) where
necessary.
This module can also produce a table called 'metadata.fits' which details the
metadata of an entire directory of CASU detection tables, which is useful for
carrying out quality control.
Usage
-----
1. To create a table called 'metadata.fits' which tabulates the header
information for the entire set of IPHAS detection tables, run:
``detections.save_metadata()``
2. To prepare a list of zeropoints which enforces the fixed offset between
the r- and Halpha-band zeropoints:
``detections.sanitise_zeropoints()``
3. To convert all CASU-produced detection tables into the user-friendly format
following the IPHAS DR2 column conventions, use:
``detections.save_detections()``
Caveats and lessons learnt
--------------------------
During the creation of IPHAS DR2, we encountered the following issues
with the pipeline-processed data set from CASU:
* In very crowded fields, the astrometric solution can be off by more than
0.5 arcsec towards the edges of the CCDs. The problem is rare and has been
resolved ad-hoc using the code under ``scripts/tune-wcs``.
Prior to a future data release, it may be worth refining the astrometry
across the data set.
* The WCS standards have evolved over the years, and the WCS in some of the
older data is inconsistent with modern conventions. The function
``fix_wcs()`` in this module must be used before converting pixel to world
coordinates.
* Spurious sources frequently appear in the vignetted corners of CCDs 1 and 3,
and in general near CCD edges. It is very important to mask these out during
catalogue generation.
* Data from December 2003 are affected by unusual bad columns appearing in
CCD 3 (x=1244-1245) and CCD 4 (x=549), which were not masked out by the
confidence map. A significant number of spurious detections were found
near these columns. Detections made near these columns in said month are
currently flagged as containing bad pixels by the ``column_badPix()``
method in this module, which is a bit of a hack.
* The 'badpix' column is missing from the CASU pipeline-produced detection
tables across the first few months of the survey (i.e. 2003), hence poor
photometry as a result of bad pixels is ever-so-slightly more likely from
data in those months.
* The bandmerging script relies on the STILTS ``tmatchn multimode=group``
feature, which uses a "friends of friends" approach and does not guarantee
returning the best possible match in very crowded fields (which is a hard
problem).
* A very small number of images were found to lack the keywords EXTINCT,
APCOR, or PERCORR, which are necessary for computing magnitudes. This module
will assume default values for these keywords if they are missing, rather
than rejecting the data.
* Be aware that the MAGZPT keywords in the FITS headers supplied by the CASU
pipeline do not include the necessary correction for extinction, apcor,
or percorr.
Future improvements
-------------------
* This module does not correct for the radial geometric distortions at present,
for which we pay a price during the global re-calibration.
* It would be nice to look up the confidence value in the confidence map at the
position of each star, and include it as an extra column in the catalogue.
"""
from astropy.io import fits
from astropy.io import ascii
from astropy.table import Table
from astropy.time import Time
from astropy import wcs
from astropy import log
import numpy as np
import collections
import os
import sys
import datetime
import constants
import util
__author__ = 'Geert Barentsen'
__copyright__ = 'Copyright, The Authors'
__credits__ = ['Geert Barentsen', 'Hywel Farnhill',
'Janet Drew', 'Robert Greimel']
################################
# CONSTANTS & CONFIGURATION
################################
# Where to write the output catalogues?
MYDESTINATION = os.path.join(constants.DESTINATION, 'detected')
util.setup_dir(MYDESTINATION)
# Yale Bright Star Catalogue (Vizier V50), filtered for IPHAS area and V < 4.5
BSC_PATH = os.path.join(constants.LIBDIR, 'BrightStarCat-iphas.fits')
BSC = fits.getdata(BSC_PATH, 1)
BRIGHT_RA = BSC['_RAJ2000'] # Read coordinates and brightness into memory
BRIGHT_DEC = BSC['_DEJ2000']
BRIGHT_VMAG = BSC['Vmag']
# Which extensions to expect in the fits catalogues?
EXTS = [1, 2, 3, 4] # Corresponds to INT/WFC CCD1, CCD2, CCD3, CCD4
# Table containing slight updates to WCS astrometric parameters
WCSFIXES_PATH = os.path.join(constants.PACKAGEDIR,
'wcs-tuning', 'wcs-fixes.csv')
WCSFIXES = ascii.read(WCSFIXES_PATH)
# Table detailing the pre-calibration zeropoints;
# the table differs from the original zeropoint values in the FITS headers
# by enforcing zp(r)-zp(Halpha)=3.14
ZEROPOINTS_TABLE_PATH = os.path.join(constants.DESTINATION,
'zeropoints-precalibration.csv')
try:
ZEROPOINTS_TABLE = ascii.read(ZEROPOINTS_TABLE_PATH)
except IOError:
log.warning("zeropoints pre-calibration table has not been created yet.")
ZEROPOINTS_TABLE = None
# Cache dict to hold the confidence maps for each filter/directory
confmaps = {'Halpha': {}, 'r': {}, 'i': {}}
# Ignore log of negative fluxes
np.seterr(invalid='ignore', divide='ignore')
###############
# CLASSES
###############
class CatalogueException(Exception):
"""
Exception raised when a catalogue has a known problem which cannot be
fixed, i.e. when the catalogue is considered useless.
"""
pass
class DetectionCatalogue():
"""
Reads in a detection catalogue in the format produced by the Cambridge
Astronomical Survey Unit's (CASU) imcore tool, and transforms it into a
UKIDSS-style catalogues with user-friendly columns.
Parameters
----------
path : str
Location of the CASU-style FITS catalogue.
only_accept_iphas : bool, optional
Raise a `CatalogueException` if the catalogue is not an IPHAS exposure.
"""
def __init__(self, path, only_accept_iphas=True):
"""Open and sanitise the detection catalogue.
As part of the constructor, the validity of the header is checked and
repaired if necessary, including the WCS astrometric solution,
the zeropoint and the exposure time.
"""
self.path = path
self.directory = '/'.join(path.split('/')[:-1])
self.filename = path.split('/')[-1]
try:
self.fits = fits.open(self.path)
except IOError, e:
raise CatalogueException('IOError: %s' % e)
# Check and fix the header; a CatalogueException is raised
# in case of non-resolvable header problems
self.check_header(only_accept_iphas)
self.fix_wcs() # IPHAS WCS solutions have foibles!
# Finally, store a few fixed values as properties
# because they are frequently needed and expensive to compute
self.objectcount = np.sum([self.fits[ccd].data.size for ccd in EXTS])
self.cat_path = self.strip_basedir(path) # Where is the catalogue?
self.image_path = self.get_image_path() # Where is the image?
self.conf_path = self.get_conf_path() # Where is the confidence map?
self.exptime = self.get_exptime() # Assumed exposure time
self.zeropoint = self.get_zeropoint() # Assumed zeropoint
def hdr(self, field, ext=1):
"""Return the value of the header keyword from extension `ext`."""
return self.fits[ext].header.get(field)
def check_header(self, only_accept_iphas):
"""
Checks the pipeline catalogue for known problems; fixes if possible.
If the catalogue is not suitable for the IPHAS data release because
of a known problem, a CatalogueException is raised.
Parameters
----------
only_accept_iphas : bool
Raise a `CatalogueException` if the file is not an IPHAS exposure.
"""
if only_accept_iphas:
# The OBJECT keyword must start with the word "intphas" or "iphas"
if not (self.hdr('OBJECT').startswith('intphas')
or self.hdr('OBJECT').startswith('iphas')):
raise CatalogueException('Not an IPHAS run, OBJECT = %s' %
self.hdr('OBJECT'))
# The filter must be one of Halpha/r/i
if not self.hdr('WFFBAND') in ['Halpha', 'r', 'i']:
raise CatalogueException('Unexpected filter, WFFBAND = %s' %
self.hdr('WFFBAND'))
for ccd in EXTS:
            # Early versions of CASU catalogues have multiple columns named 'Blank'.
# Numpy will throw an exception if multiple columns have the same
# name, so we need to rename these columns.
n_columns = len(self.fits[ccd].columns)
for col in xrange(24, n_columns, 1):
name = self.fits[ccd].columns[col].name
if name == 'Blank':
self.fits[ccd].columns[col].name = 'Blank%d' % col
# In early catalogues, the "Number" (SeqNo) field is called "No."
if self.fits[ccd].columns[0].name == 'No.':
self.fits[ccd].columns[0].name = 'Number'
# In a few cases the date/time is missing from the headers;
# we recovered these from the observing logs:
if self.fits[ccd].header['RUN'] == 755575:
self.fits[ccd].header['DATE-OBS'] = '2010-08-30'
self.fits[ccd].header['UTSTART'] = '03:52:00'
if self.fits[ccd].header['RUN'] == 948917:
self.fits[ccd].header['DATE-OBS'] = '2012-11-20'
self.fits[ccd].header['UTSTART'] = '02:48:00'
# The MJD-OBS keyword is sometimes missing when the header-packet
# from the Telescope Control System was not collected.
if self.fits[ccd].header['RUN'] in [755574, 755575, 940983, 942046,
942495, 943312, 948917]:
isostamp = (self.fits[ccd].header['DATE-OBS']
+ 'T' + self.fits[ccd].header['UTSTART'])
self.fits[ccd].header['MJD-OBS'] = Time(isostamp, scale='utc').mjd
# Some runs do not have date/time stored due to a glitch in the
# Telescope Control System. We consider this a show-stopper.
if not 'UTSTART' in self.fits[ccd].header:
raise CatalogueException('UTSTART keyword missing')
if not 'DATE-OBS' in self.fits[ccd].header:
raise CatalogueException('DATE-OBS keyword missing')
if not 'MJD-OBS' in self.fits[ccd].header:
raise CatalogueException('MJD-OBS keyword missing')
def fix_wcs(self):
"""
Updates the header if an improved WCS has been determined.
See the wcs-tuning sub-directory for information.
"""
# The headers contain a combination of old- and modern-
# style WCS parameters for the ZPN projection coefficients, which
# confuses libwcs. Moreover, in a few cases the keyword values
# are plainly wrong. Hence we remove the keywords.
for ccd in EXTS:
for kw in ['PV1_0', 'PV1_1', 'PV1_2', 'PV1_3',
'PV2_0', 'PV2_1', 'PV2_2', 'PV2_3',
                       'PV3_0', 'PV3_1', 'PV3_2', 'PV3_3',
'PROJP1', 'PROJP3', 'WAT1_001', 'WAT2_001',
'RADECSYS']:
del self.fits[ccd].header[kw] # Remove junk
# ..and enforce the pipeline's true defaults
self.fits[ccd].header['EQUINOX'] = 2000.0
self.fits[ccd].header['RADESYSa'] = 'ICRS'
self.fits[ccd].header['PV2_1'] = 1.0
self.fits[ccd].header['PV2_3'] = 220.0
self.fits[ccd].header['CUNIT1'] = 'deg'
self.fits[ccd].header['CUNIT2'] = 'deg'
# Is an updated (fixed) WCS available?
if self.hdr('RUN') in WCSFIXES['RUN']:
for ccd in EXTS:
idx = ((WCSFIXES['RUN'] == self.hdr('RUN'))
& (WCSFIXES['CCD'] == ccd))
if idx.sum() > 0:
log.info("WCS fixed: {0}[{1}].".format(self.hdr('RUN'),
ccd))
idx_fix = idx.nonzero()[0][-1]
for kw in ['CRVAL1', 'CRVAL2', 'CRPIX1', 'CRPIX2',
'CD1_1', 'CD1_2', 'CD2_1', 'CD2_2']:
self.fits[ccd].header[kw] = WCSFIXES[kw][idx_fix]
def get_image_path(self):
"""Returns the filename of the accompanying image FITS file.
Raises a CatalogueException if the image is missing.
"""
candidate = os.path.join(self.directory,
self.filename.split('_')[0] + '.fit')
if os.path.exists(candidate):
return self.strip_basedir(candidate)
else:
raise CatalogueException('No image found for %s' % (self.path))
def get_conf_path(self):
"""Return the filename of the accompanying confidence map."""
mydir = self.directory
myband = self.hdr('WFFBAND')
global confmaps
        # The results from previous function calls are stored in 'confmaps'
if mydir not in confmaps[myband].keys():
# Some directories do not contain confidence maps
if mydir == os.path.join(constants.RAWDATADIR, 'iphas_nov2006c'):
candidatedir = os.path.join(constants.RAWDATADIR, 'iphas_nov2006b')
elif mydir == os.path.join(constants.RAWDATADIR, 'iphas_jul2008'):
candidatedir = os.path.join(constants.RAWDATADIR, 'iphas_aug2008')
elif mydir == os.path.join(constants.RAWDATADIR, 'iphas_oct2009'):
candidatedir = os.path.join(constants.RAWDATADIR, 'iphas_nov2009')
elif mydir == os.path.join(constants.RAWDATADIR, 'run10'):
candidatedir = os.path.join(constants.RAWDATADIR, 'run11')
elif mydir == os.path.join(constants.RAWDATADIR, 'run13'):
candidatedir = os.path.join(constants.RAWDATADIR, 'run12')
else:
candidatedir = mydir
# Try all possible names
for name in constants.CONF_NAMES[myband]:
candidate = os.path.join(candidatedir, name)
if os.path.exists(candidate):
confmaps[myband][mydir] = candidate # Success!
continue
# Return confidence map name if we found one, raise exception otherwise
try:
return self.strip_basedir(confmaps[myband][mydir])
except KeyError:
return None
#raise CatalogueException('No confidence map found in %s' % mydir)
def strip_basedir(self, path):
return path[len(constants.RAWDATADIR):]
def get_exptime(self):
"""Return the exposure time.
We do not simply return the 'EXPTIME' value recorded in the header,
        because the WFC has a quirk of recording the EXPTIME incorrectly.
In general, the *requested* exposure time is more reliable than the
recorded time, and hence we return the requested values which are
typical for the IPHAS survey.
This follows the original perl script from Brent:
{
my ($time) = @_;
if($time < 15 && abs($time-10) > 0.1){
return 10.00;
} elsif ($time > 15 && $time < 35 && abs($time-30) > 0.1){
return 30.00;
} elsif ($time > 100 && abs($time-120) > 0.1){
return 120.00;
} else {
return $time;
}
}
"""
t = self.hdr('EXPTIME')
# Anchor runs for which we know the i-band exptime (9.5s) can be trusted
# This was added in during the final stages of DR2 calibration.
if self.hdr('RUN') in [364687,
368903, 368904, 368923, 368925,
369998, 370073, 370076, 370084,
370095, 371652, 371695, 372557,
372684, 372707, 372751, 372771,
372880, 373106, 373111, 373698,
374904, 376449, 376461, 376463,
376481, 376493, 376530, 401548,
401566, 402270, 407505, 407580,
407586, 407598, 408287, 408296,
413548, 413566, 413596, 413783,
413804, 414671, 418169, 418190,
418196, 418310, 427588, 427820,
457662, 460468, 470277, 470592,
470822, 470852, 474652, 476050,
476131, 478320, 478434, 478609,
478645, 478720, 478795, 537478,
537544, 537550, 537565, 537623,
538318, 538354, 538366, 538406,
538595, 538601, 538759, 540932,
541185, 541717, 541948, 568871,
568892, 568937, 568970, 568982,
569666, 569768, 569816,
570005, 570559, 570601, 570754,
571311, 571362, 571377, 571704,
597412, 597469, 597778, 598536,
598710, 598865, 598880, 647562,
649761, 686153, 686264, 687199,
687757, 702703, 702724, 702769,
703360, 703408, 703741]:
log.info('EXPTIME {0}s trusted for run {1}'.format(t, self.hdr('RUN')))
return t
if t > 5 and t < 15 and abs(t-10) > 0.1:
return 10.00
elif t > 25 and t < 35 and abs(t-30) > 0.1:
return 30.00
elif t > 110 and t < 130 and abs(t-120) > 0.1:
return 120.00
else:
return t
def get_zeropoint(self):
"""Return the magnitude zeropoint corrected for extinction.
Returns the zeropoint for the exposure, which corresponds to the
'MAGZPT' value recorded in the header (unless an override appears
in the ZEROPOINTS_TABLE table), corrected for extinction.
"""
# Get the nightly zeropoint we want to adopt
# for H-alpha the zp(r)-zp(Halpha) for vega is enforced
# through the ZEROPOINTS_TABLE
if ZEROPOINTS_TABLE and (self.hdr('RUN') in ZEROPOINTS_TABLE['run']):
idx_run = np.argwhere(ZEROPOINTS_TABLE['run']
== self.hdr('RUN'))[0][0]
zp = ZEROPOINTS_TABLE[idx_run]['zp']
else:
zp = self.hdr('MAGZPT')
# Retrieve the airmass from the header
airmass = self.hdr('AIRMASS')
if airmass is None: # Two fields have the keyword missing :-(
airmass = 1.0
# Load the assumed extinction per unit airmass
# see http://www.ast.cam.ac.uk/~wfcsur/technical/photom/index.php
extinct = self.hdr('EXTINCT')
if extinct is None:
extinct = 0.0
# Correct the zeropoint for the exposure's specific airmass
return zp - (airmass - 1) * extinct
def get_percorr(self, ccd):
"""Returns the PERCORR value for a given ccd.
        PERCORR is a value in the header of each CCD. It is a magnitude
correction based on the median dark sky recorded in science frames
compared to the median for all the CCDs.
"""
mypercorr = self.hdr('PERCORR', ccd)
if mypercorr is None: # PERCORR keyword is sometimes missing
mypercorr = 0.0
return mypercorr
def column_runID(self):
"""Returns the FITS column with the runID for each star.
(which is of course identical for all stars from the same exposure)"""
runID = np.array([self.hdr('RUN')] * self.objectcount)
return fits.Column(name='runID', format='J', unit='Number', array=runID)
def column_band(self):
"""Returns the FITS column with the band name for each star."""
bandnames = {'r': 'r', 'i': 'i', 'Halpha': 'ha'}
myband = bandnames[self.hdr('WFFBAND')]
band = np.array([myband] * self.objectcount)
return fits.Column(name='band', format='2A', unit='String',
array=band)
def column_ccd(self):
"""Returns the FITS column with the CCD number for each star."""
ccds = np.concatenate([[ccd] * self.fits[ccd].data.size
for ccd in EXTS])
return fits.Column(name='ccd', format='B', unit='Number', array=ccds)
def column_seqNum(self):
"""Returns the FITS column with the running number for each star."""
# format must be 'J' (32-bit) because numbers larger than 32767 occur
return fits.Column(name='seqNum', format='J', unit='Number',
array=self.concat('Number'))
def column_detectionID(self, col_ccd, col_seqNum):
"""Returns the FITS column with the detectionIDs.
The detectionID is a unique identifier of the detection.
It is composed of the INT telescope run number (6 digits),
CCD number (1 digit) and a sequential source number (<= 6 digits).
"""
detectionID = np.array(['%d-%d-%d' % (self.hdr('RUN'),
col_ccd.array[i],
col_seqNum.array[i])
for i in xrange(self.objectcount)])
return fits.Column(name='detectionID', format='15A', unit='String',
array=detectionID)
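    # For example, run 541185, CCD 3, sequence number 1234 would yield the
    # detectionID string '541185-3-1234' (illustrative values).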
def column_x(self):
"""Returns the FITS column for the X CCD pixel coordinate."""
return fits.Column(name='x', format='E', unit='Pixels',
array=self.concat('X_coordinate'))
def column_y(self):
"""Returns the FITS column for the Y CCD pixel coordinate."""
return fits.Column(name='y', format='E', unit='Pixels',
array=self.concat('Y_coordinate'))
def column_planeX(self):
"""Returns the FITS column with X coordinates in the focal plane.
The reference frame of the coordinates is the pixel system of CCD #4,
with the origin on the optical axis.
The following relations transform all the CCDs to the CCD#4 system
(Copied from http://www.ast.cam.ac.uk/~wfcsur/technical/astrometry)
Virtual transform constants: (from 30 pointings in ELAIS region)
0.10000E+01 -0.10013E-02 2113.94
0.58901E-03 0.10001E+01 -12.67
Location of rotator centre in CCD-space 1 -332.881 3041.61
-0.10272E-01 0.99992E+00 78.84
-0.10003E+01 -0.10663E-01 6226.05
Location of rotator centre in CCD-space 2 3177.58 1731.94
0.10003E+01 -0.23903E-02 -2096.52
0.24865E-02 0.10003E+01 21.93
Location of rotator centre in CCD-space 3 3880.40 2996.45
0.10000E+01 0.00000E+00 0.00
0.00000E+00 0.10000E+01 0.00
Location of rotator centre in CCD-space 4 1778.00 3029.00
The transforms are in the form
a b c
d e f
and based on CCD#4 pixel system
So to convert a CCD to the CCD#4 system take the pixel location (x,y)
on the CCD and apply the following transformation to it
x' = a*x + b*y + c
y' = d*x + e*y + f
to get to rotator centre replace c -> c-1778
f -> f-3029
"""
a = [0.10000E+01, -0.10272E-01, 0.10003E+01, 0.10000E+01]
b = [-0.10013E-02, 0.99992E+00, -0.23903E-02, 0.0]
c = [2113.94, 78.84, -2096.52, 0.00]
xn = np.array([])
for i, ccd in enumerate(EXTS):
myxn = (a[i]*self.fits[ccd].data.field('X_coordinate')
+ b[i]*self.fits[ccd].data.field('Y_coordinate')
+ c[i] - 1778)
xn = np.concatenate((xn, myxn))
return fits.Column(name='planeX', format='E', unit='Pixels', array=xn)
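    # Worked example of the transform above (illustrative numbers, assuming
    # EXTS orders the CCDs 1-4): a source on CCD 1 at pixel (x, y) =
    # (1000, 2000) maps to
    #     planeX = 1.0*1000 + (-0.0010013)*2000 + 2113.94 - 1778 ~= 1333.9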
def column_planeY(self):
"""Returns the FITS column with Y coordinates in the focal plane.
See column_planeX() for details.
"""
d = [0.58901E-03, -0.10003E+01, 0.24865E-02, 0.00000E+00]
e = [0.10001E+01, -0.10663E-01, 0.10003E+01, 0.10000E+01]
f = [-12.67, 6226.05, 21.93, 0.00]
xi = np.array([])
for i in xrange(len(EXTS)):
ccd = EXTS[i]
myxi = (d[i]*self.fits[ccd].data.field('X_coordinate')
+ e[i]*self.fits[ccd].data.field('Y_coordinate')
+ f[i] - 3029)
xi = np.concatenate((xi, myxi))
return fits.Column(name='planeY', format='E', unit='Pixels', array=xi)
def column_sky(self):
return fits.Column(name='sky', format='E', unit='Counts',
array=self.concat('Skylev'))
def column_skyVar(self):
return fits.Column(name='skyVar', format='E', unit='Counts',
array=self.concat('Skyrms'))
def column_seeing(self):
seeing = np.concatenate([[constants.PXSCALE * self.hdr('SEEING', ccd)]
* self.fits[ccd].data.size
for ccd in EXTS])
return fits.Column(name='seeing', format='E', unit='arcsec',
array=seeing)
def column_gauSig(self):
return fits.Column(name='gauSig', format='E', unit='Number',
array=self.concat('Gaussian_sigma'))
def column_ell(self):
return fits.Column(name='ell', format='E', unit='Number',
array=self.concat('Ellipticity'))
def column_pa(self):
return fits.Column(name='pa', format='E', unit='Number',
array=self.concat('Position_angle'))
def column_class(self):
return fits.Column(name='class', format='I', unit='Flag',
array=self.concat('Classification'))
def column_classStat(self):
return fits.Column(name='classStat', format='E', unit='N-sigma',
array=self.concat('Statistic'))
def column_brightNeighb(self, ra, dec):
""" Returns an array of boolean flags indicating whether the stars
are within 10 arcmin of a star brighter than V < 4.5 """
flags = np.zeros(len(ra), dtype=bool) # Initialize result array
# Try all stars in the truncated bright star catalogue (BSC, Yale)
# which are nearby-ish
nearby = np.abs(dec[0] - BRIGHT_DEC) < 1.
for i in np.where(nearby)[0]:
d_ra = ra - BRIGHT_RA[i]
d_dec = dec - BRIGHT_DEC[i]
# Approx angular separation (Astronomical Algorithms Eq. 16.2)
d = np.sqrt((d_ra*np.cos(np.radians(dec)))**2 + d_dec**2)
# Flag bright neighbours if within 10 arcmin
if BRIGHT_VMAG[i] < 4: # Brighter than 4th magnitude
flags[d < 10/60.] = True
else: # Other stars in BSC; V < ~7
flags[d < 5/60.] = True
return fits.Column(name='brightNeighb', format='L', unit='Boolean',
array=flags)
def column_deblend(self):
"""Which stars have been deblended?"""
# For deblended images, only the 1st areal profile is computed
# and the other profile values are set to -1
deblend = (self.concat('Areal_3_profile') < 0)
return fits.Column(name='deblend', format='L', unit='Boolean',
array=deblend)
def column_saturated(self):
"""Which stars are saturated?"""
# We assume that stars which peak at >55000 counts cannot be
# measured accurately
saturated = np.concatenate([(self.fits[ccd].data.field('Peak_height')
> 55000)
for ccd in EXTS])
return fits.Column(name='saturated', format='L',
unit='Boolean', array=saturated)
def column_vignetted(self, col_planeX, col_planeY):
"""Which stars are too close to the corners of the focal plane?"""
# pixel distance from optical axis
x_plane = col_planeX.array
y_plane = col_planeY.array
r_plane = np.sqrt(np.power(x_plane, 2) + np.power(y_plane, 2))
# pixel distance from CCD center also matters
x = self.concat('X_coordinate')
y = self.concat('Y_coordinate')
r_ccd = np.sqrt(np.power(x-1024, 2) + np.power(y-2048, 2))
# Empirical condition for focal plane locations with poor image quality
vignetted = (r_plane + 2*r_ccd) > 7900 # pixels
return fits.Column(name='vignetted', format='L', unit='Boolean',
array=vignetted)
def column_truncated(self):
"""Which stars are too close to the CCD edges?"""
# Mark stars near the edges
avoidance = 4.0/0.333 # 4 Arcseconds
min_x = 1 + avoidance
max_x = 2048 - avoidance
min_y = 1 + avoidance
max_y = 4096 - avoidance
truncated = ((self.concat('X_coordinate') < min_x)
| (self.concat('X_coordinate') > max_x)
| (self.concat('Y_coordinate') < min_y)
| (self.concat('Y_coordinate') > max_y))
return fits.Column(name='truncated', format='L', unit='Boolean',
array=truncated)
def column_badPix(self, col_ccd, col_x, col_y):
"""Returns the FITS column indicating the number of bad pixels.
Note that bad pixel information is not given for the earliest runs.
"""
badpix = self.concat('Bad_pixels')
# The confidence map for dec2003 failed to mask out two bad columns;
# the hack below flags spurious sources near these columns.
if self.hdr('DATE-OBS')[0:7] == '2003-12': # dec2003
bad_column = (
((col_ccd.array == 4)
& (col_x.array > 548) & (col_x.array < 550))
|
((col_ccd.array == 3)
& (col_x.array > 1243) & (col_x.array < 1245)
& (col_y.array > 2048))
)
badpix[bad_column] = 99
return fits.Column(name='badPix', format='E', unit='Pixels',
array=badpix)
def column_errBits(self, col_brightNeighb, col_deblend, col_saturated,
col_vignetted, col_truncated, col_badPix):
"""Returns the numeric error quality bits as an integer.
Inspired by Hambly et al. (2008), e.g.:
http://surveys.roe.ac.uk/wsa/www/gloss_j.html#gpssource_jerrbits
bit decimal
0 2^0 = 1 Bright neighbour.
1 2^1 = 2 Deblended.
3 2^3 = 8 Saturated.
6 2^6 = 64 Vignetted.
7 2^7 = 128 Truncated.
15 2^15 = 32768 Bad pixels.
"""
# Note: booleans in FITS are stored as ord('F') / ord('T')
errBits = (1 * (col_brightNeighb.array > ord('F'))
+ 2 * (col_deblend.array > ord('F'))
+ 8 * (col_saturated.array > ord('F'))
+ 64 * (col_vignetted.array > ord('F'))
+ 128 * (col_truncated.array > ord('F'))
+ 32768 * (col_badPix.array >= 1))
return fits.Column(name='errBits', format='J',
unit='bitmask', array=errBits)
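    # A minimal sketch of how the bitmask could be decoded downstream,
    # using the bit values documented above (illustrative, not used here):
    #
    #     brightNeighb = bool(errBits & 1)
    #     saturated = bool(errBits & 8)
    #     truncated = bool(errBits & 128)
    #     badPix = bool(errBits & 32768)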
def column_night(self):
"""Column containing the YYYYMMDD identifier of the *night*
(i.e. evening)"""
mydate = datetime.datetime.strptime(
self.hdr('DATE-OBS')+' '+self.hdr('UTSTART')[0:2],
            '%Y-%m-%d %H') # Don't parse seconds; they can be '60'
if mydate.hour < 12:
mydate -= datetime.timedelta(1) # Give date at start of night
night = np.array([mydate.strftime('%Y%m%d')] * self.objectcount)
return fits.Column(name='night', format='J', array=night)
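    # For example, an exposure taken at 2004-08-01 03:20 UT is assigned to
    # night 20040731, because the identifier refers to the evening on which
    # the night of observing started (illustrative date).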
def column_mjd(self):
mjd = np.array([self.hdr('MJD-OBS')] * self.objectcount)
return fits.Column(name='mjd', format='D', unit='Julian days',
array=mjd)
def column_posErr(self):
"""Astrometric fit RMS error (arcsec)"""
posErr = np.concatenate([[self.fits[ccd].header.get('STDCRMS')]
* self.fits[ccd].data.size
for ccd in EXTS]) # In arcsec
return fits.Column(name='posErr', format='E', unit='arcsec',
array=posErr)
def compute_magnitudes(self, n_pixels, flux_field, apcor_field):
"""Convert the flux counts to magnitudes.
        Loops over the CCD extensions and computes the magnitudes assuming
mag = ZP - 2.5*log10(flux/EXPTIME) - (AIRMASS-1)*EXTINCT
- APCOR - PERCORR
Be aware that APCOR and PERCORR differ on a CCD-by-CCD basis.
For details see
http://apm3.ast.cam.ac.uk/~mike/iphas/README.catalogues
"""
magnitudes = np.array([]) # results are stored here
for ccd in EXTS:
# Load the array of fluxes
flux = self.fits[ccd].data.field(flux_field)
# Convert to magnitudes
# Note that self.zeropoint is already corrected for extinction
# as part of the get_zeropoint() method
mag = (self.zeropoint
- 2.5 * np.log10(flux / self.exptime)
- self.hdr(apcor_field, ccd)
- self.get_percorr(ccd))
# Concatenate with previous magnitudes in the for loop
magnitudes = np.concatenate((magnitudes, mag))
return magnitudes
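    # Worked example of the formula above (illustrative numbers): with a
    # zeropoint of 24.6, a flux of 1000 counts, an exposure time of 120 s,
    # an aperture correction of 0.05 and a percorr of 0.01, the magnitude is
    #     24.6 - 2.5*log10(1000/120) - 0.05 - 0.01 ~= 22.24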
def compute_magnitude_errors(self, n_pixels, flux_field, apcor_field):
"""Convert the flux errors to magnitude errors."""
errors = np.array([])
for ccd in EXTS:
# See http://apm3.ast.cam.ac.uk/~mike/iphas/README.catalogues
flux = self.fits[ccd].data.field(flux_field)
err_flux = np.sqrt((flux / self.hdr('GAIN', ccd))
+ n_pixels * (self.hdr('SKYNOISE', ccd)**2.))
err_mag = (2.5 / np.log(10)) * err_flux / flux
errors = np.concatenate((errors, err_mag))
return errors
def column_mag(self, name='aperMag2'):
"""Returns magnitude columns."""
# `mynames' defines the names of the different magnitudes and links
# them to the columns with flux values in the pipeline catalogue.
mynames = {'peakMag': 'peak', 'peakMagErr': 'peak',
'aperMag1': 'core1', 'aperMag1Err': 'core1',
'aperMag2': 'core', 'aperMag2Err': 'core',
'aperMag3': 'core2', 'aperMag3Err': 'core2',
'aperMag4': 'core3', 'aperMag4Err': 'core3',
'aperMag5': 'core4', 'aperMag5Err': 'core4'}
aperture = mynames[name]
if aperture == 'peak':
# Peak pixel
n_pixels = 1
flux_field = 'Peak_height'
apcor_field = 'APCORPK'
elif aperture == 'core1':
# Radius = 1/2 x rcore
# Corresponds to Apermag1 in mercats
n_pixels = np.pi * (0.5*self.hdr('RCORE'))**2
flux_field = 'Core1_flux'
apcor_field = 'APCOR1'
elif aperture == 'core':
# Radius = rcore
# Corresponds to Apermag2 in mercats
# When rcore is the default 3.5 pixels,
# this yields the default 2.3 arcsec diameter aperture
n_pixels = np.pi * self.hdr('RCORE')**2
flux_field = 'Core_flux'
apcor_field = 'APCOR'
elif aperture == 'core2':
# Radius = sqrt(2) x rcore
# Corresponds to Apermag3 in mercats
n_pixels = np.pi * (np.sqrt(2.0)*self.hdr('RCORE'))**2
flux_field = 'Core2_flux'
apcor_field = 'APCOR2'
elif aperture == 'core3':
# Radius = 2 x rcore
# Corresponds to Apermag4 in mercats
n_pixels = np.pi * (2.0*self.hdr('RCORE'))**2
flux_field = 'Core3_flux'
apcor_field = 'APCOR3'
elif aperture == 'core4':
# Radius = 2 sqrt(2) x rcore
# Corresponds to Apermag5 in mercats
n_pixels = np.pi * (2.0*np.sqrt(2.0)*self.hdr('RCORE'))**2
flux_field = 'Core4_flux'
apcor_field = 'APCOR4'
else:
raise CatalogueException('Did not understand requested aperture')
if name.endswith('Err'):
errors = self.compute_magnitude_errors(n_pixels, flux_field,
apcor_field)
return fits.Column(name=name, format='E',
unit='Sigma', array=errors)
else:
mag = self.compute_magnitudes(n_pixels, flux_field, apcor_field)
return fits.Column(name=name, format='E',
unit='Magnitude', array=mag)
def column_radec(self):
"""Returns RA/DEC using the pixel coordinates and the header WCS"""
ra = np.array([])
dec = np.array([])
for ccd in EXTS:
mywcs = wcs.WCS(self.fits[ccd].header, relax=True)
myra, mydec = mywcs.wcs_pix2world(
self.fits[ccd].data.field('X_coordinate'),
self.fits[ccd].data.field('Y_coordinate'),
1)
ra = np.concatenate((ra, myra))
dec = np.concatenate((dec, mydec))
col_ra = fits.Column(name='ra', format='D', unit='deg',
array=ra) # Double precision!
col_dec = fits.Column(name='dec', format='D', unit='deg',
array=dec) # Double precision!
return (col_ra, col_dec)
def get_metadata(self):
""" Returns a dictionary of exposure meta data."""
# 5-sigma depth
try:
e_5sig = (self.zeropoint
- 2.5 * np.log10(
5.0*np.sqrt(
np.sqrt(2.0)*np.pi*self.hdr('RCORE')**2.)
* self.hdr('SKYNOISE') / self.exptime)
- self.hdr('APCOR', 4)
- self.get_percorr(4))
except Exception:
e_5sig = None
# Name of the field
try:
field = self.hdr('OBJECT').split('_')[1].split(' ')[0]
except IndexError:
#raise CatalogueException('could not understand OBJECT keyword: '
# + '"{0}"'.format(self.hdr('OBJECT')))
field = ''
meta = collections.OrderedDict([
('catalogue', self.cat_path),
('image', self.image_path),
('conf', self.conf_path),
('run', self.hdr('RUN')),
('object', self.hdr('OBJECT')),
('ra', self.hdr('RA')),
('dec', self.hdr('DEC')),
('field', field),
('n_objects', self.objectcount),
('SEEING', (constants.PXSCALE * np.mean([self.hdr('SEEING', ccd)
for ccd in EXTS]))),
('CCD1_SEEING', constants.PXSCALE * self.hdr('SEEING', 1)),
('CCD2_SEEING', constants.PXSCALE * self.hdr('SEEING', 2)),
('CCD3_SEEING', constants.PXSCALE * self.hdr('SEEING', 3)),
('CCD4_SEEING', constants.PXSCALE * self.hdr('SEEING', 4)),
('ELLIPTIC', np.mean([self.hdr('ELLIPTIC', i) for i in EXTS])),
('CCD1_ELLIPTIC', self.hdr('ELLIPTIC', 1)),
('CCD2_ELLIPTIC', self.hdr('ELLIPTIC', 2)),
('CCD3_ELLIPTIC', self.hdr('ELLIPTIC', 3)),
('CCD4_ELLIPTIC', self.hdr('ELLIPTIC', 4)),
('5sig', e_5sig),
('AIRMASS', self.hdr('AIRMASS')),
('RCORE', self.hdr('RCORE')),
('CROWDED', self.hdr('CROWDED')),
('CCD1_SKYLEVEL', self.hdr('SKYLEVEL', 1)),
('CCD2_SKYLEVEL', self.hdr('SKYLEVEL', 2)),
('CCD3_SKYLEVEL', self.hdr('SKYLEVEL', 3)),
('CCD4_SKYLEVEL', self.hdr('SKYLEVEL', 4)),
('CCD1_SKYNOISE', self.hdr('SKYNOISE', 1)),
('CCD2_SKYNOISE', self.hdr('SKYNOISE', 2)),
('CCD3_SKYNOISE', self.hdr('SKYNOISE', 3)),
('CCD4_SKYNOISE', self.hdr('SKYNOISE', 4)),
('MAGZPT', self.hdr('MAGZPT')),
('MAGZRR', self.hdr('MAGZRR')),
('EXTINCT', self.hdr('EXTINCT')),
('CCD1_APCOR', self.hdr('APCOR', 1)),
('CCD2_APCOR', self.hdr('APCOR', 2)),
('CCD3_APCOR', self.hdr('APCOR', 3)),
('CCD4_APCOR', self.hdr('APCOR', 4)),
('CCD1_PERCORR', self.get_percorr(1)),
('CCD2_PERCORR', self.get_percorr(2)),
('CCD3_PERCORR', self.get_percorr(3)),
('CCD4_PERCORR', self.get_percorr(4)),
('CCD1_GAIN', self.hdr('GAIN', 1)),
('CCD2_GAIN', self.hdr('GAIN', 2)),
('CCD3_GAIN', self.hdr('GAIN', 3)),
('CCD4_GAIN', self.hdr('GAIN', 4)),
('CCD1_STDCRMS', self.hdr('STDCRMS', 1)),
('CCD2_STDCRMS', self.hdr('STDCRMS', 2)),
('CCD3_STDCRMS', self.hdr('STDCRMS', 3)),
('CCD4_STDCRMS', self.hdr('STDCRMS', 4)),
('CCD1_CRPIX1', self.hdr('CRPIX1', 1)),
('CCD1_CRPIX2', self.hdr('CRPIX2', 1)),
('CCD1_CRVAL1', self.hdr('CRVAL1', 1)),
('CCD1_CRVAL2', self.hdr('CRVAL2', 1)),
('CCD1_CD1_1', self.hdr('CD1_1', 1)),
('CCD1_CD1_2', self.hdr('CD1_2', 1)),
('CCD1_CD2_1', self.hdr('CD2_1', 1)),
('CCD1_CD2_2', self.hdr('CD2_2', 1)),
('CCD1_PV2_1', self.hdr('PV2_1', 1)),
('CCD1_PV2_2', self.hdr('PV2_2', 1)),
('CCD1_PV2_3', self.hdr('PV2_3', 1)),
('CCD2_CRPIX1', self.hdr('CRPIX1', 2)),
('CCD2_CRPIX2', self.hdr('CRPIX2', 2)),
('CCD2_CRVAL1', self.hdr('CRVAL1', 2)),
('CCD2_CRVAL2', self.hdr('CRVAL2', 2)),
('CCD2_CD1_1', self.hdr('CD1_1', 2)),
('CCD2_CD1_2', self.hdr('CD1_2', 2)),
('CCD2_CD2_1', self.hdr('CD2_1', 2)),
('CCD2_CD2_2', self.hdr('CD2_2', 2)),
('CCD2_PV2_1', self.hdr('PV2_1', 2)),
('CCD2_PV2_2', self.hdr('PV2_2', 2)),
('CCD2_PV2_3', self.hdr('PV2_3', 2)),
('CCD3_CRPIX1', self.hdr('CRPIX1', 3)),
('CCD3_CRPIX2', self.hdr('CRPIX2', 3)),
('CCD3_CRVAL1', self.hdr('CRVAL1', 3)),
('CCD3_CRVAL2', self.hdr('CRVAL2', 3)),
('CCD3_CD1_1', self.hdr('CD1_1', 3)),
('CCD3_CD1_2', self.hdr('CD1_2', 3)),
('CCD3_CD2_1', self.hdr('CD2_1', 3)),
('CCD3_CD2_2', self.hdr('CD2_2', 3)),
('CCD3_PV2_1', self.hdr('PV2_1', 3)),
('CCD3_PV2_2', self.hdr('PV2_2', 3)),
('CCD3_PV2_3', self.hdr('PV2_3', 3)),
('CCD4_CRPIX1', self.hdr('CRPIX1', 4)),
('CCD4_CRPIX2', self.hdr('CRPIX2', 4)),
('CCD4_CRVAL1', self.hdr('CRVAL1', 4)),
('CCD4_CRVAL2', self.hdr('CRVAL2', 4)),
('CCD4_CD1_1', self.hdr('CD1_1', 4)),
('CCD4_CD1_2', self.hdr('CD1_2', 4)),
('CCD4_CD2_1', self.hdr('CD2_1', 4)),
('CCD4_CD2_2', self.hdr('CD2_2', 4)),
('CCD4_PV2_1', self.hdr('PV2_1', 4)),
('CCD4_PV2_2', self.hdr('PV2_2', 4)),
('CCD4_PV2_3', self.hdr('PV2_3', 4)),
('CCDSPEED', self.hdr('CCDSPEED')),
('OBSERVER', self.hdr('OBSERVER')),
('DAZSTART', self.hdr('DAZSTART')),
('TIME', self.hdr('DATE-OBS')+' '+self.hdr('UTSTART')),
('MJD-OBS', self.hdr('MJD-OBS')),
('EXPTIME', self.hdr('EXPTIME')),
('WFFPOS', self.hdr('WFFPOS')),
('WFFBAND', self.hdr('WFFBAND')),
('WFFID', self.hdr('WFFID')),
('zeropoint_precalib', self.zeropoint),
('exptime_precalib', self.exptime)
])
return meta
def concat(self, name):
"""Returns the concatenated array of a column across all ccds"""
if name in self.fits[1].columns.names:
return np.concatenate([self.fits[ccd].data.field(name)
for ccd in EXTS])
else:
# If the key does not exist, return an array of None's
return np.concatenate([[None]*self.fits[ccd].data.size
for ccd in EXTS])
def save_detections(self):
"""Create the columns of the output FITS table and save them.
Reminder: the fits data types used are:
        L = boolean (1 byte)
X = bit
B = unsigned byte
I = 16-bit int
J = 32-bit int
K = 64-bit int
A = 1-byte char
E = single
D = double
"""
output_filename = os.path.join(MYDESTINATION,
'%s_det.fits' % self.hdr('RUN'))
# Prepare columns on which others depend
col_ccd = self.column_ccd()
col_seqNum = self.column_seqNum()
col_ra, col_dec = self.column_radec()
col_x = self.column_x()
col_y = self.column_y()
col_planeX = self.column_planeX()
col_planeY = self.column_planeY()
# Error flags
col_brightNeighb = self.column_brightNeighb(col_ra.array, col_dec.array)
col_deblend = self.column_deblend()
col_saturated = self.column_saturated()
col_vignetted = self.column_vignetted(col_planeX, col_planeY)
col_truncated = self.column_truncated()
col_badPix = self.column_badPix(col_ccd, col_x, col_y)
col_errBits = self.column_errBits(col_brightNeighb, col_deblend,
col_saturated, col_vignetted,
col_truncated, col_badPix)
# Write the output fits table
cols = fits.ColDefs([self.column_detectionID(col_ccd, col_seqNum),
self.column_runID(),
col_ccd,
col_seqNum,
self.column_band(),
col_x,
col_y,
col_planeX,
col_planeY,
col_ra,
col_dec,
self.column_posErr(),
self.column_gauSig(),
self.column_ell(),
self.column_pa(),
self.column_mag('peakMag'),
self.column_mag('peakMagErr'),
self.column_mag('aperMag1'),
self.column_mag('aperMag1Err'),
self.column_mag('aperMag2'),
self.column_mag('aperMag2Err'),
self.column_mag('aperMag3'),
self.column_mag('aperMag3Err'),
self.column_sky(),
self.column_skyVar(),
self.column_class(),
self.column_classStat(),
col_brightNeighb,
col_deblend,
col_saturated,
col_vignetted,
col_truncated,
col_badPix,
col_errBits,
self.column_night(),
self.column_mjd(),
self.column_seeing()])
hdu_table = fits.new_table(cols, tbtype='BinTableHDU')
# Copy some of the original keywords to the new catalogue
for kw in ['RUN', 'OBSERVAT', 'LATITUDE', 'LONGITUD', 'HEIGHT',
'OBSERVER',
'OBJECT', 'RA', 'DEC', 'EQUINOX', 'RADECSYS',
'MJD-OBS', 'JD', 'DATE-OBS', 'UTSTART',
'INSTRUME', 'WFFPOS', 'WFFBAND', 'WFFPSYS', 'WFFID',
'EXPTIME', 'AIRMASS', 'MAGZPT', 'MAGZRR']:
hdu_table.header[kw] = self.hdr(kw, 4)
for ext in EXTS:
hdu_table.header['SEEING%d' % ext] = self.hdr('SEEING', ext)
hdu_table.header['ELLIP%d' % ext] = self.hdr('ELLIPTIC', ext)
hdu_table.header['SKY%d' % ext] = self.hdr('SKYLEVEL', ext)
hdu_table.header['PERCORR%d' % ext] = self.get_percorr(ext)
hdu_table.header['EXPTUSED'] = self.exptime
hdu_table.header['ZPINIT'] = self.zeropoint
hdu_table.header['CATALOG'] = self.cat_path
hdu_table.header['IMAGE'] = self.image_path
hdu_table.header['CONFMAP'] = self.conf_path
hdu_primary = fits.PrimaryHDU()
hdulist = fits.HDUList([hdu_primary, hdu_table])
hdulist.writeto(output_filename, clobber=True)
######################
# FUNCTIONS
######################
def list_catalogues(directory):
"""List all CASU object detection catalogues in a given directory.
Parameters
----------
directory : str
Path to the directory containing CASU's pipelined data.
This directory will be searched recursively.
Returns
-------
catalogues : list
List of all files in ``directory`` that have suffix '_cat.fits'.
"""
log.info('Searching for catalogues in %s' % directory)
catalogues = []
for mydir in os.walk(directory, followlinks=True):
log.info('Entering %s' % mydir[0])
for filename in mydir[2]:
# Only consider files of the form *_cat.fits
if filename.endswith("_cat.fits"):
catalogues.append(os.path.join(mydir[0], filename))
log.info('Found %d catalogues' % len(catalogues))
return catalogues
def get_metadata(path):
"""Returns a dictionary with the metadata for a FITS detection table.
Parameters
----------
path : str
Path to the FITS detection table.
Returns
-------
metadata : dict
The set of keyword-value pairs describing the detection table.
"""
with log.log_to_file(os.path.join(constants.LOGDIR, 'index.log')):
import socket
log.info(util.get_pid()+': '+path)
metadata = None
try:
cat = DetectionCatalogue(path, only_accept_iphas=False)
metadata = cat.get_metadata()
# Replace "None" values to NaNs
# to enable FITS to treat numeric columns correctly
for keyword in metadata:
if metadata[keyword] == None:
metadata[keyword] = np.nan
except CatalogueException, e:
log.warning('%s: CatalogueException: %s' % (path, e))
return None
except Exception, e:
log.error('%s: *UNEXPECTED EXCEPTION*: %s' % (path, e))
return None
return metadata
def save_metadata(clusterview,
target=os.path.join(constants.DESTINATION, 'metadata.fits'),
data=constants.RAWDATADIR):
"""Produces a table detailing the properties of all runs.
Parameters
----------
clusterview : IPython.parallel.client.view.View (or similar)
Cluster view object to carry out the processing.
"""
# Get the metadata for all catalogues in parallel on the cluster
catalogues = list_catalogues(data)
results = clusterview.map(get_metadata, catalogues, block=True)
metadata = []
for row in results:
if row is not None:
metadata.append(row)
    # Finally, create and write the output table
    t = Table(metadata, names=metadata[0].keys())
    t.write(target, format='fits', overwrite=True)
def sanitise_zeropoints():
"""Writes a CSV file containing zeropoint overrides.
The file produced is used to enforce a fixed offset between the r- and Ha-
band zeropoints at the time of creating the augmented catalogues.
"""
filename_runs = os.path.join(constants.DESTINATION,
'runs.csv')
runs = ascii.read(filename_runs)
out = file(ZEROPOINTS_TABLE_PATH, 'w')
out.write('run,zp\n')
# Override each H-alpha zeropoint by enforcing zp(r) - zp(Halpha) = 3.14
# which is what is imposed by Vega
for row in runs:
if row['WFFBAND'] == 'Halpha':
# Find the index of an r-band run in the same night
idx_r = np.argwhere(
(np.abs(row['MJD-OBS'] - runs.field('MJD-OBS')) < 0.4)
& (runs.field('WFFBAND') == 'r'))[0][0]
zp = float(runs[idx_r]['MAGZPT']) - 3.14
out.write('{0},{1}\n'.format(row['run'], zp))
out.close()
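# The resulting CSV is expected to look like the sketch below (run numbers
# and zeropoint values illustrative); it is later read into ZEROPOINTS_TABLE,
# which provides the per-run 'run' and 'zp' override columns used by
# get_zeropoint():
#
#     run,zp
#     541185,21.56
#     541217,21.61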
def convert_one(path):
"""Created a catalogue from one given pipeline table.
path -- of the pipeline table.
"""
with log.log_to_file(os.path.join(constants.LOGDIR,
'detections.log')):
try:
import socket
pid = socket.gethostname()+'/'+str(os.getpid())
log.info('START:'+pid+': '+path)
cat = DetectionCatalogue(path)
cat.save_detections()
log.info('FINISH:'+pid+': '+path)
except CatalogueException, e:
log.warning('%s: CatalogueException: %s' % (path, e))
return None
except Exception, e:
log.error('%s: *UNEXPECTED EXCEPTION*: %s' % (path, e))
return None
def convert_catalogues(clusterview, data=constants.RAWDATADIR):
"""Creates catalogues for all pipeline tables found in the data directory.
clusterview -- IPython.parallel cluster view
data -- directory containing Cambridge's pipeline catalogues.
"""
# Make sure the output directory exists
target = os.path.join(constants.DESTINATION, 'detected')
util.setup_dir(target)
    # Create a list of all pipeline catalogues
catalogues = list_catalogues(data)
# Run the conversion for each catalogue
result = clusterview.map(convert_one, catalogues, block=True)
return result
###################
# MAIN EXECUTION
###################
if __name__ == '__main__':
log.setLevel('INFO')
#Some test-cases:
#convert_one(constants.RAWDATADIR+'/iphas_aug2004a/r413424_cat.fits')
#sanitise_zeropoints()
#print get_metadata(constants.RAWDATADIR+'/uvex_oct2012/r942046_cat.fits')
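    # A full cluster run might look like this sketch (untested here; assumes
    # an IPython.parallel cluster has already been started):
    #from IPython.parallel import Client
    #clusterview = Client()[:]
    #convert_catalogues(clusterview)
    #save_metadata(clusterview)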
|
{
"content_hash": "22c6f37a79ca7864be29aa1052c0edc0",
"timestamp": "",
"source": "github",
"line_count": 1298,
"max_line_length": 83,
"avg_line_length": 43.301232665639446,
"alnum_prop": 0.5471577261809447,
"repo_name": "barentsen/iphas-dr2",
"id": "c632c61ce5dfc7f163d92734f29c5e9e955e947b",
"size": "56251",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dr2/detections.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Perl",
"bytes": "3540"
},
{
"name": "Python",
"bytes": "274237"
},
{
"name": "Shell",
"bytes": "22508"
},
{
"name": "TeX",
"bytes": "794230"
}
],
"symlink_target": ""
}
|
from django.template import Context, Template
from django.test import TestCase
class TestTags(TestCase):
def render_template(self, string, context=None):
"""
helper function for template tag testing, from https://stackoverflow.com/a/1690879/
:param string:
:param context:
:return:
"""
context = context or {}
context = Context(context)
return Template(string).render(context)
def test_define(self):
{%- raw %}
template = """
{% load myfilters %}
{% if item %}
{% define "Edit" as action %}
{% else %}
{% define "Create" as action %}
{% endif %}
Would you like to {{action}} this item
"""
{%- endraw %}
expected_default = "Would you like to Create this item"
self.assertHTMLEqual(self.render_template(template), expected_default)
self.assertHTMLEqual(self.render_template(template, context={"item": False}), expected_default)
self.assertHTMLEqual(self.render_template(template, context={"item": True}), "Would you like to Edit this item")
|
{
"content_hash": "23bfd59b577520413c411766398f5f74",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 120,
"avg_line_length": 31.555555555555557,
"alnum_prop": 0.6038732394366197,
"repo_name": "Benoss/django-cookiecutter",
"id": "4117bad321d090e77938f83baef9874b6e004796",
"size": "1136",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/apps/core/templatetags/tests.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "63"
},
{
"name": "HTML",
"bytes": "4539"
},
{
"name": "JavaScript",
"bytes": "2147"
},
{
"name": "Python",
"bytes": "24711"
},
{
"name": "Shell",
"bytes": "4879"
}
],
"symlink_target": ""
}
|
from __future__ import division # Float division by default - integer division can still be done explicitly with '//'
from __future__ import absolute_import
import numpy as np
import math
import pygame
from pyecs import *
from pycompupipe.other import child_support_points
from pycompupipe.other import MathUtils
from pycompupipe.components import GuiElement
class DrawPath(Component):
"""docstring for DrawPath"""
def __init__(self, draw_event_name, arrow=0, color=(0,0,0), *args,**kwargs):
super(DrawPath, self).__init__(*args,**kwargs)
self.draw_event_name = draw_event_name
self.arrow = arrow
self.color = color
@component_callback
def component_attached(self):
self.entity.register_callback(self.draw_event_name, self.draw)
def draw(self, screen):
points = []
# build path from support points
for support in child_support_points(self.entity):
gui = support.get_component(GuiElement)
x,y = gui.entity.fire_callbacks_pipeline("position",(0.5,0.5))
points.append((x,y))
if len(points) >= 2:
# draw path
pygame.draw.aalines(screen, self.color, False, points)
if self.arrow > 0:
# define arrow points
above = -0.4*self.arrow, -0.4*self.arrow
below = -0.4*self.arrow, +0.4*self.arrow
points_arrow = np.array([above,(0,0),below])
# calculate angle of arrow
dx, dy = np.diff(points[-2:],axis=0)[0]
angle = math.atan2(float(dy),float(dx))
# rotate and position arrow points
points_arrow = MathUtils.rotate(points_arrow, angle)
points_arrow += points[-1]
# draw arrow
pygame.draw.aalines(screen, self.color, False, points_arrow)
|
{
"content_hash": "63f000d32cd84220425d99488f0a87fe",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 123,
"avg_line_length": 35.370370370370374,
"alnum_prop": 0.5968586387434555,
"repo_name": "xaedes/PyCompuPipe",
"id": "04ab8b71018d7559038b3bea93a7ff4c5ac7c902",
"size": "1960",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pycompupipe/components/drawing/draw_path.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "95234"
}
],
"symlink_target": ""
}
|
from base import BaseBot
from scraping import movement
class MovementBot(BaseBot):
"""Logging functions for the bot"""
def __init__(self, browser, config, planets):
self.movement_scraper = movement.Movement(browser, config)
super(MovementBot, self).__init__(browser, config, planets)
def check_hostile_activity(self):
fleet_movement = self.movement_scraper.get_fleet_movement()
hostile_movements = filter(lambda x: not x.friendly, fleet_movement)
if len(hostile_movements) == 0:
self.logger.info("There is no hostile activity now")
for hostile_movement in hostile_movements:
self.logger.warning(hostile_movement)
self.sms_sender.send_sms(hostile_movement)
|
{
"content_hash": "2f106c8832841998d7aac85d663a599c",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 76,
"avg_line_length": 37.85,
"alnum_prop": 0.6776750330250991,
"repo_name": "winiciuscota/OG-Bot",
"id": "dd3eeef5298f705da1e7523e1ce109051130810f",
"size": "757",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ogbot/core/movement.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "119264"
}
],
"symlink_target": ""
}
|
'''Reads alignments in PSL format and sequences in FASTA format and extracts
regions or whole sequences that do not map to a reference sequence.
The script requires the -singleHit parameter for pslRep.
'''
import sys
from collections import namedtuple
from Bio import SeqIO
from Bio.SeqRecord import SeqRecord
from Bio.Seq import Seq
SeqAlign = namedtuple('SeqAlign', ['name', 'chrom', 'qStart', 'qEnd'])
def parse_psl(psl_file):
print >> sys.stderr, 'parsing %s...' % psl_file
alignments = {}
for n, line in enumerate(open(psl_file)):
cols = line.strip().split()
name = cols[9]
qStart = int(cols[11])
qEnd = int(cols[12])
chrom = cols[13]
seqalign = SeqAlign(name, chrom, qStart, qEnd)
alignments[name] = seqalign
if n % 1000 == 0:
print >> sys.stderr, '...', n
return alignments
def extract(alignments, fasta_file, mincov=30, minlen=100):
print >> sys.stderr, 'parsing %s...' % fasta_file
for n, rec in enumerate(SeqIO.parse(fasta_file, 'fasta')):
seq = rec.seq.tostring()
try:
algn = alignments[rec.name]
except KeyError: # an entire sequence is unmapped
SeqIO.write(rec, sys.stdout, 'fasta')
else:
mapped_seq = seq[algn.qStart:algn.qEnd]
if float(len(mapped_seq))/len(rec) * 100 < mincov:
continue
head, sep, tail = seq.partition(mapped_seq)
if head and len(head) >= minlen:
seqrec = SeqRecord(id=rec.name+'_1',
description='',
seq=Seq(head))
SeqIO.write(seqrec, sys.stdout, 'fasta')
if tail and len(tail) >= minlen:
seqrec = SeqRecord(id=rec.name+'_2',
description='',
seq=Seq(tail))
SeqIO.write(seqrec, sys.stdout, 'fasta')
if n % 1000 == 0:
print >> sys.stderr, '...', n
if __name__=='__main__':
if len(sys.argv) < 5:
print >> sys.stderr, 'Usage: python extract_unmapped_seq.py' + \
' <psl file> <fasta file> [min coverage]' + \
'[min unmapped length]'
raise SystemExit
else:
psl_file = sys.argv[1]
fasta_file = sys.argv[2]
mincov = int(sys.argv[3])
minlen = int(sys.argv[4])
alignments = parse_psl(psl_file)
extract(alignments, fasta_file, mincov, minlen)
|
{
"content_hash": "c4708ed1dd0625596d38c536c61298a6",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 74,
"avg_line_length": 33.626666666666665,
"alnum_prop": 0.5400475812846947,
"repo_name": "likit/BioUtils",
"id": "8ef41152c9db34efa963063330ddf8b9f412641d",
"size": "2522",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "extract_mapped_seq.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "55417"
},
{
"name": "Shell",
"bytes": "305"
}
],
"symlink_target": ""
}
|
""" Models for display visual shapes whose attributes can be associated
with data columns from data sources.
"""
from __future__ import absolute_import
from ..plot_object import PlotObject
from ..mixins import FillProps, LineProps, TextProps
from ..enums import Direction, Anchor
from ..properties import AngleSpec, Bool, DistanceSpec, Enum, Float, Include, Instance, NumberSpec, StringSpec
from .mappers import LinearColorMapper
class Glyph(PlotObject):
""" Base class for all glyphs/marks/geoms/whatever-you-call-'em in Bokeh.
"""
visible = Bool(help="""
Whether the glyph should render or not.
""")
class AnnularWedge(Glyph):
""" Render annular wedges.
Example
-------
.. bokeh-plot:: ../tests/glyphs/AnnularWedge.py
:source-position: none
*source:* `tests/glyphs/AnnularWedge.py <https://github.com/bokeh/bokeh/tree/master/tests/glyphs/AnnularWedge.py>`_
"""
x = NumberSpec("x", help="""
The x-coordinates of the center of the annular wedges.
""")
y = NumberSpec("y", help="""
The y-coordinates of the center of the annular wedges.
""")
inner_radius = DistanceSpec("inner_radius", help="""
The inner radii of the annular wedges.
""")
outer_radius = DistanceSpec("outer_radius", help="""
The outer radii of the annular wedges.
""")
start_angle = AngleSpec("start_angle", help="""
The angles to start the annular wedges, in radians, as measured from
the horizontal.
""")
end_angle = AngleSpec("end_angle", help="""
The angles to end the annular wedges, in radians, as measured from
the horizontal.
""")
direction = Enum(Direction, help="""
Which direction to stroke between the start and end angles.
""")
line_props = Include(LineProps, use_prefix=False, help="""
The %s values for the annular wedges.
""")
fill_props = Include(FillProps, use_prefix=False, help="""
The %s values for the annular wedges.
""")
class Annulus(Glyph):
""" Render annuli.
Example
-------
.. bokeh-plot:: ../tests/glyphs/Annulus.py
:source-position: none
*source:* `tests/glyphs/Annulus.py <https://github.com/bokeh/bokeh/tree/master/tests/glyphs/Annulus.py>`_
"""
x = NumberSpec("x", help="""
The x-coordinates of the center of the annuli.
""")
y = NumberSpec("y", help="""
The y-coordinates of the center of the annuli.
""")
inner_radius = DistanceSpec("inner_radius", help="""
The inner radii of the annuli.
""")
outer_radius = DistanceSpec("outer_radius", help="""
The outer radii of the annuli.
""")
line_props = Include(LineProps, use_prefix=False, help="""
The %s values for the annuli.
""")
fill_props = Include(FillProps, use_prefix=False, help="""
The %s values for the annuli.
""")
class Arc(Glyph):
""" Render arcs.
Example
-------
.. bokeh-plot:: ../tests/glyphs/Arc.py
:source-position: none
*source:* `tests/glyphs/Arc.py <https://github.com/bokeh/bokeh/tree/master/tests/glyphs/Arc.py>`_
"""
x = NumberSpec("x", help="""
The x-coordinates of the center of the arcs.
""")
y = NumberSpec("y", help="""
The y-coordinates of the center of the arcs.
""")
radius = DistanceSpec("radius", help="""
Radius of the arc.
""")
start_angle = AngleSpec("start_angle", help="""
The angles to start the arcs, in radians, as measured from the horizontal.
""")
end_angle = AngleSpec("end_angle", help="""
The angles to end the arcs, in radians, as measured from the horizontal.
""")
direction = Enum(Direction, help="""
Which direction to stroke between the start and end angles.
""")
line_props = Include(LineProps, use_prefix=False, help="""
The %s values for the arcs.
""")
class Bezier(Glyph):
u""" Render Bézier curves.
For more information consult the `Wikipedia article for Bézier curve`_.
.. _Wikipedia article for Bézier curve: http://en.wikipedia.org/wiki/Bézier_curve
Example
-------
.. bokeh-plot:: ../tests/glyphs/Bezier.py
:source-position: none
*source:* `tests/glyphs/Bezier.py <https://github.com/bokeh/bokeh/tree/master/tests/glyphs/Bezier.py>`_
"""
x0 = NumberSpec("x0", help="""
The x-coordinates of the starting points.
""")
y0 = NumberSpec("y0", help="""
The y-coordinates of the starting points.
""")
x1 = NumberSpec("x1", help="""
The x-coordinates of the ending points.
""")
y1 = NumberSpec("y1", help="""
The y-coordinates of the ending points.
""")
cx0 = NumberSpec("cx0", help="""
The x-coordinates of first control points.
""")
cy0 = NumberSpec("cy0", help="""
The y-coordinates of first control points.
""")
cx1 = NumberSpec("cx1", help="""
The x-coordinates of second control points.
""")
cy1 = NumberSpec("cy1", help="""
The y-coordinates of second control points.
""")
line_props = Include(LineProps, use_prefix=False, help=u"""
The %s values for the Bézier curves.
""")
class Gear(Glyph):
""" Render gears.
The details and nomenclature concerning gear construction can
be quite involved. For more information, consult the `Wikipedia
article for Gear`_.
.. _Wikipedia article for Gear: http://en.wikipedia.org/wiki/Gear
Example
-------
.. bokeh-plot:: ../tests/glyphs/Gear.py
:source-position: none
*source:* `tests/glyphs/Gear.py <https://github.com/bokeh/bokeh/tree/master/tests/glyphs/Gear.py>`_
"""
x = NumberSpec("x", help="""
The x-coordinates of the center of the gears.
""")
y = NumberSpec("y", help="""
The y-coordinates of the center of the gears.
""")
angle = AngleSpec(default=0, help="""
The angle the gears are rotated from horizontal. [rad]
""")
module = NumberSpec("module", help="""
A scaling factor, given by::
m = p / pi
where *p* is the circular pitch, defined as the distance from one
face of a tooth to the corresponding face of an adjacent tooth on
the same gear, measured along the pitch circle. [float]
""")
teeth = NumberSpec("teeth", help="""
How many teeth the gears have. [int]
""")
pressure_angle = NumberSpec(default=20, help= """
The complement of the angle between the direction that the teeth
exert force on each other, and the line joining the centers of the
two gears. [deg]
""")
# TODO: (bev) evidently missing a test for default value
shaft_size = NumberSpec(default=0.3, help="""
The central gear shaft size as a percentage of the overall gear
size. [float]
""")
# TODO: (bev) evidently missing a test for default value
internal = NumberSpec(default=False, help="""
Whether the gear teeth are internal. [bool]
""")
line_props = Include(LineProps, use_prefix=False, help="""
The %s values for the gears.
""")
fill_props = Include(FillProps, use_prefix=False, help="""
The %s values for the gears.
""")
class Image(Glyph):
""" Render images given as scalar data together with a color
mapper.
"""
def __init__(self, **kwargs):
if 'palette' in kwargs and 'color_mapper' in kwargs:
raise ValueError("only one of 'palette' and 'color_mapper' may be specified")
elif 'color_mapper' not in kwargs:
# Use a palette (given or default)
palette = kwargs.pop('palette', 'Greys9')
mapper = LinearColorMapper(palette)
reserve_val = kwargs.pop('reserve_val', None)
if reserve_val is not None:
mapper.reserve_val = reserve_val
reserve_color = kwargs.pop('reserve_color', None)
if reserve_color is not None:
mapper.reserve_color = reserve_color
kwargs['color_mapper'] = mapper
super(Image, self).__init__(**kwargs)
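    # For instance, Image(image="image", x=0, y=0, dw=10, dh=10,
    # palette="Spectral11") would build a LinearColorMapper from the named
    # palette, whereas passing color_mapper= supplies a mapper directly
    # (illustrative call; the two keyword arguments are mutually exclusive).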
image = NumberSpec("image", help="""
The arrays of scalar data for the images to be colormapped.
""")
x = NumberSpec("x", help="""
The x-coordinates to locate the image anchors.
""")
y = NumberSpec("y", help="""
The y-coordinates to locate the image anchors.
""")
dw = DistanceSpec("dw", help="""
The widths of the plot regions that the images will occupy.
.. note::
This is not the number of pixels that an image is wide.
That number is fixed by the image itself.
""")
dh = DistanceSpec("dh", help="""
The height of the plot region that the image will occupy.
.. note::
This is not the number of pixels that an image is tall.
That number is fixed by the image itself.
""")
dilate = Bool(False, help="""
Whether to always round fractional pixel locations in such a way
as to make the images bigger.
This setting may be useful if pixel rounding errors are causing
images to have a gap between them, when they should appear flush.
""")
color_mapper = Instance(LinearColorMapper, help="""
A ``ColorMapper`` to use to map the scalar data from ``image``
into RGBA values for display.
.. note::
The color mapping step happens on the client.
""")
# TODO: (bev) support anchor property for Image
# ref: https://github.com/bokeh/bokeh/issues/1763
class ImageRGBA(Glyph):
""" Render images given as RGBA data.
"""
image = NumberSpec("image", help="""
The arrays of RGBA data for the images.
""")
x = NumberSpec("x", help="""
The x-coordinates to locate the image anchors.
""")
y = NumberSpec("y", help="""
The y-coordinates to locate the image anchors.
""")
rows = NumberSpec("rows", help="""
The numbers of rows in the images
""")
cols = NumberSpec("cols", help="""
The numbers of columns in the images
""")
dw = DistanceSpec("dw", help="""
The widths of the plot regions that the images will occupy.
.. note::
This is not the number of pixels that an image is wide.
That number is fixed by the image itself.
""")
dh = DistanceSpec("dh", help="""
The height of the plot region that the image will occupy.
.. note::
This is not the number of pixels that an image is tall.
That number is fixed by the image itself.
""")
dilate = Bool(False, help="""
Whether to always round fractional pixel locations in such a way
as to make the images bigger.
This setting may be useful if pixel rounding errors are causing
images to have a gap between them, when they should appear flush.
""")
# TODO: (bev) support anchor property for ImageRGBA
# ref: https://github.com/bokeh/bokeh/issues/1763
class ImageURL(Glyph):
""" Render images loaded from given URLs.
Example
-------
.. bokeh-plot:: ../tests/glyphs/ImageURL.py
:source-position: none
*source:* `tests/glyphs/ImageURL.py <https://github.com/bokeh/bokeh/tree/master/tests/glyphs/ImageURL.py>`_
"""
url = NumberSpec("url", help="""
The URLs to retrieve images from.
.. note::
The actual retrieving and loading of the images happens on
the client.
""")
x = NumberSpec("x", help="""
The x-coordinates to locate the image anchors.
""")
y = NumberSpec("y", help="""
The y-coordinates to locate the image anchors.
""")
# TODO: (bev) rename to "dw" for consistency
w = DistanceSpec("w", help="""
The widths of the plot regions that the images will occupy.
.. note::
This is not the number of pixels that an image is wide.
That number is fixed by the image itself.
.. note::
This may be renamed to "dw" in the future.
""")
# TODO: (bev) rename to "dh" for consistency
h = DistanceSpec("h", help="""
The height of the plot region that the image will occupy.
.. note::
This is not the number of pixels that an image is tall.
That number is fixed by the image itself.
.. note::
This may be renamed to "dh" in the future.
""")
angle = AngleSpec(default=0, help="""
The angles to rotate the images, in radians as measured from the
horizontal.
""")
global_alpha = Float(1.0, help="""
The opacity that each image is rendered with.
""")
dilate = Bool(False, help="""
Whether to always round fractional pixel locations in such a way
as to make the images bigger.
This setting may be useful if pixel rounding errors are causing
images to have a gap between them, when they should appear flush.
""")
anchor = Enum(Anchor, help="""
What position of the image should be anchored at the `x`, `y`
coordinates.
""")
class Line(Glyph):
""" Render a single line.
.. note::
The ``Line`` glyph is different from most other glyphs in that
the vector of values only produces one glyph on the Plot.
Example
-------
.. bokeh-plot:: ../tests/glyphs/Line.py
:source-position: none
*source:* `tests/glyphs/Line.py <https://github.com/bokeh/bokeh/tree/master/tests/glyphs/Line.py>`_
"""
x = NumberSpec("x", help="""
The x-coordinates for the points of the line.
""")
y = NumberSpec("y", help="""
The y-coordinates for the points of the line.
""")
line_props = Include(LineProps, use_prefix=False, help="""
The %s values for the line.
""")
class MultiLine(Glyph):
""" Render several lines.
.. note::
The data for the ``MultiLine`` glyph is different in that the
vector of values is not a vector of scalars. Rather, it is a
"list of lists".
Example
-------
.. bokeh-plot:: ../tests/glyphs/MultiLine.py
:source-position: none
*source:* `tests/glyphs/MultiLine.py <https://github.com/bokeh/bokeh/tree/master/tests/glyphs/MultiLine.py>`_
"""
xs = NumberSpec("xs", help="""
The x-coordinates for all the lines, given as a "list of lists".
""")
ys = NumberSpec("ys", help="""
    The y-coordinates for all the lines, given as a "list of lists".
""")
line_props = Include(LineProps, use_prefix=False, help="""
The %s values for the lines.
""")
class Oval(Glyph):
u""" Render ovals.
.. note::
This glyph renders ovals using Bézier curves, which are similar,
but not identical to ellipses.
Example
-------
.. bokeh-plot:: ../tests/glyphs/Oval.py
:source-position: none
*source:* `tests/glyphs/Oval.py <https://github.com/bokeh/bokeh/tree/master/tests/glyphs/Oval.py>`_
"""
x = NumberSpec("x", help="""
The x-coordinates of the centers of the ovals.
""")
y = NumberSpec("y", help="""
The y-coordinates of the centers of the ovals.
""")
width = DistanceSpec("width", help="""
The overall widths of each oval.
""")
height = DistanceSpec("height", help="""
The overall height of each oval.
""")
angle = AngleSpec("angle", help="""
The angle the ovals are rotated from horizontal. [rad]
""")
line_props = Include(LineProps, use_prefix=False, help="""
The %s values for the ovals.
""")
fill_props = Include(FillProps, use_prefix=False, help="""
The %s values for the ovals.
""")
class Patch(Glyph):
""" Render a single patch.
.. note::
The ``Patch`` glyph is different from most other glyphs in that
the vector of values only produces one glyph on the Plot.
Example
-------
.. bokeh-plot:: ../tests/glyphs/Patch.py
:source-position: none
*source:* `tests/glyphs/Patch.py <https://github.com/bokeh/bokeh/tree/master/tests/glyphs/Patch.py>`_
"""
x = NumberSpec("x", help="""
The x-coordinates for the points of the patch.
.. note::
A patch may comprise multiple polygons. In this case the
x-coordinates for each polygon should be separated by NaN
values in the sequence.
""")
y = NumberSpec("y", help="""
The y-coordinates for the points of the patch.
.. note::
A patch may comprise multiple polygons. In this case the
y-coordinates for each polygon should be separated by NaN
values in the sequence.
""")
line_props = Include(LineProps, use_prefix=False, help="""
The %s values for the patch.
""")
fill_props = Include(FillProps, use_prefix=False, help="""
The %s values for the patch.
""")
class Patches(Glyph):
""" Render several patches.
.. note::
The data for the ``Patches`` glyph is different in that the
vector of values is not a vector of scalars. Rather, it is a
"list of lists".
Example
-------
.. bokeh-plot:: ../tests/glyphs/Patches.py
:source-position: none
*source:* `tests/glyphs/Patches.py <https://github.com/bokeh/bokeh/tree/master/tests/glyphs/Patches.py>`_
"""
xs = NumberSpec("xs", help="""
The x-coordinates for all the patches, given as a "list of lists".
.. note::
Individual patches may comprise multiple polygons. In this case
the x-coordinates for each polygon should be separated by NaN
values in the sublists.
""")
ys = NumberSpec("ys", help="""
The y-coordinates for all the patches, given as a "list of lists".
.. note::
Individual patches may comprise multiple polygons. In this case
the y-coordinates for each polygon should be separated by NaN
values in the sublists.
""")
line_props = Include(LineProps, use_prefix=False, help="""
The %s values for the patches.
""")
fill_props = Include(FillProps, use_prefix=False, help="""
The %s values for the patches.
""")
class Quad(Glyph):
""" Render axis-aligned quads.
Example
-------
.. bokeh-plot:: ../tests/glyphs/Quad.py
:source-position: none
*source:* `tests/glyphs/Quad.py <https://github.com/bokeh/bokeh/tree/master/tests/glyphs/Quad.py>`_
"""
left = NumberSpec("left", help="""
The x-coordinates of the left edges.
""")
right = NumberSpec("right", help="""
The x-coordinates of the right edges.
""")
bottom = NumberSpec("bottom", help="""
The y-coordinates of the bottom edges.
""")
top = NumberSpec("top", help="""
The y-coordinates of the top edges.
""")
line_props = Include(LineProps, use_prefix=False, help="""
The %s values for the quads.
""")
fill_props = Include(FillProps, use_prefix=False, help="""
The %s values for the quads.
""")
class Quadratic(Glyph):
""" Render parabolas.
Example
-------
.. bokeh-plot:: ../tests/glyphs/Quadratic.py
:source-position: none
*source:* `tests/glyphs/Quadratic.py <https://github.com/bokeh/bokeh/tree/master/tests/glyphs/Quadratic.py>`_
"""
x0 = NumberSpec("x0", help="""
The x-coordinates of the starting points.
""")
y0 = NumberSpec("y0", help="""
The y-coordinates of the starting points.
""")
x1 = NumberSpec("x1", help="""
The x-coordinates of the ending points.
""")
y1 = NumberSpec("y1", help="""
The y-coordinates of the ending points.
""")
cx = NumberSpec("cx", help="""
The x-coordinates of the control points.
""")
cy = NumberSpec("cy", help="""
The y-coordinates of the control points.
""")
line_props = Include(LineProps, use_prefix=False, help="""
The %s values for the parabolas.
""")
class Ray(Glyph):
""" Render rays.
Example
-------
.. bokeh-plot:: ../tests/glyphs/Ray.py
:source-position: none
*source:* `tests/glyphs/Ray.py <https://github.com/bokeh/bokeh/tree/master/tests/glyphs/Ray.py>`_
"""
x = NumberSpec("x", help="""
The x-coordinates to start the rays.
""")
y = NumberSpec("y", help="""
The y-coordinates to start the rays.
""")
angle = AngleSpec("angle", help="""
The angles in radians to extend the rays, as measured from the
horizontal.
""")
length = DistanceSpec("length", help="""
The length to extend the ray. Note that this ``length`` defaults
to screen units.
""")
line_props = Include(LineProps, use_prefix=False, help="""
The %s values for the rays.
""")
class Rect(Glyph):
""" Render rectangles.
Example
-------
.. bokeh-plot:: ../tests/glyphs/Rect.py
:source-position: none
*source:* `tests/glyphs/Rect.py <https://github.com/bokeh/bokeh/tree/master/tests/glyphs/Rect.py>`_
"""
x = NumberSpec("x", help="""
The x-coordinates of the centers of the rectangles.
""")
y = NumberSpec("y", help="""
The y-coordinates of the centers of the rectangles.
""")
width = DistanceSpec("width", help="""
The overall widths of the rectangles.
""")
height = DistanceSpec("height", help="""
The overall heights of the rectangles.
""")
angle = AngleSpec("angle", help="""
The angles to rotate the rectangles, in radians, as measured from
the horizontal.
""")
dilate = Bool(False, help="""
Whether to always round fractional pixel locations in such a way
as to make the rectangles bigger.
This setting may be useful if pixel rounding errors are causing
rectangles to have a gap between them, when they should appear
flush.
""")
line_props = Include(LineProps, use_prefix=False, help="""
The %s values for the rectangles.
""")
fill_props = Include(FillProps, use_prefix=False, help="""
The %s values for the rectangles.
""")
class Segment(Glyph):
""" Render segments.
Example
-------
.. bokeh-plot:: ../tests/glyphs/Segment.py
:source-position: none
*source:* `tests/glyphs/Segment.py <https://github.com/bokeh/bokeh/tree/master/tests/glyphs/Segment.py>`_
"""
x0 = NumberSpec("x0", help="""
The x-coordinates of the starting points.
""")
y0 = NumberSpec("y0", help="""
The y-coordinates of the starting points.
""")
x1 = NumberSpec("x1", help="""
The x-coordinates of the ending points.
""")
y1 = NumberSpec("y1", help="""
The y-coordinates of the ending points.
""")
line_props = Include(LineProps, use_prefix=False, help="""
The %s values for the segments.
""")
class Text(Glyph):
""" Render text.
Example
-------
.. bokeh-plot:: ../tests/glyphs/Text.py
:source-position: none
*source:* `tests/glyphs/Text.py <https://github.com/bokeh/bokeh/tree/master/tests/glyphs/Text.py>`_
"""
x = NumberSpec("x", help="""
The x-coordinates to locate the text anchors.
""")
y = NumberSpec("y", help="""
The y-coordinates to locate the text anchors.
""")
text = StringSpec("text", help="""
The text values to render.
""")
angle = AngleSpec(default=0, help="""
    The angles to rotate the text, in radians, as measured from the horizontal.
""")
x_offset = NumberSpec(default=0, help="""
Offset values to apply to the x-coordinates.
This is useful, for instance, if it is desired to "float" text a fixed
distance in screen units from a given data position.
""")
y_offset = NumberSpec(default=0, help="""
Offset values to apply to the y-coordinates.
This is useful, for instance, if it is desired to "float" text a fixed
distance in screen units from a given data position.
""")
text_props = Include(TextProps, use_prefix=False, help="""
The %s values for the text.
""")
class Wedge(Glyph):
""" Render wedges.
Example
-------
.. bokeh-plot:: ../tests/glyphs/Wedge.py
:source-position: none
*source:* `tests/glyphs/Wedge.py <https://github.com/bokeh/bokeh/tree/master/tests/glyphs/Wedge.py>`_
"""
x = NumberSpec("x", help="""
The x-coordinates of the points of the wedges.
""")
y = NumberSpec("y", help="""
The y-coordinates of the points of the wedges.
""")
radius = DistanceSpec("radius", help="""
Radii of the wedges.
""")
start_angle = AngleSpec("start_angle", help="""
The angles to start the wedges, in radians, as measured from the horizontal.
""")
end_angle = AngleSpec("end_angle", help="""
The angles to end the wedges, in radians as measured from the horizontal.
""")
direction = Enum(Direction, help="""
Which direction to stroke between the start and end angles.
""")
line_props = Include(LineProps, use_prefix=False, help="""
The %s values for the wedges.
""")
fill_props = Include(FillProps, use_prefix=False, help="""
The %s values for the wedges.
""")
# XXX: allow `from bokeh.models.glyphs import *`
from .markers import (Marker, Asterisk, Circle, CircleCross, CircleX, Cross,
Diamond, DiamondCross, InvertedTriangle, Square,
SquareCross, SquareX, Triangle, X)
# Fool pyflakes
(Marker, Asterisk, Circle, CircleCross, CircleX, Cross, Diamond, DiamondCross,
InvertedTriangle, Square, SquareCross, SquareX, Triangle, X)
|
{
"content_hash": "2fb1e55ec163f4d95798df477413a121",
"timestamp": "",
"source": "github",
"line_count": 973,
"max_line_length": 119,
"avg_line_length": 26.082219938335047,
"alnum_prop": 0.6164788399401055,
"repo_name": "daodaoliang/bokeh",
"id": "cd15e0575fe6415967abbe568934fda1d0eb3ac8",
"size": "25408",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "bokeh/models/glyphs.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "5455"
},
{
"name": "CSS",
"bytes": "413395"
},
{
"name": "CoffeeScript",
"bytes": "1995470"
},
{
"name": "HTML",
"bytes": "1545838"
},
{
"name": "JavaScript",
"bytes": "4747"
},
{
"name": "Makefile",
"bytes": "5785"
},
{
"name": "Python",
"bytes": "1381168"
},
{
"name": "Shell",
"bytes": "13857"
}
],
"symlink_target": ""
}
|
""" order book on a security
"""
import logging
from sortedcontainers import SortedList
from orderbook.side import Side
from orderbook.trade import Trade
from orderbook.order_factory import create_order
class OrderBook:
""" implements a central limit order book on one security
"""
def __init__(self):
""" initialize
"""
self.tape = []
self.bids = SortedList()
self.asks = SortedList()
def process_order(self, quote):
""" process order given in argument quote
"""
order = create_order(quote)
logging.debug('before processing asks are %s', self.asks)
logging.debug('before processing bids are %s', self.bids)
trades = []
orders = self.asks if order.side == Side.BID else self.bids
while orders and order.quantity > 0:
if order.matches(orders[0]):
trade = Trade(order, orders[0])
order.quantity -= trade.quantity
orders[0].quantity -= trade.quantity
self.tape.append(trade)
trades.append(trade)
if not orders[0].quantity:
orders.pop(0)
else:
break
logging.debug('orders: %s, trades: %s, remaining quantity: %s',
orders, trades, order.quantity)
# If not fully filled update the book with a new order
# with remaining quantity
if order.quantity > 0:
orders = self.asks if order.side == Side.ASK else self.bids
orders.add(order)
order_in_book = order
else:
order_in_book = None
logging.debug('after processing asks are %s', self.asks)
logging.debug('after processing bids are %s', self.bids)
logging.debug('after processing trades are %s', trades)
return trades, order_in_book
def get_volume_at_price(self, side, price):
""" get the volume available in the orderbook
"""
bid_or_ask = Side[side.upper()]
volume = 0
orders = self.bids if bid_or_ask == Side.BID else self.asks
for i in orders:
if i.limit == price:
volume += i.quantity
return volume
def get_best_bid(self):
""" get best bid from orderbook
"""
return self.bids[0].limit
def get_worst_bid(self):
""" get worst bid from orderbook
"""
return self.bids[-1].limit
def get_best_ask(self):
""" get best ask from orderbook
"""
return self.asks[0].limit
def get_worst_ask(self):
""" get worst ask from orderbook
"""
return self.asks[-1].limit
def tape_dump(self, filename, filemode='w', tapemode=None):
""" write trades to file in arg filename
"""
with open(filename, filemode) as dumpfile:
template = 'Price: %s, Quantity: %s\n'
for i in self.tape:
dumpfile.write(template % (i['price'], i['quantity']))
if tapemode == 'wipe':
self.tape = []
def __str__(self):
""" orderbook representation
"""
ret = "***Bids***\n"
for value in self.bids:
ret += str(value)
ret += "\n***Asks***\n"
for value in self.asks:
ret += str(value)
ret += "\n***Trades***\n"
        # include at most the first 10 trades
for i in range(10):
try:
ret += str(self.tape[i]) + '\n'
except IndexError:
break
return ret + '\n'
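# Minimal usage sketch (hedged: the exact quote schema is defined by
# orderbook.order_factory.create_order and is assumed here to be a dict with
# side, quantity and price fields):
#
#   book = OrderBook()
#   book.process_order({'side': 'ask', 'quantity': 5, 'price': 101.0})
#   trades, rest = book.process_order(
#       {'side': 'bid', 'quantity': 5, 'price': 101.0})  # crosses the ask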
|
{
"content_hash": "8a8d0d54fd6e4f969ee1c9ca11e85c07",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 75,
"avg_line_length": 28.559055118110237,
"alnum_prop": 0.5340501792114696,
"repo_name": "martyw/central-limit-order-book",
"id": "43a7c4601366a909a6a629a5b05926e1cd2a772e",
"size": "3627",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "orderbook/orderbook.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "28679"
}
],
"symlink_target": ""
}
|
"""
Test for weights in GLM, Poisson and OLS/WLS, continuous test_glm.py
Below is a table outlining the test coverage.
================================= ====================== ====== ===================== === ======= ======== ============== ============= ============== ============= ============== ==== =========
Test Compared To params normalized_cov_params bse loglike deviance resid_response resid_pearson resid_deviance resid_working resid_anscombe chi2 optimizer
================================= ====================== ====== ===================== === ======= ======== ============== ============= ============== ============= ============== ==== =========
TestGlmPoissonPlain stata X X X X X X X X X X bfgs
TestGlmPoissonFwNr stata X X X X X X X X X X bfgs
TestGlmPoissonAwNr stata X X X X X X X X X X bfgs
TestGlmPoissonFwHC stata X X X X X
TestGlmPoissonAwHC stata X X X X X
TestGlmPoissonFwClu stata X X X X X
TestGlmTweedieAwNr R X X X X X X X newton
TestGlmGammaAwNr R X X special X X X X X bfgs
TestGlmGaussianAwNr R X X special X X X X X bfgs
TestRepeatedvsAggregated statsmodels.GLM X X bfgs
TestRepeatedvsAverage statsmodels.GLM X X bfgs
TestTweedieRepeatedvsAggregated statsmodels.GLM X X bfgs
TestTweedieRepeatedvsAverage statsmodels.GLM X X bfgs
TestBinomial0RepeatedvsAverage statsmodels.GLM X X
TestBinomial0RepeatedvsDuplicated statsmodels.GLM X X bfgs
TestBinomialVsVarWeights statsmodels.GLM X X X bfgs
TestGlmGaussianWLS statsmodels.WLS X X X bfgs
================================= ====================== ====== ===================== === ======= ======== ============== ============= ============== ============= ============== ==== =========
""" # noqa: E501
import warnings
import numpy as np
from numpy.testing import assert_allclose, assert_raises
import pandas as pd
import pytest
import statsmodels.api as sm
# load data into module namespace
from statsmodels.datasets.cpunish import load
from statsmodels.genmod.generalized_linear_model import GLM
from statsmodels.tools.sm_exceptions import SpecificationWarning
from statsmodels.tools.tools import add_constant
from .results import (
res_R_var_weight as res_r,
results_glm_poisson_weights as res_stata,
)
cpunish_data = load()
cpunish_data.exog = np.asarray(cpunish_data.exog)
cpunish_data.endog = np.asarray(cpunish_data.endog)
cpunish_data.exog[:, 3] = np.log(cpunish_data.exog[:, 3])
cpunish_data.exog = add_constant(cpunish_data.exog, prepend=False)
class CheckWeight:
def test_basic(self):
res1 = self.res1
res2 = self.res2
assert_allclose(res1.params, res2.params, atol=1e-6, rtol=2e-6)
corr_fact = getattr(self, 'corr_fact', 1)
if hasattr(res2, 'normalized_cov_params'):
assert_allclose(res1.normalized_cov_params,
res2.normalized_cov_params,
atol=1e-8, rtol=2e-6)
if isinstance(self, (TestRepeatedvsAggregated, TestRepeatedvsAverage,
TestTweedieRepeatedvsAggregated,
TestTweedieRepeatedvsAverage,
TestBinomial0RepeatedvsAverage,
TestBinomial0RepeatedvsDuplicated)):
            # Loglikelihood, scale, and deviance are different between the
            # repeated and exposure/average parameterizations
return None
assert_allclose(res1.bse, corr_fact * res2.bse, atol=1e-6, rtol=2e-6)
if isinstance(self, TestBinomialVsVarWeights):
# Binomial ll and deviance are different for 1d vs. counts...
return None
if isinstance(self, TestGlmGaussianWLS):
# This will not work right now either
return None
if not isinstance(self, (TestGlmGaussianAwNr, TestGlmGammaAwNr)):
# Matching R is hard
assert_allclose(res1.llf, res2.ll, atol=1e-6, rtol=1e-7)
assert_allclose(res1.deviance, res2.deviance, atol=1e-6, rtol=1e-7)
def test_residuals(self):
if isinstance(self, (TestRepeatedvsAggregated, TestRepeatedvsAverage,
TestTweedieRepeatedvsAggregated,
TestTweedieRepeatedvsAverage,
TestBinomial0RepeatedvsAverage,
TestBinomial0RepeatedvsDuplicated)):
# This will not match as different number of records
return None
res1 = self.res1
res2 = self.res2
if not hasattr(res2, 'resids'):
return None # use SkipError instead
resid_all = dict(zip(res2.resids_colnames, res2.resids.T))
assert_allclose(res1.resid_response, resid_all['resid_response'],
atol=1e-6, rtol=2e-6)
assert_allclose(res1.resid_pearson, resid_all['resid_pearson'],
atol=1e-6, rtol=2e-6)
assert_allclose(res1.resid_deviance, resid_all['resid_deviance'],
atol=1e-6, rtol=2e-6)
assert_allclose(res1.resid_working, resid_all['resid_working'],
atol=1e-6, rtol=2e-6)
if resid_all.get('resid_anscombe') is None:
return None
# Stata does not use var_weights in anscombe residuals, it seems.
# Adjust residuals to match our approach.
resid_a = res1.resid_anscombe
resid_a1 = resid_all['resid_anscombe'] * np.sqrt(res1._var_weights)
assert_allclose(resid_a, resid_a1, atol=1e-6, rtol=2e-6)
def test_compare_optimizers(self):
res1 = self.res1
if isinstance(res1.model.family, sm.families.Tweedie):
method = 'newton'
optim_hessian = 'eim'
else:
method = 'bfgs'
optim_hessian = 'oim'
if isinstance(self, (TestGlmPoissonFwHC, TestGlmPoissonAwHC,
TestGlmPoissonFwClu,
TestBinomial0RepeatedvsAverage)):
return None
start_params = res1.params
res2 = self.res1.model.fit(start_params=start_params, method=method,
optim_hessian=optim_hessian)
assert_allclose(res1.params, res2.params, atol=1e-3, rtol=2e-3)
H = res2.model.hessian(res2.params, observed=False)
res2_bse = np.sqrt(-np.diag(np.linalg.inv(H)))
assert_allclose(res1.bse, res2_bse, atol=1e-3, rtol=1e-3)
def test_pearson_chi2(self):
if hasattr(self.res2, 'chi2'):
assert_allclose(self.res1.pearson_chi2, self.res2.deviance_p,
atol=1e-6, rtol=1e-6)
def test_getprediction(self):
pred = self.res1.get_prediction()
assert_allclose(pred.linpred.se_mean, pred.linpred.se_mean, rtol=1e-10)
class TestGlmPoissonPlain(CheckWeight):
@classmethod
def setup_class(cls):
cls.res1 = GLM(cpunish_data.endog, cpunish_data.exog,
family=sm.families.Poisson()).fit()
cls.res2 = res_stata.results_poisson_none_nonrobust
class TestGlmPoissonFwNr(CheckWeight):
@classmethod
def setup_class(cls):
fweights = [1, 1, 1, 2, 2, 2, 3, 3, 3, 1, 1, 1, 2, 2, 2, 3, 3]
fweights = np.array(fweights)
cls.res1 = GLM(cpunish_data.endog, cpunish_data.exog,
family=sm.families.Poisson(), freq_weights=fweights
).fit()
cls.res2 = res_stata.results_poisson_fweight_nonrobust
class TestGlmPoissonAwNr(CheckWeight):
@classmethod
def setup_class(cls):
fweights = [1, 1, 1, 2, 2, 2, 3, 3, 3, 1, 1, 1, 2, 2, 2, 3, 3]
# faking aweights by using normalized freq_weights
fweights = np.array(fweights)
wsum = fweights.sum()
nobs = len(cpunish_data.endog)
aweights = fweights / wsum * nobs
cls.res1 = GLM(cpunish_data.endog, cpunish_data.exog,
family=sm.families.Poisson(), var_weights=aweights
).fit()
# Need to copy to avoid inplace adjustment
from copy import copy
cls.res2 = copy(res_stata.results_poisson_aweight_nonrobust)
cls.res2.resids = cls.res2.resids.copy()
# Need to adjust resids for pearson and deviance to add weights
cls.res2.resids[:, 3:5] *= np.sqrt(aweights[:, np.newaxis])
# prob_weights fail with HC, not properly implemented yet
class TestGlmPoissonPwNr(CheckWeight):
@classmethod
def setup_class(cls):
fweights = [1, 1, 1, 2, 2, 2, 3, 3, 3, 1, 1, 1, 2, 2, 2, 3, 3]
# faking aweights by using normalized freq_weights
fweights = np.array(fweights)
wsum = fweights.sum()
nobs = len(cpunish_data.endog)
aweights = fweights / wsum * nobs
cls.res1 = GLM(cpunish_data.endog, cpunish_data.exog,
family=sm.families.Poisson(), freq_weights=fweights
).fit(cov_type='HC1')
cls.res2 = res_stata.results_poisson_pweight_nonrobust
# TODO: find more informative reasons why these fail
@pytest.mark.xfail(reason='Known to fail', strict=True)
def test_basic(self):
super(TestGlmPoissonPwNr, self).test_basic()
@pytest.mark.xfail(reason='Known to fail', strict=True)
def test_compare_optimizers(self):
super(TestGlmPoissonPwNr, self).test_compare_optimizers()
class TestGlmPoissonFwHC(CheckWeight):
@classmethod
def setup_class(cls):
fweights = [1, 1, 1, 2, 2, 2, 3, 3, 3, 1, 1, 1, 2, 2, 2, 3, 3]
# faking aweights by using normalized freq_weights
fweights = np.array(fweights)
wsum = fweights.sum()
nobs = len(cpunish_data.endog)
aweights = fweights / wsum * nobs
cls.corr_fact = np.sqrt((wsum - 1.) / wsum)
mod = GLM(cpunish_data.endog, cpunish_data.exog,
family=sm.families.Poisson(),
freq_weights=fweights)
cls.res1 = mod.fit(cov_type='HC0')
# cov_kwds={'use_correction':False})
cls.res2 = res_stata.results_poisson_fweight_hc1
# var_weights (aweights) fail with HC, not properly implemented yet
class TestGlmPoissonAwHC(CheckWeight):
@classmethod
def setup_class(cls):
fweights = [1, 1, 1, 2, 2, 2, 3, 3, 3, 1, 1, 1, 2, 2, 2, 3, 3]
# faking aweights by using normalized freq_weights
fweights = np.array(fweights)
wsum = fweights.sum()
nobs = len(cpunish_data.endog)
aweights = fweights / wsum * nobs
# This is really close when corr_fact = (wsum - 1.) / wsum, but to
        # avoid having to loosen the precision of assert_allclose, I'm doing
        # this manually. It's *possible* that lowering the IRLS convergence
        # criterion in Stata and here will make this less sketchy.
cls.corr_fact = np.sqrt((wsum - 1.) / wsum) * 0.98518473599905609
mod = GLM(cpunish_data.endog, cpunish_data.exog,
family=sm.families.Poisson(),
var_weights=aweights)
cls.res1 = mod.fit(cov_type='HC0')
# cov_kwds={'use_correction':False})
cls.res2 = res_stata.results_poisson_aweight_hc1
class TestGlmPoissonFwClu(CheckWeight):
@classmethod
def setup_class(cls):
fweights = [1, 1, 1, 2, 2, 2, 3, 3, 3, 1, 1, 1, 2, 2, 2, 3, 3]
# faking aweights by using normalized freq_weights
fweights = np.array(fweights)
wsum = fweights.sum()
nobs = len(cpunish_data.endog)
aweights = fweights / wsum * nobs
gid = np.arange(1, 17 + 1) // 2
n_groups = len(np.unique(gid))
        # no wnobs yet in sandwich covariance calculation
cls.corr_fact = 1 / np.sqrt(n_groups / (n_groups - 1))
# np.sqrt((wsum - 1.) / wsum)
cov_kwds = {'groups': gid, 'use_correction': False}
with pytest.warns(SpecificationWarning):
mod = GLM(cpunish_data.endog, cpunish_data.exog,
family=sm.families.Poisson(),
freq_weights=fweights)
cls.res1 = mod.fit(cov_type='cluster', cov_kwds=cov_kwds)
cls.res2 = res_stata.results_poisson_fweight_clu1
class TestGlmTweedieAwNr(CheckWeight):
@classmethod
def setup_class(cls):
import statsmodels.formula.api as smf
data = sm.datasets.fair.load_pandas()
endog = data.endog
data = data.exog
data['fair'] = endog
aweights = np.repeat(1, len(data.index))
aweights[::5] = 5
aweights[::13] = 3
model = smf.glm(
'fair ~ age + yrs_married',
data=data,
family=sm.families.Tweedie(
var_power=1.55,
link=sm.families.links.log()
),
var_weights=aweights
)
cls.res1 = model.fit(rtol=1e-25, atol=0)
cls.res2 = res_r.results_tweedie_aweights_nonrobust
class TestGlmGammaAwNr(CheckWeight):
@classmethod
def setup_class(cls):
from .results.results_glm import CancerLog
res2 = CancerLog()
endog = res2.endog
exog = res2.exog[:, :-1]
exog = sm.add_constant(exog, prepend=True)
aweights = np.repeat(1, len(endog))
aweights[::5] = 5
aweights[::13] = 3
model = sm.GLM(endog, exog,
family=sm.families.Gamma(link=sm.families.links.log()),
var_weights=aweights)
cls.res1 = model.fit(rtol=1e-25, atol=0)
cls.res2 = res_r.results_gamma_aweights_nonrobust
def test_r_llf(self):
scale = self.res1.deviance / self.res1._iweights.sum()
ll = self.res1.family.loglike(self.res1.model.endog,
self.res1.mu,
freq_weights=self.res1._var_weights,
scale=scale)
assert_allclose(ll, self.res2.ll, atol=1e-6, rtol=1e-7)
class TestGlmGaussianAwNr(CheckWeight):
@classmethod
def setup_class(cls):
import statsmodels.formula.api as smf
data = sm.datasets.cpunish.load_pandas()
endog = data.endog
data = data.exog
data['EXECUTIONS'] = endog
data['INCOME'] /= 1000
aweights = np.array([1, 2, 3, 4, 5, 4, 3, 2, 1, 2, 3, 4, 5, 4, 3, 2,
1])
model = smf.glm(
'EXECUTIONS ~ INCOME + SOUTH - 1',
data=data,
family=sm.families.Gaussian(link=sm.families.links.log()),
var_weights=aweights
)
cls.res1 = model.fit(rtol=1e-25, atol=0)
cls.res2 = res_r.results_gaussian_aweights_nonrobust
def test_r_llf(self):
res1 = self.res1
res2 = self.res2
model = self.res1.model
# Need to make a few adjustments...
# First, calculate scale using nobs as denominator
scale = res1.scale * model.df_resid / model.wnobs
# Calculate llf using adj scale and wts = freq_weights
wts = model.freq_weights
llf = model.family.loglike(model.endog, res1.mu,
freq_weights=wts,
scale=scale)
# SM uses (essentially) stat's loglike formula... first term is
# (endog - mu) ** 2 / scale
adj_sm = -1 / 2 * ((model.endog - res1.mu) ** 2).sum() / scale
# R has these 2 terms that stata/sm do not
adj_r = -model.wnobs / 2 + np.sum(np.log(model.var_weights)) / 2
llf_adj = llf - adj_sm + adj_r
assert_allclose(llf_adj, res2.ll, atol=1e-6, rtol=1e-7)
def gen_endog(lin_pred, family_class, link, binom_version=0):
np.random.seed(872)
fam = sm.families
mu = link().inverse(lin_pred)
if family_class == fam.Binomial:
if binom_version == 0:
endog = 1*(np.random.uniform(size=len(lin_pred)) < mu)
else:
endog = np.empty((len(lin_pred), 2))
n = 10
endog[:, 0] = (np.random.uniform(size=(len(lin_pred), n)) <
mu[:, None]).sum(1)
endog[:, 1] = n - endog[:, 0]
elif family_class == fam.Poisson:
endog = np.random.poisson(mu)
elif family_class == fam.Gamma:
endog = np.random.gamma(2, mu)
elif family_class == fam.Gaussian:
endog = mu + np.random.normal(size=len(lin_pred))
elif family_class == fam.NegativeBinomial:
from scipy.stats.distributions import nbinom
endog = nbinom.rvs(mu, 0.5)
elif family_class == fam.InverseGaussian:
from scipy.stats.distributions import invgauss
endog = invgauss.rvs(mu)
elif family_class == fam.Tweedie:
rate = 1
shape = 1.0
scale = mu / (rate * shape)
endog = (np.random.poisson(rate, size=scale.shape[0]) *
np.random.gamma(shape * scale))
else:
raise ValueError
return endog
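# Example (sketch): simulate Poisson counts for a constant linear predictor,
#   y = gen_endog(np.ones(50), sm.families.Poisson, sm.families.links.log)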
def test_wtd_gradient_irls():
# Compare the results when using gradient optimization and IRLS.
# TODO: Find working examples for inverse_squared link
np.random.seed(87342)
fam = sm.families
lnk = sm.families.links
families = [(fam.Binomial, [lnk.logit, lnk.probit, lnk.cloglog, lnk.log,
lnk.cauchy]),
(fam.Poisson, [lnk.log, lnk.identity, lnk.sqrt]),
(fam.Gamma, [lnk.log, lnk.identity, lnk.inverse_power]),
(fam.Gaussian, [lnk.identity, lnk.log, lnk.inverse_power]),
(fam.InverseGaussian, [lnk.log, lnk.identity,
lnk.inverse_power,
lnk.inverse_squared]),
(fam.NegativeBinomial, [lnk.log, lnk.inverse_power,
lnk.inverse_squared, lnk.identity])]
n = 100
p = 3
exog = np.random.normal(size=(n, p))
exog[:, 0] = 1
skip_one = False
for family_class, family_links in families:
for link in family_links:
for binom_version in 0, 1:
method = 'bfgs'
if family_class != fam.Binomial and binom_version == 1:
continue
elif family_class == fam.Binomial and link == lnk.cloglog:
                    # Cannot get gradient to converge with var_weights here
continue
elif family_class == fam.Binomial and link == lnk.log:
                    # Cannot get gradient to converge with var_weights here
continue
elif (family_class, link) == (fam.Poisson, lnk.identity):
lin_pred = 20 + exog.sum(1)
elif (family_class, link) == (fam.Binomial, lnk.log):
lin_pred = -1 + exog.sum(1) / 8
elif (family_class, link) == (fam.Poisson, lnk.sqrt):
lin_pred = -2 + exog.sum(1)
elif (family_class, link) == (fam.Gamma, lnk.log):
# Cannot get gradient to converge with var_weights here
continue
elif (family_class, link) == (fam.Gamma, lnk.identity):
                    # Cannot get gradient to converge with var_weights here
continue
elif (family_class, link) == (fam.Gamma, lnk.inverse_power):
                    # Cannot get gradient to converge with var_weights here
continue
elif (family_class, link) == (fam.Gaussian, lnk.log):
                    # Cannot get gradient to converge with var_weights here
continue
elif (family_class, link) == (fam.Gaussian, lnk.inverse_power):
                    # Cannot get gradient to converge with var_weights here
continue
elif (family_class, link) == (fam.InverseGaussian, lnk.log):
                    # Cannot get gradient to converge with var_weights here
lin_pred = -1 + exog.sum(1)
continue
elif (family_class, link) == (fam.InverseGaussian,
lnk.identity):
                    # Cannot get gradient to converge with var_weights here
lin_pred = 20 + 5*exog.sum(1)
lin_pred = np.clip(lin_pred, 1e-4, np.inf)
continue
elif (family_class, link) == (fam.InverseGaussian,
lnk.inverse_squared):
lin_pred = 0.5 + exog.sum(1) / 5
continue # skip due to non-convergence
elif (family_class, link) == (fam.InverseGaussian,
lnk.inverse_power):
lin_pred = 1 + exog.sum(1) / 5
method = 'newton'
elif (family_class, link) == (fam.NegativeBinomial,
lnk.identity):
lin_pred = 20 + 5*exog.sum(1)
lin_pred = np.clip(lin_pred, 1e-3, np.inf)
method = 'newton'
elif (family_class, link) == (fam.NegativeBinomial,
lnk.inverse_squared):
lin_pred = 0.1 + np.random.uniform(size=exog.shape[0])
continue # skip due to non-convergence
elif (family_class, link) == (fam.NegativeBinomial,
lnk.inverse_power):
                    # Cannot get gradient to converge with var_weights here
lin_pred = 1 + exog.sum(1) / 5
continue
elif (family_class, link) == (fam.Gaussian, lnk.inverse_power):
# adding skip because of convergence failure
skip_one = True
else:
lin_pred = np.random.uniform(size=exog.shape[0])
endog = gen_endog(lin_pred, family_class, link, binom_version)
if binom_version == 0:
wts = np.ones_like(endog)
tmp = np.random.randint(
2,
5,
size=(endog > endog.mean()).sum()
)
wts[endog > endog.mean()] = tmp
else:
wts = np.ones(shape=endog.shape[0])
y = endog[:, 0] / endog.sum(axis=1)
tmp = np.random.gamma(2, size=(y > y.mean()).sum())
wts[y > y.mean()] = tmp
with warnings.catch_warnings():
warnings.simplefilter("ignore")
mod_irls = sm.GLM(endog, exog, var_weights=wts,
family=family_class(link=link()))
rslt_irls = mod_irls.fit(method="IRLS", atol=1e-10,
tol_criterion='params')
# Try with and without starting values.
for max_start_irls, start_params in ((0, rslt_irls.params),
(3, None)):
# TODO: skip convergence failures for now
if max_start_irls > 0 and skip_one:
continue
with warnings.catch_warnings():
warnings.simplefilter("ignore")
mod_gradient = sm.GLM(endog, exog, var_weights=wts,
family=family_class(link=link()))
rslt_gradient = mod_gradient.fit(
max_start_irls=max_start_irls,
start_params=start_params,
method=method
)
assert_allclose(rslt_gradient.params,
rslt_irls.params, rtol=1e-6, atol=5e-5)
assert_allclose(rslt_gradient.llf, rslt_irls.llf,
rtol=1e-6, atol=1e-6)
assert_allclose(rslt_gradient.scale, rslt_irls.scale,
rtol=1e-6, atol=1e-6)
# Get the standard errors using expected information.
gradient_bse = rslt_gradient.bse
ehess = mod_gradient.hessian(rslt_gradient.params,
observed=False)
gradient_bse = np.sqrt(-np.diag(np.linalg.inv(ehess)))
assert_allclose(gradient_bse, rslt_irls.bse, rtol=1e-6,
atol=5e-5)
def get_dummies(x):
values = np.sort(np.unique(x))
out = np.zeros(shape=(x.shape[0], len(values) - 1))
for i, v in enumerate(values):
if i == 0:
continue
out[:, i - 1] = np.where(v == x, 1, 0)
return out
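# Example: get_dummies(np.array([1, 1, 2, 3])) drops the first level and
# returns [[0, 0], [0, 0], [1, 0], [0, 1]].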
class TestRepeatedvsAggregated(CheckWeight):
@classmethod
def setup_class(cls):
np.random.seed(4321)
n = 100
p = 5
exog = np.empty((n, p))
exog[:, 0] = 1
exog[:, 1] = np.random.randint(low=-5, high=5, size=n)
x = np.repeat(np.array([1, 2, 3, 4]), n / 4)
exog[:, 2:] = get_dummies(x)
beta = np.array([-1, 0.1, -0.05, .2, 0.35])
lin_pred = (exog * beta).sum(axis=1)
family = sm.families.Poisson
link = sm.families.links.log
endog = gen_endog(lin_pred, family, link)
mod1 = sm.GLM(endog, exog, family=family(link=link()))
cls.res1 = mod1.fit()
agg = pd.DataFrame(exog)
agg['endog'] = endog
agg_endog = agg.groupby([0, 1, 2, 3, 4]).sum()[['endog']]
agg_wt = agg.groupby([0, 1, 2, 3, 4]).count()[['endog']]
agg_exog = np.array(agg_endog.index.tolist())
agg_wt = agg_wt['endog']
agg_endog = agg_endog['endog']
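        # Summing endog within identical exog groups and passing the group
        # counts as exposure should reproduce the row-per-observation Poisson
        # fit above; CheckWeight.test_basic compares the two parameter sets.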
mod2 = sm.GLM(agg_endog, agg_exog, family=family(link=link()),
exposure=agg_wt)
cls.res2 = mod2.fit()
class TestRepeatedvsAverage(CheckWeight):
@classmethod
def setup_class(cls):
np.random.seed(4321)
n = 10000
p = 5
exog = np.empty((n, p))
exog[:, 0] = 1
exog[:, 1] = np.random.randint(low=-5, high=5, size=n)
x = np.repeat(np.array([1, 2, 3, 4]), n / 4)
exog[:, 2:] = get_dummies(x)
beta = np.array([-1, 0.1, -0.05, .2, 0.35])
lin_pred = (exog * beta).sum(axis=1)
family = sm.families.Poisson
link = sm.families.links.log
endog = gen_endog(lin_pred, family, link)
mod1 = sm.GLM(endog, exog, family=family(link=link()))
cls.res1 = mod1.fit()
agg = pd.DataFrame(exog)
agg['endog'] = endog
agg_endog = agg.groupby([0, 1, 2, 3, 4]).sum()[['endog']]
agg_wt = agg.groupby([0, 1, 2, 3, 4]).count()[['endog']]
agg_exog = np.array(agg_endog.index.tolist())
agg_wt = agg_wt['endog']
avg_endog = agg_endog['endog'] / agg_wt
mod2 = sm.GLM(avg_endog, agg_exog, family=family(link=link()),
var_weights=agg_wt)
cls.res2 = mod2.fit()
class TestTweedieRepeatedvsAggregated(CheckWeight):
@classmethod
def setup_class(cls):
np.random.seed(4321)
n = 10000
p = 5
exog = np.empty((n, p))
exog[:, 0] = 1
exog[:, 1] = np.random.randint(low=-5, high=5, size=n)
x = np.repeat(np.array([1, 2, 3, 4]), n / 4)
exog[:, 2:] = get_dummies(x)
beta = np.array([7, 0.1, -0.05, .2, 0.35])
lin_pred = (exog * beta).sum(axis=1)
family = sm.families.Tweedie
link = sm.families.links.log
endog = gen_endog(lin_pred, family, link)
mod1 = sm.GLM(endog, exog, family=family(link=link(), var_power=1.5))
cls.res1 = mod1.fit(rtol=1e-20, atol=0, tol_criterion='params')
agg = pd.DataFrame(exog)
agg['endog'] = endog
agg_endog = agg.groupby([0, 1, 2, 3, 4]).sum()[['endog']]
agg_wt = agg.groupby([0, 1, 2, 3, 4]).count()[['endog']]
agg_exog = np.array(agg_endog.index.tolist())
agg_wt = agg_wt['endog']
agg_endog = agg_endog['endog']
mod2 = sm.GLM(agg_endog, agg_exog,
family=family(link=link(), var_power=1.5),
exposure=agg_wt, var_weights=agg_wt ** 0.5)
cls.res2 = mod2.fit(rtol=1e-20, atol=0, tol_criterion='params')
class TestTweedieRepeatedvsAverage(CheckWeight):
@classmethod
def setup_class(cls):
np.random.seed(4321)
n = 1000
p = 5
exog = np.empty((n, p))
exog[:, 0] = 1
exog[:, 1] = np.random.randint(low=-5, high=5, size=n)
x = np.repeat(np.array([1, 2, 3, 4]), n / 4)
exog[:, 2:] = get_dummies(x)
beta = np.array([7, 0.1, -0.05, .2, 0.35])
lin_pred = (exog * beta).sum(axis=1)
family = sm.families.Tweedie
link = sm.families.links.log
endog = gen_endog(lin_pred, family, link)
mod1 = sm.GLM(endog, exog, family=family(link=link(), var_power=1.5))
cls.res1 = mod1.fit(rtol=1e-10, atol=0, tol_criterion='params',
scaletype='x2')
agg = pd.DataFrame(exog)
agg['endog'] = endog
agg_endog = agg.groupby([0, 1, 2, 3, 4]).sum()[['endog']]
agg_wt = agg.groupby([0, 1, 2, 3, 4]).count()[['endog']]
agg_exog = np.array(agg_endog.index.tolist())
agg_wt = agg_wt['endog']
avg_endog = agg_endog['endog'] / agg_wt
mod2 = sm.GLM(avg_endog, agg_exog,
family=family(link=link(), var_power=1.5),
var_weights=agg_wt)
cls.res2 = mod2.fit(rtol=1e-10, atol=0, tol_criterion='params')
class TestBinomial0RepeatedvsAverage(CheckWeight):
@classmethod
def setup_class(cls):
np.random.seed(4321)
n = 20
p = 5
exog = np.empty((n, p))
exog[:, 0] = 1
exog[:, 1] = np.random.randint(low=-5, high=5, size=n)
x = np.repeat(np.array([1, 2, 3, 4]), n / 4)
exog[:, 2:] = get_dummies(x)
beta = np.array([-1, 0.1, -0.05, .2, 0.35])
lin_pred = (exog * beta).sum(axis=1)
family = sm.families.Binomial
link = sm.families.links.logit
endog = gen_endog(lin_pred, family, link, binom_version=0)
mod1 = sm.GLM(endog, exog, family=family(link=link()))
cls.res1 = mod1.fit(rtol=1e-10, atol=0, tol_criterion='params',
scaletype='x2')
agg = pd.DataFrame(exog)
agg['endog'] = endog
agg_endog = agg.groupby([0, 1, 2, 3, 4]).sum()[['endog']]
agg_wt = agg.groupby([0, 1, 2, 3, 4]).count()[['endog']]
agg_exog = np.array(agg_endog.index.tolist())
agg_wt = agg_wt['endog']
avg_endog = agg_endog['endog'] / agg_wt
mod2 = sm.GLM(avg_endog, agg_exog,
family=family(link=link()),
var_weights=agg_wt)
cls.res2 = mod2.fit(rtol=1e-10, atol=0, tol_criterion='params')
class TestBinomial0RepeatedvsDuplicated(CheckWeight):
@classmethod
def setup_class(cls):
np.random.seed(4321)
n = 10000
p = 5
exog = np.empty((n, p))
exog[:, 0] = 1
exog[:, 1] = np.random.randint(low=-5, high=5, size=n)
x = np.repeat(np.array([1, 2, 3, 4]), n / 4)
exog[:, 2:] = get_dummies(x)
beta = np.array([-1, 0.1, -0.05, .2, 0.35])
lin_pred = (exog * beta).sum(axis=1)
family = sm.families.Binomial
link = sm.families.links.logit
endog = gen_endog(lin_pred, family, link, binom_version=0)
wt = np.random.randint(1, 5, n)
mod1 = sm.GLM(endog, exog, family=family(link=link()), freq_weights=wt)
cls.res1 = mod1.fit()
exog_dup = np.repeat(exog, wt, axis=0)
endog_dup = np.repeat(endog, wt)
mod2 = sm.GLM(endog_dup, exog_dup, family=family(link=link()))
cls.res2 = mod2.fit()
def test_warnings_raised():
weights = [1, 1, 1, 2, 2, 2, 3, 3, 3, 1, 1, 1, 2, 2, 2, 3, 3]
# faking aweights by using normalized freq_weights
weights = np.array(weights)
gid = np.arange(1, 17 + 1) // 2
cov_kwds = {'groups': gid, 'use_correction': False}
with pytest.warns(SpecificationWarning):
res1 = GLM(cpunish_data.endog, cpunish_data.exog,
family=sm.families.Poisson(), freq_weights=weights
).fit(cov_type='cluster', cov_kwds=cov_kwds)
res1.summary()
with pytest.warns(SpecificationWarning):
res1 = GLM(cpunish_data.endog, cpunish_data.exog,
family=sm.families.Poisson(), var_weights=weights
).fit(cov_type='cluster', cov_kwds=cov_kwds)
res1.summary()
weights = [1, 1, 1, 2, 2, 2, 3, 3, 3, 1, 1, 1, 2, 2, 2, 3, 3]
@pytest.mark.parametrize('formatted', [weights, np.asarray(weights),
pd.Series(weights)],
ids=['list', 'ndarray', 'Series'])
def test_weights_different_formats(formatted):
check_weights_as_formats(formatted)
def check_weights_as_formats(weights):
res = GLM(cpunish_data.endog, cpunish_data.exog,
family=sm.families.Poisson(), freq_weights=weights
).fit()
assert isinstance(res._freq_weights, np.ndarray)
assert isinstance(res._var_weights, np.ndarray)
assert isinstance(res._iweights, np.ndarray)
res = GLM(cpunish_data.endog, cpunish_data.exog,
family=sm.families.Poisson(), var_weights=weights
).fit()
assert isinstance(res._freq_weights, np.ndarray)
assert isinstance(res._var_weights, np.ndarray)
assert isinstance(res._iweights, np.ndarray)
class TestBinomialVsVarWeights(CheckWeight):
@classmethod
def setup_class(cls):
from statsmodels.datasets.star98 import load
data = load()
data.exog = np.asarray(data.exog)
data.endog = np.asarray(data.endog)
data.exog /= data.exog.std(0)
data.exog = add_constant(data.exog, prepend=False)
cls.res1 = GLM(data.endog, data.exog,
family=sm.families.Binomial()).fit()
weights = data.endog.sum(axis=1)
endog2 = data.endog[:, 0] / weights
cls.res2 = GLM(endog2, data.exog,
family=sm.families.Binomial(),
var_weights=weights).fit()
class TestGlmGaussianWLS(CheckWeight):
@classmethod
def setup_class(cls):
import statsmodels.formula.api as smf
data = sm.datasets.cpunish.load_pandas()
endog = data.endog
data = data.exog
data['EXECUTIONS'] = endog
data['INCOME'] /= 1000
aweights = np.array([1, 2, 3, 4, 5, 4, 3, 2, 1, 2, 3, 4, 5, 4, 3, 2,
1])
model = smf.glm(
'EXECUTIONS ~ INCOME + SOUTH - 1',
data=data,
family=sm.families.Gaussian(link=sm.families.links.identity()),
var_weights=aweights
)
wlsmodel = smf.wls(
'EXECUTIONS ~ INCOME + SOUTH - 1',
data=data,
weights=aweights)
cls.res1 = model.fit(rtol=1e-25, atol=1e-25)
cls.res2 = wlsmodel.fit()
def test_incompatible_input():
weights = [1, 1, 1, 2, 2, 2, 3, 3, 3, 1, 1, 1, 2, 2, 2, 3, 3]
exog = cpunish_data.exog
endog = cpunish_data.endog
family = sm.families.Poisson()
# Too short
assert_raises(ValueError, GLM, endog, exog, family=family,
freq_weights=weights[:-1])
assert_raises(ValueError, GLM, endog, exog, family=family,
var_weights=weights[:-1])
# Too long
assert_raises(ValueError, GLM, endog, exog, family=family,
freq_weights=weights + [3])
assert_raises(ValueError, GLM, endog, exog, family=family,
var_weights=weights + [3])
# Too many dimensions
assert_raises(ValueError, GLM, endog, exog, family=family,
freq_weights=[weights, weights])
assert_raises(ValueError, GLM, endog, exog, family=family,
var_weights=[weights, weights])
def test_poisson_residuals():
nobs, k_exog = 100, 5
np.random.seed(987125)
x = np.random.randn(nobs, k_exog - 1)
x = add_constant(x)
y_true = x.sum(1) / 2
y = y_true + 2 * np.random.randn(nobs)
exposure = 1 + np.arange(nobs) // 4
yp = np.random.poisson(np.exp(y_true) * exposure)
yp[10:15] += 10
fam = sm.families.Poisson()
mod_poi_e = GLM(yp, x, family=fam, exposure=exposure)
res_poi_e = mod_poi_e.fit()
mod_poi_w = GLM(yp / exposure, x, family=fam, var_weights=exposure)
res_poi_w = mod_poi_w.fit()
assert_allclose(res_poi_e.resid_response / exposure,
res_poi_w.resid_response)
assert_allclose(res_poi_e.resid_pearson, res_poi_w.resid_pearson)
assert_allclose(res_poi_e.resid_deviance, res_poi_w.resid_deviance)
assert_allclose(res_poi_e.resid_anscombe, res_poi_w.resid_anscombe)
assert_allclose(res_poi_e.resid_anscombe_unscaled,
res_poi_w.resid_anscombe)
|
{
"content_hash": "9c558334606afe19e6ebb258e4b1c367",
"timestamp": "",
"source": "github",
"line_count": 918,
"max_line_length": 194,
"avg_line_length": 43.15686274509804,
"alnum_prop": 0.510752688172043,
"repo_name": "josef-pkt/statsmodels",
"id": "5872380b60b739466e46339017c909975bf65e4e",
"size": "39618",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "statsmodels/genmod/tests/test_glm_weights.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "AGS Script",
"bytes": "457842"
},
{
"name": "Assembly",
"bytes": "10035"
},
{
"name": "Batchfile",
"bytes": "625"
},
{
"name": "C",
"bytes": "381"
},
{
"name": "Cython",
"bytes": "225838"
},
{
"name": "Fortran",
"bytes": "16671"
},
{
"name": "HTML",
"bytes": "148470"
},
{
"name": "MATLAB",
"bytes": "100525"
},
{
"name": "Python",
"bytes": "14428857"
},
{
"name": "R",
"bytes": "106569"
},
{
"name": "Shell",
"bytes": "25322"
},
{
"name": "Stata",
"bytes": "50129"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
import json
class ImmutableObject(object):
"""
Superclass for immutable objects whose fields can only be modified via the
constructor.
:param kwargs: kwargs to set as fields on the object
:type kwargs: any
"""
def __init__(self, *args, **kwargs):
for key, value in kwargs.items():
if not hasattr(self, key) or callable(getattr(self, key)):
raise TypeError(
'__init__() got an unexpected keyword argument "%s"' %
key)
if value == getattr(self, key):
continue # Don't explicitly set default values
self.__dict__[key] = value
def __setattr__(self, name, value):
if name.startswith('_'):
return super(ImmutableObject, self).__setattr__(name, value)
raise AttributeError('Object is immutable.')
def __repr__(self):
kwarg_pairs = []
for (key, value) in sorted(self.__dict__.items()):
if isinstance(value, (frozenset, tuple)):
if not value:
continue
value = list(value)
kwarg_pairs.append('%s=%s' % (key, repr(value)))
return '%(classname)s(%(kwargs)s)' % {
'classname': self.__class__.__name__,
'kwargs': ', '.join(kwarg_pairs),
}
def __hash__(self):
hash_sum = 0
for key, value in self.__dict__.items():
hash_sum += hash(key) + hash(value)
return hash_sum
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self.__eq__(other)
def copy(self, **values):
"""
Copy the model with ``field`` updated to new value.
Examples::
# Returns a track with a new name
Track(name='foo').copy(name='bar')
# Return an album with a new number of tracks
Album(num_tracks=2).copy(num_tracks=5)
:param values: the model fields to modify
:type values: dict
:rtype: new instance of the model being copied
"""
data = {}
for key in self.__dict__.keys():
public_key = key.lstrip('_')
value = values.pop(public_key, self.__dict__[key])
data[public_key] = value
for key in values.keys():
if hasattr(self, key):
value = values.pop(key)
data[key] = value
if values:
raise TypeError(
'copy() got an unexpected keyword argument "%s"' % key)
return self.__class__(**data)
def serialize(self):
data = {}
data['__model__'] = self.__class__.__name__
for key in self.__dict__.keys():
public_key = key.lstrip('_')
value = self.__dict__[key]
if isinstance(value, (set, frozenset, list, tuple)):
value = [
v.serialize() if isinstance(v, ImmutableObject) else v
for v in value]
elif isinstance(value, ImmutableObject):
value = value.serialize()
if not (isinstance(value, list) and len(value) == 0):
data[public_key] = value
return data
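    # Example (sketch): Track(name='foo').serialize() returns
    # {'__model__': 'Track', 'name': 'foo'}; empty collections are omitted.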
class ModelJSONEncoder(json.JSONEncoder):
"""
Automatically serialize Mopidy models to JSON.
Usage::
>>> import json
>>> json.dumps({'a_track': Track(name='name')}, cls=ModelJSONEncoder)
'{"a_track": {"__model__": "Track", "name": "name"}}'
"""
def default(self, obj):
if isinstance(obj, ImmutableObject):
return obj.serialize()
return json.JSONEncoder.default(self, obj)
def model_json_decoder(dct):
"""
Automatically deserialize Mopidy models from JSON.
Usage::
>>> import json
>>> json.loads(
... '{"a_track": {"__model__": "Track", "name": "name"}}',
... object_hook=model_json_decoder)
{u'a_track': Track(artists=[], name=u'name')}
"""
if '__model__' in dct:
model_name = dct.pop('__model__')
cls = globals().get(model_name, None)
if issubclass(cls, ImmutableObject):
kwargs = {}
for key, value in dct.items():
kwargs[key] = value
return cls(**kwargs)
return dct
class Ref(ImmutableObject):
"""
Model to represent URI references with a human friendly name and type
    attached. This is intended for use as a lightweight object "free" of metadata
that can be passed around instead of using full blown models.
:param uri: object URI
:type uri: string
:param name: object name
:type name: string
:param type: object type
    :type type: string
"""
#: The object URI. Read-only.
uri = None
#: The object name. Read-only.
name = None
#: The object type, e.g. "artist", "album", "track", "playlist",
#: "directory". Read-only.
type = None
#: Constant used for comparison with the :attr:`type` field.
ALBUM = 'album'
#: Constant used for comparison with the :attr:`type` field.
ARTIST = 'artist'
#: Constant used for comparison with the :attr:`type` field.
DIRECTORY = 'directory'
#: Constant used for comparison with the :attr:`type` field.
PLAYLIST = 'playlist'
#: Constant used for comparison with the :attr:`type` field.
TRACK = 'track'
@classmethod
def album(cls, **kwargs):
"""Create a :class:`Ref` with ``type`` :attr:`ALBUM`."""
kwargs['type'] = Ref.ALBUM
return cls(**kwargs)
@classmethod
def artist(cls, **kwargs):
"""Create a :class:`Ref` with ``type`` :attr:`ARTIST`."""
kwargs['type'] = Ref.ARTIST
return cls(**kwargs)
@classmethod
def directory(cls, **kwargs):
"""Create a :class:`Ref` with ``type`` :attr:`DIRECTORY`."""
kwargs['type'] = Ref.DIRECTORY
return cls(**kwargs)
@classmethod
def playlist(cls, **kwargs):
"""Create a :class:`Ref` with ``type`` :attr:`PLAYLIST`."""
kwargs['type'] = Ref.PLAYLIST
return cls(**kwargs)
@classmethod
def track(cls, **kwargs):
"""Create a :class:`Ref` with ``type`` :attr:`TRACK`."""
kwargs['type'] = Ref.TRACK
return cls(**kwargs)
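    # Example (sketch): Ref.track(uri='file:///tmp/foo.mp3', name='Foo')
    # builds a Ref whose ``type`` field is Ref.TRACK, so callers never have
    # to spell the type string themselves.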
class Artist(ImmutableObject):
"""
:param uri: artist URI
:type uri: string
:param name: artist name
:type name: string
:param musicbrainz_id: MusicBrainz ID
:type musicbrainz_id: string
"""
#: The artist URI. Read-only.
uri = None
#: The artist name. Read-only.
name = None
#: The MusicBrainz ID of the artist. Read-only.
musicbrainz_id = None
class Album(ImmutableObject):
"""
:param uri: album URI
:type uri: string
:param name: album name
:type name: string
:param artists: album artists
:type artists: list of :class:`Artist`
:param num_tracks: number of tracks in album
:type num_tracks: integer or :class:`None` if unknown
:param num_discs: number of discs in album
:type num_discs: integer or :class:`None` if unknown
:param date: album release date (YYYY or YYYY-MM-DD)
:type date: string
:param musicbrainz_id: MusicBrainz ID
:type musicbrainz_id: string
:param images: album image URIs
:type images: list of strings
"""
#: The album URI. Read-only.
uri = None
#: The album name. Read-only.
name = None
#: A set of album artists. Read-only.
artists = frozenset()
#: The number of tracks in the album. Read-only.
num_tracks = None
#: The number of discs in the album. Read-only.
num_discs = None
#: The album release date. Read-only.
date = None
#: The MusicBrainz ID of the album. Read-only.
musicbrainz_id = None
#: The album image URIs. Read-only.
images = frozenset()
# XXX If we want to keep the order of images we shouldn't use frozenset()
    # as it doesn't preserve order. I'm deferring this issue until we get
# actual usage of this field with more than one image.
def __init__(self, *args, **kwargs):
self.__dict__['artists'] = frozenset(kwargs.pop('artists', None) or [])
self.__dict__['images'] = frozenset(kwargs.pop('images', None) or [])
super(Album, self).__init__(*args, **kwargs)
class Track(ImmutableObject):
"""
:param uri: track URI
:type uri: string
:param name: track name
:type name: string
:param artists: track artists
:type artists: list of :class:`Artist`
:param album: track album
:type album: :class:`Album`
:param composers: track composers
:type composers: string
:param performers: track performers
:type performers: string
:param genre: track genre
:type genre: string
:param track_no: track number in album
:type track_no: integer or :class:`None` if unknown
:param disc_no: disc number in album
:type disc_no: integer or :class:`None` if unknown
:param date: track release date (YYYY or YYYY-MM-DD)
:type date: string
:param length: track length in milliseconds
:type length: integer
:param bitrate: bitrate in kbit/s
:type bitrate: integer
:param comment: track comment
:type comment: string
:param musicbrainz_id: MusicBrainz ID
:type musicbrainz_id: string
:param last_modified: Represents last modification time
:type last_modified: integer or :class:`None` if unknown
"""
#: The track URI. Read-only.
uri = None
#: The track name. Read-only.
name = None
#: A set of track artists. Read-only.
artists = frozenset()
#: The track :class:`Album`. Read-only.
album = None
#: A set of track composers. Read-only.
composers = frozenset()
    #: A set of track performers. Read-only.
performers = frozenset()
#: The track genre. Read-only.
genre = None
#: The track number in the album. Read-only.
track_no = None
#: The disc number in the album. Read-only.
disc_no = None
#: The track release date. Read-only.
date = None
#: The track length in milliseconds. Read-only.
length = None
#: The track's bitrate in kbit/s. Read-only.
bitrate = None
#: The track comment. Read-only.
comment = None
#: The MusicBrainz ID of the track. Read-only.
musicbrainz_id = None
#: Integer representing when the track was last modified, exact meaning
#: depends on source of track. For local files this is the mtime, for other
#: backends it could be a timestamp or simply a version counter.
last_modified = None
def __init__(self, *args, **kwargs):
get = lambda key: frozenset(kwargs.pop(key, None) or [])
self.__dict__['artists'] = get('artists')
self.__dict__['composers'] = get('composers')
self.__dict__['performers'] = get('performers')
super(Track, self).__init__(*args, **kwargs)
class TlTrack(ImmutableObject):
"""
    A tracklist track. Wraps a regular track and its tracklist ID.
The use of :class:`TlTrack` allows the same track to appear multiple times
in the tracklist.
    This class also accepts its parameters as positional arguments. Both
arguments must be provided, and they must appear in the order they are
listed here.
    This class also supports iteration, so you can extract its values like this::
(tlid, track) = tl_track
:param tlid: tracklist ID
:type tlid: int
:param track: the track
:type track: :class:`Track`
"""
#: The tracklist ID. Read-only.
tlid = None
#: The track. Read-only.
track = None
def __init__(self, *args, **kwargs):
if len(args) == 2 and len(kwargs) == 0:
kwargs['tlid'] = args[0]
kwargs['track'] = args[1]
args = []
super(TlTrack, self).__init__(*args, **kwargs)
def __iter__(self):
return iter([self.tlid, self.track])
class Playlist(ImmutableObject):
"""
:param uri: playlist URI
:type uri: string
:param name: playlist name
:type name: string
:param tracks: playlist's tracks
:type tracks: list of :class:`Track` elements
:param last_modified:
playlist's modification time in milliseconds since Unix epoch
:type last_modified: int
"""
#: The playlist URI. Read-only.
uri = None
#: The playlist name. Read-only.
name = None
#: The playlist's tracks. Read-only.
tracks = tuple()
#: The playlist modification time in milliseconds since Unix epoch.
#: Read-only.
#:
#: Integer, or :class:`None` if unknown.
last_modified = None
def __init__(self, *args, **kwargs):
self.__dict__['tracks'] = tuple(kwargs.pop('tracks', None) or [])
super(Playlist, self).__init__(*args, **kwargs)
# TODO: def insert(self, pos, track): ... ?
@property
def length(self):
"""The number of tracks in the playlist. Read-only."""
return len(self.tracks)
class SearchResult(ImmutableObject):
"""
:param uri: search result URI
:type uri: string
:param tracks: matching tracks
:type tracks: list of :class:`Track` elements
:param artists: matching artists
:type artists: list of :class:`Artist` elements
:param albums: matching albums
:type albums: list of :class:`Album` elements
"""
    #: The search result URI. Read-only.
    uri = None
    #: The tracks matching the search query. Read-only.
    tracks = tuple()
    #: The artists matching the search query. Read-only.
    artists = tuple()
    #: The albums matching the search query. Read-only.
    albums = tuple()
def __init__(self, *args, **kwargs):
self.__dict__['tracks'] = tuple(kwargs.pop('tracks', None) or [])
self.__dict__['artists'] = tuple(kwargs.pop('artists', None) or [])
self.__dict__['albums'] = tuple(kwargs.pop('albums', None) or [])
super(SearchResult, self).__init__(*args, **kwargs)
|
{
"content_hash": "9f615ba7d99db7cea256c2d54b5e6254",
"timestamp": "",
"source": "github",
"line_count": 483,
"max_line_length": 79,
"avg_line_length": 29.434782608695652,
"alnum_prop": 0.5878877400295421,
"repo_name": "woutervanwijk/mopidy",
"id": "bedf8ca50b0777550a27d26fa8dd834c4a4e22e7",
"size": "14217",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "mopidy/models.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "610"
},
{
"name": "JavaScript",
"bytes": "74911"
},
{
"name": "Python",
"bytes": "925399"
}
],
"symlink_target": ""
}
|
"""Output formatters using shell syntax.
"""
from .base import SingleFormatter
import argparse
import six
class ShellFormatter(SingleFormatter):
def add_argument_group(self, parser):
group = parser.add_argument_group(
title='shell formatter',
description='a format a UNIX shell can parse (variable="value")',
)
group.add_argument(
'--variable',
action='append',
default=[],
dest='variables',
metavar='VARIABLE',
help=argparse.SUPPRESS,
)
group.add_argument(
'--prefix',
action='store',
default='',
dest='prefix',
help='add a prefix to all variable names',
)
def emit_one(self, column_names, data, stdout, parsed_args):
variable_names = [c.lower().replace(' ', '_')
for c in column_names
]
desired_columns = parsed_args.variables
for name, value in zip(variable_names, data):
if name in desired_columns or not desired_columns:
if isinstance(value, six.string_types):
value = value.replace('"', '\\"')
stdout.write('%s%s="%s"\n' % (parsed_args.prefix, name, value))
return
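# Example (sketch): for column_names ('Name', 'Size'), data ('demo', 42) and
# --prefix os_, emit_one() writes:
#   os_name="demo"
#   os_size="42"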
|
{
"content_hash": "e48e0d3e10fcefd762b5d0013055d39e",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 79,
"avg_line_length": 31.093023255813954,
"alnum_prop": 0.5272999252056844,
"repo_name": "enzochiau/cliff",
"id": "e613c22ffaf49a0ccded9742a86ff2a20b537426",
"size": "1337",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "cliff/formatters/shell.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "395"
},
{
"name": "Python",
"bytes": "94023"
},
{
"name": "Shell",
"bytes": "2022"
}
],
"symlink_target": ""
}
|
from zerorpc import zmq
import zerorpc
from testutils import teardown, random_ipc_endpoint
class MokupContext():
_next_id = 0
def new_msgid(self):
new_id = MokupContext._next_id
MokupContext._next_id += 1
return new_id
def test_context():
c = zerorpc.Context()
assert c.new_msgid() is not None
def test_event():
context = MokupContext()
event = zerorpc.Event('mylittleevent', (None,), context=context)
print event
assert event.name == 'mylittleevent'
assert event.header['message_id'] == 0
assert event.args == (None,)
event = zerorpc.Event('mylittleevent2', ('42',), context=context)
print event
assert event.name == 'mylittleevent2'
assert event.header['message_id'] == 1
assert event.args == ('42',)
event = zerorpc.Event('mylittleevent3', ('a', 42), context=context)
print event
assert event.name == 'mylittleevent3'
assert event.header['message_id'] == 2
assert event.args == ('a', 42)
event = zerorpc.Event('mylittleevent4', ('b', 21), context=context)
print event
assert event.name == 'mylittleevent4'
assert event.header['message_id'] == 3
assert event.args == ('b', 21)
packed = event.pack()
unpacked = zerorpc.Event.unpack(packed)
print unpacked
assert unpacked.name == 'mylittleevent4'
assert unpacked.header['message_id'] == 3
assert list(unpacked.args) == ['b', 21]
event = zerorpc.Event('mylittleevent5', ('c', 24, True),
header={'lol': 'rofl'}, context=None)
print event
assert event.name == 'mylittleevent5'
assert event.header['lol'] == 'rofl'
assert event.args == ('c', 24, True)
event = zerorpc.Event('mod', (42,), context=context)
print event
assert event.name == 'mod'
assert event.header['message_id'] == 4
assert event.args == (42,)
event.header.update({'stream': True})
assert event.header['stream'] is True
def test_events_req_rep():
endpoint = random_ipc_endpoint()
server = zerorpc.Events(zmq.REP)
server.bind(endpoint)
client = zerorpc.Events(zmq.REQ)
client.connect(endpoint)
client.emit('myevent', ('arg1',))
event = server.recv()
print event
assert event.name == 'myevent'
assert list(event.args) == ['arg1']
def test_events_req_rep2():
endpoint = random_ipc_endpoint()
server = zerorpc.Events(zmq.REP)
server.bind(endpoint)
client = zerorpc.Events(zmq.REQ)
client.connect(endpoint)
for i in xrange(10):
client.emit('myevent' + str(i), (i,))
event = server.recv()
print event
assert event.name == 'myevent' + str(i)
assert list(event.args) == [i]
server.emit('answser' + str(i * 2), (i * 2,))
event = client.recv()
print event
assert event.name == 'answser' + str(i * 2)
assert list(event.args) == [i * 2]
def test_events_dealer_router():
endpoint = random_ipc_endpoint()
server = zerorpc.Events(zmq.ROUTER)
server.bind(endpoint)
client = zerorpc.Events(zmq.DEALER)
client.connect(endpoint)
for i in xrange(6):
client.emit('myevent' + str(i), (i,))
event = server.recv()
print event
assert event.name == 'myevent' + str(i)
assert list(event.args) == [i]
reply_event = server.new_event('answser' + str(i * 2), (i * 2,))
reply_event.identity = event.identity
server.emit_event(reply_event)
event = client.recv()
print event
assert event.name == 'answser' + str(i * 2)
assert list(event.args) == [i * 2]
def test_events_push_pull():
endpoint = random_ipc_endpoint()
server = zerorpc.Events(zmq.PULL)
server.bind(endpoint)
client = zerorpc.Events(zmq.PUSH)
client.connect(endpoint)
for x in xrange(10):
client.emit('myevent', (x,))
for x in xrange(10):
event = server.recv()
print event
assert event.name == 'myevent'
assert list(event.args) == [x]
def test_msgpack():
context = zerorpc.Context()
event = zerorpc.Event('myevent', ('a',), context=context)
print event
assert type(event.name) == str
for key in event.header.keys():
assert type(key) == str
assert type(event.header['message_id']) == str
assert type(event.args[0]) == str
packed = event.pack()
event = event.unpack(packed)
print event
assert type(event.name) == str
for key in event.header.keys():
assert type(key) == str
assert type(event.header['message_id']) == str
assert type(event.args[0]) == str
event = zerorpc.Event('myevent', (u'a',), context=context)
print event
assert type(event.name) == str
for key in event.header.keys():
assert type(key) == str
assert type(event.header['message_id']) == str
assert type(event.args[0]) == unicode
packed = event.pack()
event = event.unpack(packed)
print event
assert type(event.name) == str
for key in event.header.keys():
assert type(key) == str
assert type(event.header['message_id']) == str
assert type(event.args[0]) == unicode
event = zerorpc.Event('myevent', (u'a', 'b'), context=context)
print event
assert type(event.name) == str
for key in event.header.keys():
assert type(key) == str
assert type(event.header['message_id']) == str
assert type(event.args[0]) == unicode
assert type(event.args[1]) == str
packed = event.pack()
event = event.unpack(packed)
print event
assert type(event.name) == str
for key in event.header.keys():
assert type(key) == str
assert type(event.header['message_id']) == str
assert type(event.args[0]) == unicode
assert type(event.args[1]) == str
|
{
"content_hash": "60dd5323e6133bb7d5cd922f861cb254",
"timestamp": "",
"source": "github",
"line_count": 202,
"max_line_length": 72,
"avg_line_length": 28.797029702970296,
"alnum_prop": 0.6114835826027162,
"repo_name": "stdrickforce/zerorpc-python",
"id": "224a41cc2740e992927f18196471387330660339",
"size": "7063",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/test_events.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "194984"
}
],
"symlink_target": ""
}
|
from geopy.compat import u
from geopy.point import Point
from geopy.geocoders import Photon
from test.geocoders.util import GeocoderTestBase
class PhotonTestCase(GeocoderTestBase): # pylint: disable=R0904,C0111
@classmethod
def setUpClass(cls):
cls.geocoder = Photon()
cls.known_country_it = "Francia"
cls.known_country_fr = "France"
def test_geocode(self):
"""
Photon.geocode
"""
self.geocode_run(
{"query": "14 rue pelisson villeurbanne"},
{"latitude": 45.7733963, "longitude": 4.88612369},
)
def test_unicode_name(self):
"""
Photon.geocode unicode
"""
self.geocode_run(
{"query": u("\u6545\u5bab")},
{"latitude": 39.916, "longitude": 116.390},
)
def test_reverse_string(self):
"""
Photon.reverse string
"""
self.reverse_run(
{"query": "45.7733105, 4.8869339"},
{"latitude": 45.7733105, "longitude": 4.8869339}
)
def test_reverse_point(self):
"""
Photon.reverse Point
"""
self.reverse_run(
{"query": Point(45.7733105, 4.8869339)},
{"latitude": 45.7733105, "longitude": 4.8869339}
)
def test_geocode_language_parameter(self):
"""
Photon.geocode using `language`
"""
result_geocode = self._make_request(
self.geocoder.geocode,
self.known_country_fr,
language="it",
)
self.assertEqual(
result_geocode.raw['properties']['country'],
self.known_country_it
)
def test_reverse_language_parameter(self):
"""
Photon.reverse using `language`
"""
result_reverse_it = self._make_request(
self.geocoder.reverse,
"45.7733105, 4.8869339",
exactly_one=True,
language="it",
)
self.assertEqual(
result_reverse_it.raw['properties']['country'],
self.known_country_it
)
result_reverse_fr = self._make_request(
self.geocoder.reverse,
"45.7733105, 4.8869339",
exactly_one=True,
language="fr"
)
self.assertEqual(
result_reverse_fr.raw['properties']['country'],
self.known_country_fr
)
|
{
"content_hash": "7ebdd9ae57ff6a29815e0e16edbfead5",
"timestamp": "",
"source": "github",
"line_count": 89,
"max_line_length": 70,
"avg_line_length": 27.337078651685392,
"alnum_prop": 0.528976572133169,
"repo_name": "Vimos/geopy",
"id": "41e42cebbcf57ac1722238ad2eee3c690db70f95",
"size": "2434",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "test/geocoders/photon.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "754"
},
{
"name": "Python",
"bytes": "290796"
},
{
"name": "Shell",
"bytes": "2116"
}
],
"symlink_target": ""
}
|
import os
import re
import mock
import shutil
import pickle
import tempfile
from unittest import TestCase
from mock import patch
from repoxplorer import index
from repoxplorer.index import commits
from repoxplorer.index import projects
from repoxplorer.indexer.git import indexer
class TestExtractCmtFunctions(TestCase):
def setUp(self):
self.maxDiff = None
def test_parse_commit_msg(self):
msg = """cmt subject
body line 1
body line 2
metakey: metavalue
author_date: 123
"""
subject, metadatas = indexer.parse_commit_msg(msg)
self.assertEqual(subject, 'cmt subject')
self.assertTrue(len(metadatas) == 0)
msg = """cmt subject
body line 1
body line 2
Fix: metavalue
Co-authored-by: metavalue2
"""
subject, metadatas = indexer.parse_commit_msg(msg)
self.assertEqual(subject, 'cmt subject')
self.assertIn(('fixes-bug', 'metavalue'), metadatas)
self.assertIn(('co-authored-by', 'metavalue2'), metadatas)
self.assertTrue(len(metadatas) == 2)
msg = """cmt subject
body line 1. nokey: novalue
Acked-By: metavalue
body line 2
"""
subject, metadatas = indexer.parse_commit_msg(msg)
self.assertEqual(subject, 'cmt subject')
self.assertIn(('acked-by', 'metavalue'), metadatas)
self.assertTrue(len(metadatas) == 1)
msg = """cmt subject
body line 1
body line 2
http://metavalue
"""
subject, metadatas = indexer.parse_commit_msg(msg)
self.assertEqual(subject, 'cmt subject')
self.assertTrue(len(metadatas) == 0)
msg = """Implement feature bp-feature-cool
This patch implement blueprint bp-feature-cool. Also
it add the documentation of the feature. I included
the fix for the bug bz16 as it was releated.
body line 2
http://metavalue
"""
p1 = re.compile('.*(blueprint) ([^ .]+).*')
p2 = re.compile('.*(bug) ([^ .]+).*')
parsers = [p1, p2]
subject, metadatas = indexer.parse_commit_msg(
msg, extra_parsers=parsers)
self.assertEqual(subject, 'Implement feature bp-feature-cool')
self.assertIn(('blueprint', 'bp-feature-cool'), metadatas)
self.assertIn(('bug', 'bz16'), metadatas)
self.assertTrue(len(metadatas) == 2)
def test_parse_commit_desc_output(self):
cd = os.path.dirname(os.path.realpath(__file__))
raw = open(
os.path.join(cd, 'gitshow.sample')).read().splitlines()
output = indexer.process_commits_desc_output(
raw, ['file:///gitshow.sample'])
expected = [
{'ttl': 487,
'line_modifieds': 10,
'commit_msg': 'Make playbook and task in topic singular',
'sha': '1ef6088bb6678b78993672ffdec93c7c99a0405d',
'repos': ['file:///gitshow.sample'],
'merge_commit': False,
'committer_date': 1493425136,
'author_date': 1493424649,
'committer_email': 'author.a@test',
'files_list': [
'modules',
'modules/openstack_project',
'modules/openstack_project/files',
'modules/openstack_project/files/puppetmaster',
'modules/openstack_project/files/puppetmaster/mqtt.py'],
'author_name': 'Author A',
'committer_name': 'Author A',
'author_email': 'author.a@test',
'author_email_domain': 'test'},
{'ttl': 0,
'line_modifieds': 0,
'commit_msg': 'Merge "Cast the playbook uuid as a string"',
'sha': '0e58c2fd54a50362138849a20bced510480dac8d',
'repos': ['file:///gitshow.sample'],
'merge_commit': True,
'committer_date': 1493423272,
'author_date': 1493423272,
'committer_email': 'review@openstack.org',
'files_list': [],
'author_name': 'Jenkins',
'committer_name': 'Gerrit Code Review',
'author_email': 'jenkins@review.openstack.org',
'author_email_domain': 'review.openstack.org'},
{'ttl': 0,
'line_modifieds': 0,
'commit_msg': 'Merge "Add subunit gearman worker '
'mqtt info to firehose docs"',
'sha': 'fb7d2712a907f8f01b817889e88abaf0dad6a109',
'repos': ['file:///gitshow.sample'],
'merge_commit': True,
'committer_date': 1493413511,
'author_date': 1493413511,
'committer_email': 'review@openstack.org',
'files_list': [],
'author_name': 'Jenkins',
'committer_name': 'Gerrit Code Review',
'author_email': 'jenkins@review.openstack.org',
'author_email_domain': 'review.openstack.org'},
{'ttl': 1651141,
'line_modifieds': 64,
'commit_msg': 'Add firehose schema docs',
'sha': 'd9fda5b81f6c8d64fda2ca2c08246492e800292f',
'repos': ['file:///gitshow.sample'],
'merge_commit': False,
'committer_date': 1493244209,
'author_date': 1491593068,
'committer_email': 'author.b@test',
'files_list': [
'doc',
'doc/source',
'doc/source/components.rst',
'doc/source/firehose.rst',
'doc/source/firehose_schema.rst',
'doc/source/systems.rst'],
'author_name': 'Author A',
'committer_name': 'Author B',
'author_email': 'author.a@test',
'author_email_domain': 'test'},
{'ttl': 0,
'line_modifieds': 2,
'commit_msg': 'Fix use of _ that should be - in mqtt-ca_certs',
'sha': '8cb34d026e9c290b83c52301d82b2011406fc7d8',
'repos': ['file:///gitshow.sample'],
'merge_commit': False,
'committer_date': 1493240029,
'author_date': 1493240029,
'committer_email': 'author.c@test',
'files_list': [
'modules',
'modules/openstack_project',
'modules/openstack_project/templates',
'modules/openstack_project/templates/logstash',
'modules/openstack_project/templates/logstash/' +
'jenkins-subunit-worker.yaml.erb'],
'author_name': 'Author C',
'committer_name': 'Author C',
'author_email': 'author.c@test',
'author_email_domain': 'test'},
{'author_date': 1493240029,
'author_email': 'author.c@test',
'author_email_domain': 'test',
'author_name': 'Author C',
'commit_msg': 'Add type declarations for Windows API calls as '
'found in jaraco.windows 3.6.1. Fixes #758.',
'committer_date': 1493240029,
'committer_email': 'author.c@test',
'committer_name': 'Author C',
'files_list': [
'paramiko',
'paramiko/_winapi.py',
'sites',
'sites/www',
'sites/www/changelog.rst'],
'line_modifieds': 21,
'merge_commit': False,
'repos': ['file:///gitshow.sample'],
'sha': '88364beba125cc8e6e314885db1c909b3d526340',
'ttl': 0},
{'author_date': 1493240029,
'author_email': 'author.c@test',
'author_email_domain': 'test',
'author_name': 'Author C',
'commit_msg': 'windows linefeed was breaking /usr/bin/env from '
'executing correctly :/s/',
'committer_date': 1493240029,
'committer_email': 'author.c@test',
'committer_name': 'Author C',
'line_modifieds': 2,
'merge_commit': False,
'files_list': ['SickBeard.py'],
'repos': ['file:///gitshow.sample'],
'sha': 'f5d7eb5b623b625062cf0d3d8d552ee0ea9000dd',
'ttl': 0},
{'author_date': 1493240029,
'author_email': 'author.c@test',
'author_email_domain': 'test',
'author_name': 'Author C',
'commit_msg': 'Merge pull request #13155 from '
'coolljt0725/fix_validate_tag_name',
'committer_date': 1493240029,
'committer_email': 'author.c@test',
'committer_name': 'Author C',
'line_modifieds': 0,
'files_list': [],
'merge_commit': True,
'repos': ['file:///gitshow.sample'],
'sha': '8e1cc08e799a83ace198ee7a3c6f9169635e7f46',
'ttl': 0},
{'author_date': 1352117713,
'author_email': '',
'author_email_domain': '',
'author_name': 'mysql-builder@oracle.com',
'commit_msg': '',
'committer_date': 1352117713,
'committer_email': '',
'committer_name': 'mysql-builder@oracle.com',
'files_list': [],
'line_modifieds': 0,
'merge_commit': False,
'repos': ['file:///gitshow.sample'],
'sha': '1c939e7487986f1ada02f1414f6101b7cd696824',
'ttl': 0},
]
self.assertListEqual(output, expected)
class TestRefsClean(TestCase):
@classmethod
def setUpClass(cls):
indexer.conf['git_store'] = tempfile.mkdtemp()
indexer.conf['db_path'] = tempfile.mkdtemp()
indexer.conf['db_cache_path'] = tempfile.mkdtemp()
indexer.conf['elasticsearch_index'] = 'repoxplorertest'
indexer.get_commits_desc = lambda path, shas: []
cls.con = index.Connector()
cls.cmts = commits.Commits(cls.con)
@classmethod
def tearDownClass(cls):
shutil.rmtree(indexer.conf['git_store'])
shutil.rmtree(indexer.conf['db_path'])
shutil.rmtree(indexer.conf['db_cache_path'])
cls.con.ic.delete(index=cls.con.index)
def setUp(self):
self.seen_refs = os.path.join(
indexer.conf['db_cache_path'], indexer.SEEN_REFS_CACHED)
if os.path.isfile(self.seen_refs):
os.unlink(self.seen_refs)
def tearDown(self):
os.unlink(self.seen_refs)
def init_fake_process_commits_desc_output(self, pi, repo_commits):
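        # Helper: monkey-patch the indexer so that, instead of invoking git,
        # it returns only the commits from repo_commits that are still missing
        # from the index, as computed by compute_to_create_to_update().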
to_create, _ = pi.compute_to_create_to_update()
to_create = [
c for c in repo_commits if c['sha'] in to_create]
indexer.process_commits_desc_output = lambda buf, ref_id: to_create
def test_cleaner(self):
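        # Scenario: index the master branch, then the devel branch, then run
        # RefsCleaner against a projects index that only references master
        # (and later no refs at all) and check commits and tags are cleaned.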
pi = indexer.RepoIndexer('p1', 'file:///tmp/p1',
con=self.con)
# This is the initial commits list of a repository we
# are going to index
repo_commits1 = [
{
'sha': '3597334f2cb10772950c97ddf2f6cc17b184',
'author_date': 1410456005,
'committer_date': 1410456005,
'author_name': 'Nakata Daisuke',
'committer_name': 'Nakata Daisuke',
'author_email': 'n.suke@joker.org',
'committer_email': 'n.suke@joker.org',
'repos': [
'file:///tmp/p1:p1:master', ],
'line_modifieds': 10,
'commit_msg': 'Add init method',
},
]
pi.commits = [rc['sha'] for rc in repo_commits1]
pi.set_branch('master')
# Start the indexation
pi.get_current_commits_indexed()
pi.compute_to_index_to_delete()
self.init_fake_process_commits_desc_output(pi, repo_commits1)
pi.index()
repo_commits2 = [
{
'sha': '3597334f2cb10772950c97ddf2f6cc17b185',
'author_date': 1410456005,
'committer_date': 1410456005,
'author_name': 'Nakata Daisuke',
'committer_name': 'Nakata Daisuke',
'author_email': 'n.suke@joker.org',
'committer_email': 'n.suke@joker.org',
'repos': [
'file:///tmp/p1:p1:devel', 'meta_ref: Fedora'],
'line_modifieds': 10,
'commit_msg': 'Add init method',
},
{
'sha': '3597334f2cb10772950c97ddf2f6cc17b186',
'author_date': 1410456005,
'committer_date': 1410456005,
'author_name': 'Nakata Daisuke',
'committer_name': 'Nakata Daisuke',
'author_email': 'n.suke@joker.org',
'committer_email': 'n.suke@joker.org',
'repos': [
'file:///tmp/p1:p1:devel',
'file:///tmp/p1:p1:master',
'meta_ref: Fedora'],
'line_modifieds': 10,
'commit_msg': 'Add init method',
},
]
pi.commits = [rc['sha'] for rc in repo_commits2]
pi.set_branch('devel')
# Start the indexation
pi.get_current_commits_indexed()
pi.compute_to_index_to_delete()
self.init_fake_process_commits_desc_output(pi, repo_commits2)
pi.index()
shas = ['3597334f2cb10772950c97ddf2f6cc17b184',
'3597334f2cb10772950c97ddf2f6cc17b185',
'3597334f2cb10772950c97ddf2f6cc17b186']
pi.tags = [['3597334f2cb10772950c97ddf2f6cc17b184', 'refs/tags/t1'],
['3597334f2cb10772950c97ddf2f6cc17b185', 'refs/tags/t2']]
pi.index_tags()
self.assertEqual(len(pi.t.get_tags(['file:///tmp/p1:p1'])), 2)
# Check 3 commits are indexed
self.assertEqual(
len([c for c in self.cmts.get_commits_by_id(shas)['docs']
if c['found']]), 3)
# Now create the RefsCleaner instance
# '3597334f2cb10772950c97ddf2f6cc17b185' will be removed
# '3597334f2cb10772950c97ddf2f6cc17b186' will be updated
# as the devel branch is no longer referenced
with patch.object(index.YAMLBackend, 'load_db'):
with patch.object(projects.Projects, 'get_projects') as gp:
projects_index = projects.Projects('/tmp/fakepath')
gp.return_value = {
'p1': {
'refs': [
{'branch': 'master',
'shortrid': 'file:///tmp/p1:p1',
'fullrid': 'file:///tmp/p1:p1:master',
'uri': 'file:///tmp/p1'}
]
}
}
rc = indexer.RefsCleaner(projects_index, con=self.con)
refs_to_clean = rc.find_refs_to_clean()
rc.clean(refs_to_clean)
        # Two commits must remain in the db (the two from the master branch)
cmts = self.cmts.get_commits_by_id(shas)['docs']
self.assertEqual(len([c for c in cmts if c['found']]), 2)
# Verify that remaining commits belong to ref
# 'file:///tmp/p1:p1:master' only
for cmt in cmts:
if not cmt['found']:
continue
self.assertIn(
'file:///tmp/p1:p1:master', cmt['_source']['repos'])
self.assertNotIn(
'file:///tmp/p1:p1:devel', cmt['_source']['repos'])
        # Here make sure tags are still referenced as the base_id still exists
self.assertEqual(len(pi.t.get_tags(['file:///tmp/p1:p1'])), 2)
# Reinstance a RefsCleaner with no repos
with patch.object(index.YAMLBackend, 'load_db'):
with patch.object(projects.Projects, 'get_projects') as gp:
projects_index = projects.Projects('/tmp/fakepath')
gp.return_value = {
'p1': {
'refs': []
}
}
rc = indexer.RefsCleaner(projects_index, con=self.con)
refs_to_clean = rc.find_refs_to_clean()
rc.clean(refs_to_clean)
# Make sure tags have been deleted
self.assertEqual(len(pi.t.get_tags(['file:///tmp/p1:p1'])), 0)
class TestRepoIndexer(TestCase):
@classmethod
def setUpClass(cls):
indexer.conf['git_store'] = tempfile.mkdtemp()
indexer.conf['db_path'] = tempfile.mkdtemp()
indexer.conf['db_cache_path'] = tempfile.mkdtemp()
indexer.conf['elasticsearch_index'] = 'repoxplorertest'
indexer.get_commits_desc = lambda path, shas: []
cls.con = index.Connector()
cls.cmts = commits.Commits(cls.con)
@classmethod
def tearDownClass(cls):
shutil.rmtree(indexer.conf['git_store'])
shutil.rmtree(indexer.conf['db_path'])
shutil.rmtree(indexer.conf['db_cache_path'])
cls.con.ic.delete(index=cls.con.index)
def setUp(self):
self.seen_refs = os.path.join(
indexer.conf['db_cache_path'], indexer.SEEN_REFS_CACHED)
if os.path.isfile(self.seen_refs):
os.unlink(self.seen_refs)
def tearDown(self):
os.unlink(self.seen_refs)
def init_fake_process_commits_desc_output(self, pi, repo_commits):
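        # Helper: monkey-patch the indexer so that, instead of invoking git,
        # it returns only the commits from repo_commits that are still missing
        # from the index, as computed by compute_to_create_to_update().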
to_create, _ = pi.compute_to_create_to_update()
to_create = [
c for c in repo_commits if c['sha'] in to_create]
indexer.process_commits_desc_output = lambda buf, ref_id: to_create
def test_init(self):
pi = indexer.RepoIndexer('p1', 'file:///tmp/p1')
pi.set_branch('master')
self.assertEqual(pi.ref_id, 'file:///tmp/p1:p1:master')
self.assertTrue(os.path.isdir(indexer.conf['git_store']))
seen_refs = pickle.load(open(self.seen_refs, 'rb'))
self.assertTrue(len(seen_refs), 1)
self.assertIn('file:///tmp/p1:p1:master', seen_refs)
def test_init_with_meta_ref(self):
pi = indexer.RepoIndexer('p1', 'file:///tmp/p1', meta_ref='Fedora')
pi.set_branch('master')
self.assertEqual(pi.ref_id, 'file:///tmp/p1:p1:master')
self.assertEqual(pi.meta_ref, 'meta_ref: Fedora')
self.assertTrue(os.path.isdir(indexer.conf['git_store']))
seen_refs = pickle.load(open(self.seen_refs, 'rb'))
# The meta-ref is not added to seen refs store
self.assertTrue(len(seen_refs), 1)
self.assertIn('file:///tmp/p1:p1:master', seen_refs)
def test_index(self):
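        # Scenario: index an initial commit, add a second commit, rewrite the
        # history back to the first commit, then index a fork (p2) sharing the
        # same sha plus a commit carrying extracted metadata.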
pi = indexer.RepoIndexer('p1', 'file:///tmp/p1',
con=self.con)
# This is the initial commits list of a repository we
# are going to index
repo_commits = [
{
'sha': '3597334f2cb10772950c97ddf2f6cc17b184',
'author_date': 1410456005,
'committer_date': 1410456005,
'author_name': 'Nakata Daisuke',
'committer_name': 'Nakata Daisuke',
'author_email': 'n.suke@joker.org',
'committer_email': 'n.suke@joker.org',
'repos': [
'file:///tmp/p1:p1:master', ],
'line_modifieds': 10,
'commit_msg': 'Add init method',
},
]
pi.commits = [rc['sha'] for rc in repo_commits]
pi.set_branch('master')
# Start the indexation
pi.heads = [
('3597334f2cb10772950c97ddf2f6cc17b184', 'refs/head/master')]
self.assertFalse(pi.is_branch_fully_indexed())
pi.get_current_commits_indexed()
pi.compute_to_index_to_delete()
self.init_fake_process_commits_desc_output(pi, repo_commits)
pi.index()
# Check
self.assertDictEqual(
self.cmts.get_commits_by_id(pi.commits)['docs'][0]['_source'],
repo_commits[0])
self.assertEqual(
len(self.cmts.get_commits_by_id(pi.commits)['docs']), 1)
self.assertTrue(pi.is_branch_fully_indexed())
# The repo evolves with an additional commit
additional_cmt = {
'sha': '3597334f2cb10772950c97ddf2f6cc17b185',
'author_date': 1410456006,
'committer_date': 1410456006,
'author_name': 'Nakata Daisuke',
'committer_name': 'Nakata Daisuke',
'author_email': 'n.suke@joker.org',
'committer_email': 'n.suke@joker.org',
'repos': [
'file:///tmp/p1:p1:master', ],
'line_modifieds': 15,
'commit_msg': 'Second commit',
}
repo_commits.append(additional_cmt)
pi.commits = [rc['sha'] for rc in repo_commits]
# Start the indexation
pi.heads = [
('3597334f2cb10772950c97ddf2f6cc17b185', 'refs/head/master')]
self.assertFalse(pi.is_branch_fully_indexed())
pi.get_current_commits_indexed()
pi.compute_to_index_to_delete()
self.init_fake_process_commits_desc_output(pi, repo_commits)
pi.index()
# Check
cmts = set([c['_source']['sha'] for c in
self.cmts.get_commits_by_id(pi.commits)['docs']])
self.assertEqual(len(cmts), 2)
cmts.difference_update(set([c['sha'] for c in repo_commits]))
self.assertEqual(len(cmts), 0)
self.assertTrue(pi.is_branch_fully_indexed())
# The repo history has been rewritten
repo_commits.pop()
pi.commits = [rc['sha'] for rc in repo_commits]
# Start the indexation
pi.heads = [
('3597334f2cb10772950c97ddf2f6cc17b184', 'refs/head/master')]
self.assertFalse(pi.is_branch_fully_indexed())
pi.get_current_commits_indexed()
pi.compute_to_index_to_delete()
self.init_fake_process_commits_desc_output(pi, repo_commits)
pi.index()
# Check
self.assertDictEqual(
self.cmts.get_commits_by_id(pi.commits)['docs'][0]['_source'],
repo_commits[0])
self.assertEqual(
len(self.cmts.get_commits_by_id(pi.commits)['docs']), 1)
self.assertTrue(pi.is_branch_fully_indexed())
# Index p2 a fork of p1
pi2 = indexer.RepoIndexer('p2', 'file:///tmp/p2',
con=self.con)
repo2_commits = [
{
'sha': '3597334f2cb10772950c97ddf2f6cc17b184',
'author_date': 1410456005,
'committer_date': 1410456005,
'author_name': 'Nakata Daisuke',
'committer_name': 'Nakata Daisuke',
'author_email': 'n.suke@joker.org',
'committer_email': 'n.suke@joker.org',
'repos': [
'file:///tmp/p2:p2:master', ],
'line_modifieds': 10,
'commit_msg': 'Init commit in repo2',
},
]
pi2.commits = [rc['sha'] for rc in repo2_commits]
pi2.set_branch('master')
# Start the indexation
pi2.heads = [
('3597334f2cb10772950c97ddf2f6cc17b184', 'refs/head/master')]
self.assertFalse(pi2.is_branch_fully_indexed())
pi2.get_current_commits_indexed()
pi2.compute_to_index_to_delete()
self.init_fake_process_commits_desc_output(pi2, repo2_commits)
pi2.index()
        # Check the commit has been marked as belonging to both repos
cmt = self.cmts.get_commit(repo2_commits[0]['sha'])
self.assertIn('file:///tmp/p2:p2:master', cmt['repos'])
self.assertIn('file:///tmp/p1:p1:master', cmt['repos'])
self.assertTrue(pi2.is_branch_fully_indexed())
# Add another commit with metadata extracted
cmt = {
'sha': '3597334f2cb10772950c97ddf2f6cc17b200',
'author_date': 1410456006,
'committer_date': 1410456006,
'author_name': 'Nakata Daisuke',
'committer_name': 'Nakata Daisuke',
'author_email': 'n.suke@joker.org',
'committer_email': 'n.suke@joker.org',
'repos': [
'file:///tmp/p2:p2:master', ],
'line_modifieds': 10,
'commit_msg': 'Some commit',
'close-bug': '123',
'related-to-story': '124',
}
repo2_commits.append(cmt)
pi2.commits = [rc['sha'] for rc in repo2_commits]
# Start the indexation
pi2.heads = [
('3597334f2cb10772950c97ddf2f6cc17b200', 'refs/head/master')]
self.assertFalse(pi2.is_branch_fully_indexed())
pi2.get_current_commits_indexed()
pi2.compute_to_index_to_delete()
self.init_fake_process_commits_desc_output(pi2, repo2_commits)
pi2.index()
        # Check the extracted commit metadata has been indexed
cmt = self.cmts.get_commit(repo2_commits[1]['sha'])
self.assertIn('close-bug', cmt)
self.assertEqual(cmt['close-bug'], '123')
self.assertIn('related-to-story', cmt)
self.assertEqual(cmt['related-to-story'], '124')
self.assertTrue(pi2.is_branch_fully_indexed())
def test_index_tags(self):
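        # Scenario: fake the refs listing returned by the git command to expose
        # two tags, index the tagged commits, then index the tags (twice, to
        # check idempotency) and verify the stored tag names and dates.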
pi = indexer.RepoIndexer('p1', 'file:///tmp/p1',
con=self.con)
with mock.patch.object(indexer, 'run') as run:
run.return_value = "123\trefs/tags/t1\n124\trefs/tags/t2\n"
pi.get_refs()
pi.get_tags()
self.assertListEqual(
pi.tags, [['123', 'refs/tags/t1'], ['124', 'refs/tags/t2']])
# This is the initial commits list of a repository we
# are going to index
repo_commits = [
{
'sha': '123',
'author_date': 1410456005,
'committer_date': 1410456005,
'author_name': 'Nakata Daisuke',
'committer_name': 'Nakata Daisuke',
'author_email': 'n.suke@joker.org',
'committer_email': 'n.suke@joker.org',
'repos': [
'file:///tmp/p1:p1:master', ],
'line_modifieds': 10,
'commit_msg': 'Add init method',
},
{
'sha': '124',
'author_date': 1410456006,
'committer_date': 1410456006,
'author_name': 'Nakata Daisuke',
'committer_name': 'Nakata Daisuke',
'author_email': 'n.suke@joker.org',
'committer_email': 'n.suke@joker.org',
'repos': [
'file:///tmp/p1:p1:master', ],
'line_modifieds': 10,
'commit_msg': 'Add init method',
},
]
pi.commits = [rc['sha'] for rc in repo_commits]
pi.set_branch('master')
# Start the indexation
pi.get_current_commits_indexed()
pi.compute_to_index_to_delete()
to_create, _ = pi.compute_to_create_to_update()
to_create = [
c for c in repo_commits if c['sha'] in to_create]
indexer.process_commits_desc_output = lambda buf, ref_id: to_create
pi.index()
# Start indexation of tags
pi.index_tags()
# Do it a second time
pi.index_tags()
tags = pi.t.get_tags(['file:///tmp/p1:p1'])
t1 = [t['_source'] for t in tags if t['_source']['sha'] == '123'][0]
self.assertEqual(t1['date'], 1410456005)
self.assertEqual(t1['name'], 't1')
t2 = [t['_source'] for t in tags if t['_source']['sha'] == '124'][0]
self.assertEqual(t2['date'], 1410456006)
self.assertEqual(t2['name'], 't2')
|
{
"content_hash": "1ef98bd0e12843cd3edb24297d5b6109",
"timestamp": "",
"source": "github",
"line_count": 687,
"max_line_length": 77,
"avg_line_length": 40.2532751091703,
"alnum_prop": 0.5345700441165835,
"repo_name": "morucci/repoxplorer",
"id": "1342e0192d2437c3051e516f526ef59caa5262ed",
"size": "27654",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "repoxplorer/tests/test_indexer_git.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1262"
},
{
"name": "Dockerfile",
"bytes": "2173"
},
{
"name": "HTML",
"bytes": "40803"
},
{
"name": "JavaScript",
"bytes": "81744"
},
{
"name": "Python",
"bytes": "344757"
},
{
"name": "Shell",
"bytes": "261"
}
],
"symlink_target": ""
}
|
"""
tests.components.switch.test_mqtt
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Tests mqtt switch.
"""
import unittest
from homeassistant.const import STATE_ON, STATE_OFF
import homeassistant.core as ha
import homeassistant.components.switch as switch
from tests.common import mock_mqtt_component, fire_mqtt_message
class TestSensorMQTT(unittest.TestCase):
""" Test the MQTT switch. """
def setUp(self): # pylint: disable=invalid-name
self.hass = ha.HomeAssistant()
self.mock_publish = mock_mqtt_component(self.hass)
def tearDown(self): # pylint: disable=invalid-name
""" Stop down stuff we started. """
self.hass.stop()
def test_controlling_state_via_topic(self):
self.assertTrue(switch.setup(self.hass, {
'switch': {
'platform': 'mqtt',
'name': 'test',
'state_topic': 'state-topic',
'command_topic': 'command-topic',
'payload_on': 'beer on',
'payload_off': 'beer off'
}
}))
state = self.hass.states.get('switch.test')
self.assertEqual(STATE_OFF, state.state)
fire_mqtt_message(self.hass, 'state-topic', 'beer on')
self.hass.pool.block_till_done()
state = self.hass.states.get('switch.test')
self.assertEqual(STATE_ON, state.state)
fire_mqtt_message(self.hass, 'state-topic', 'beer off')
self.hass.pool.block_till_done()
state = self.hass.states.get('switch.test')
self.assertEqual(STATE_OFF, state.state)
def test_sending_mqtt_commands_and_optimistic(self):
self.assertTrue(switch.setup(self.hass, {
'switch': {
'platform': 'mqtt',
'name': 'test',
'command_topic': 'command-topic',
'payload_on': 'beer on',
'payload_off': 'beer off',
'qos': 2
}
}))
state = self.hass.states.get('switch.test')
self.assertEqual(STATE_OFF, state.state)
switch.turn_on(self.hass, 'switch.test')
self.hass.pool.block_till_done()
self.assertEqual(('command-topic', 'beer on', 2, False),
self.mock_publish.mock_calls[-1][1])
state = self.hass.states.get('switch.test')
self.assertEqual(STATE_ON, state.state)
switch.turn_off(self.hass, 'switch.test')
self.hass.pool.block_till_done()
self.assertEqual(('command-topic', 'beer off', 2, False),
self.mock_publish.mock_calls[-1][1])
state = self.hass.states.get('switch.test')
self.assertEqual(STATE_OFF, state.state)
def test_controlling_state_via_topic_and_json_message(self):
self.assertTrue(switch.setup(self.hass, {
'switch': {
'platform': 'mqtt',
'name': 'test',
'state_topic': 'state-topic',
'command_topic': 'command-topic',
'payload_on': 'beer on',
'payload_off': 'beer off',
'value_template': '{{ value_json.val }}'
}
}))
state = self.hass.states.get('switch.test')
self.assertEqual(STATE_OFF, state.state)
fire_mqtt_message(self.hass, 'state-topic', '{"val":"beer on"}')
self.hass.pool.block_till_done()
state = self.hass.states.get('switch.test')
self.assertEqual(STATE_ON, state.state)
fire_mqtt_message(self.hass, 'state-topic', '{"val":"beer off"}')
self.hass.pool.block_till_done()
state = self.hass.states.get('switch.test')
self.assertEqual(STATE_OFF, state.state)
|
{
"content_hash": "a410e34ece8fe8b5bfde1f073b4f73da",
"timestamp": "",
"source": "github",
"line_count": 110,
"max_line_length": 73,
"avg_line_length": 33.736363636363635,
"alnum_prop": 0.559687415790892,
"repo_name": "nevercast/home-assistant",
"id": "b7c20e5ff94fb535e5f9d4d097601132ac00033c",
"size": "3711",
"binary": false,
"copies": "5",
"ref": "refs/heads/dev",
"path": "tests/components/switch/test_mqtt.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1338771"
},
{
"name": "Python",
"bytes": "1408024"
},
{
"name": "Shell",
"bytes": "4573"
}
],
"symlink_target": ""
}
|
"""The tests for the ASUSWRT device tracker platform."""
import os
import unittest
from unittest import mock
import voluptuous as vol
from homeassistant.bootstrap import _setup_component
from homeassistant.components import device_tracker
from homeassistant.components.device_tracker.asuswrt import (
CONF_PROTOCOL, CONF_MODE, CONF_PUB_KEY, PLATFORM_SCHEMA, DOMAIN)
from homeassistant.const import (CONF_PLATFORM, CONF_PASSWORD, CONF_USERNAME,
CONF_HOST)
from tests.common import get_test_home_assistant, get_test_config_dir
FAKEFILE = None
def setup_module():
"""Setup the test module."""
global FAKEFILE
FAKEFILE = get_test_config_dir('fake_file')
with open(FAKEFILE, 'w') as out:
out.write(' ')
def teardown_module():
"""Tear down the module."""
os.remove(FAKEFILE)
class TestComponentsDeviceTrackerASUSWRT(unittest.TestCase):
"""Tests for the ASUSWRT device tracker platform."""
hass = None
def setup_method(self, _):
"""Setup things to be run when tests are started."""
self.hass = get_test_home_assistant()
self.hass.config.components = ['zone']
def teardown_method(self, _):
"""Stop everything that was started."""
try:
os.remove(self.hass.config.path(device_tracker.YAML_DEVICES))
except FileNotFoundError:
pass
def test_password_or_pub_key_required(self): \
# pylint: disable=invalid-name
"""Test creating an AsusWRT scanner without a pass or pubkey."""
self.assertFalse(_setup_component(
self.hass, DOMAIN, {DOMAIN: {
CONF_PLATFORM: 'asuswrt',
CONF_HOST: 'fake_host',
CONF_USERNAME: 'fake_user'
}}))
@mock.patch(
'homeassistant.components.device_tracker.asuswrt.AsusWrtDeviceScanner',
return_value=mock.MagicMock())
def test_get_scanner_with_password_no_pubkey(self, asuswrt_mock): \
# pylint: disable=invalid-name
"""Test creating an AsusWRT scanner with a password and no pubkey."""
conf_dict = {
DOMAIN: {
CONF_PLATFORM: 'asuswrt',
CONF_HOST: 'fake_host',
CONF_USERNAME: 'fake_user',
CONF_PASSWORD: 'fake_pass'
}
}
self.assertIsNotNone(_setup_component(self.hass, DOMAIN, conf_dict))
conf_dict[DOMAIN][CONF_MODE] = 'router'
conf_dict[DOMAIN][CONF_PROTOCOL] = 'ssh'
asuswrt_mock.assert_called_once_with(conf_dict[DOMAIN])
@mock.patch(
'homeassistant.components.device_tracker.asuswrt.AsusWrtDeviceScanner',
return_value=mock.MagicMock())
def test_get_scanner_with_pubkey_no_password(self, asuswrt_mock): \
# pylint: disable=invalid-name
"""Test creating an AsusWRT scanner with a pubkey and no password."""
conf_dict = {
device_tracker.DOMAIN: {
CONF_PLATFORM: 'asuswrt',
CONF_HOST: 'fake_host',
CONF_USERNAME: 'fake_user',
CONF_PUB_KEY: FAKEFILE
}
}
self.assertIsNotNone(_setup_component(self.hass, DOMAIN, conf_dict))
conf_dict[DOMAIN][CONF_MODE] = 'router'
conf_dict[DOMAIN][CONF_PROTOCOL] = 'ssh'
asuswrt_mock.assert_called_once_with(conf_dict[DOMAIN])
def test_ssh_login_with_pub_key(self):
"""Test that login is done with pub_key when configured to."""
ssh = mock.MagicMock()
ssh_mock = mock.patch('pexpect.pxssh.pxssh', return_value=ssh)
ssh_mock.start()
self.addCleanup(ssh_mock.stop)
conf_dict = PLATFORM_SCHEMA({
CONF_PLATFORM: 'asuswrt',
CONF_HOST: 'fake_host',
CONF_USERNAME: 'fake_user',
CONF_PUB_KEY: FAKEFILE
})
update_mock = mock.patch(
'homeassistant.components.device_tracker.asuswrt.'
'AsusWrtDeviceScanner.get_asuswrt_data')
update_mock.start()
self.addCleanup(update_mock.stop)
asuswrt = device_tracker.asuswrt.AsusWrtDeviceScanner(conf_dict)
asuswrt.ssh_connection()
ssh.login.assert_called_once_with('fake_host', 'fake_user',
ssh_key=FAKEFILE)
def test_ssh_login_with_password(self):
"""Test that login is done with password when configured to."""
ssh = mock.MagicMock()
ssh_mock = mock.patch('pexpect.pxssh.pxssh', return_value=ssh)
ssh_mock.start()
self.addCleanup(ssh_mock.stop)
conf_dict = PLATFORM_SCHEMA({
CONF_PLATFORM: 'asuswrt',
CONF_HOST: 'fake_host',
CONF_USERNAME: 'fake_user',
CONF_PASSWORD: 'fake_pass'
})
update_mock = mock.patch(
'homeassistant.components.device_tracker.asuswrt.'
'AsusWrtDeviceScanner.get_asuswrt_data')
update_mock.start()
self.addCleanup(update_mock.stop)
asuswrt = device_tracker.asuswrt.AsusWrtDeviceScanner(conf_dict)
asuswrt.ssh_connection()
ssh.login.assert_called_once_with('fake_host', 'fake_user',
password='fake_pass')
def test_ssh_login_without_password_or_pubkey(self): \
# pylint: disable=invalid-name
"""Test that login is not called without password or pub_key."""
ssh = mock.MagicMock()
ssh_mock = mock.patch('pexpect.pxssh.pxssh', return_value=ssh)
ssh_mock.start()
self.addCleanup(ssh_mock.stop)
conf_dict = {
CONF_PLATFORM: 'asuswrt',
CONF_HOST: 'fake_host',
CONF_USERNAME: 'fake_user',
}
with self.assertRaises(vol.Invalid):
conf_dict = PLATFORM_SCHEMA(conf_dict)
update_mock = mock.patch(
'homeassistant.components.device_tracker.asuswrt.'
'AsusWrtDeviceScanner.get_asuswrt_data')
update_mock.start()
self.addCleanup(update_mock.stop)
self.assertFalse(_setup_component(self.hass, DOMAIN,
{DOMAIN: conf_dict}))
ssh.login.assert_not_called()
|
{
"content_hash": "8ddd00906d5f187fdf6fe0b62680a42b",
"timestamp": "",
"source": "github",
"line_count": 168,
"max_line_length": 79,
"avg_line_length": 37.24404761904762,
"alnum_prop": 0.5993287517979863,
"repo_name": "leoc/home-assistant",
"id": "a4d5ee64b3227128417a2e61631936df2d8026cd",
"size": "6257",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "tests/components/device_tracker/test_asuswrt.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1366220"
},
{
"name": "Python",
"bytes": "3636900"
},
{
"name": "Ruby",
"bytes": "379"
},
{
"name": "Shell",
"bytes": "7255"
}
],
"symlink_target": ""
}
|
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField, SubmitField
from wtforms.validators import Required, Length, Email, Regexp, EqualTo
from wtforms import ValidationError
from ..models import User
class LoginForm(FlaskForm):
email = StringField('Email', validators=[Required(), Length(1, 64),
Email()])
password = PasswordField('Password', validators=[Required()])
remember_me = BooleanField('Keep me logged in')
submit = SubmitField('Log In')
class RegistrationForm(FlaskForm):
email = StringField('Email', validators=[Required(), Length(1, 64),
Email()])
username = StringField('Username', validators=[
Required(), Length(1, 64), Regexp('^[A-Za-z][A-Za-z0-9_.]*$', 0,
'Usernames must have only letters, '
'numbers, dots or underscores')])
password = PasswordField('Password', validators=[
Required(), EqualTo('password2', message='Passwords must match.')])
password2 = PasswordField('Confirm password', validators=[Required()])
submit = SubmitField('Register')
def validate_email(self, field):
if User.query.filter_by(email=field.data).first():
raise ValidationError('Email already registered.')
def validate_username(self, field):
if User.query.filter_by(username=field.data).first():
raise ValidationError('Username already in use.')
class ChangePasswordForm(FlaskForm):
old_password = PasswordField('Old password', validators=[Required()])
password = PasswordField('New password', validators=[
Required(), EqualTo('password2', message='Passwords must match')])
password2 = PasswordField('Confirm new password', validators=[Required()])
submit = SubmitField('Update Password')
class ChangeEmailForm(FlaskForm):
email = StringField('New Email', validators=[Required(), Length(1, 64),
Email()])
password = PasswordField('Password', validators=[Required()])
submit = SubmitField('Update Email Address')
def validate_email(self, field):
if User.query.filter_by(email=field.data).first():
raise ValidationError('Email already registered.')
class PasswordResetRequestForm(FlaskForm):
email = StringField('Email', validators=[Required(), Length(1, 64),
Email()])
submit = SubmitField('Reset Password')
class PasswordResetForm(FlaskForm):
email = StringField('Email', validators=[Required(), Length(1, 64),
Email()])
password = PasswordField('New Password', validators=[
Required(), EqualTo('password2', message='Passwords must match')])
password2 = PasswordField('Confirm password', validators=[Required()])
submit = SubmitField('Reset Password')
def validate_email(self, field):
if User.query.filter_by(email=field.data).first() is None:
raise ValidationError('Unknown email address.')
|
{
"content_hash": "92aecee170611b697b0ce5b27765bff4",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 80,
"avg_line_length": 46.15942028985507,
"alnum_prop": 0.6270015698587127,
"repo_name": "Tuklab/tuklab101",
"id": "91e247454e685d8547149fbcc6b09d28853d7778",
"size": "3185",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/auth/forms.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1855"
},
{
"name": "HTML",
"bytes": "16885"
},
{
"name": "Makefile",
"bytes": "413"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "79248"
},
{
"name": "Shell",
"bytes": "56705"
}
],
"symlink_target": ""
}
|
import sys
from PyQt5.QtCore import QCoreApplication, Qt
from PyQt5.QtGui import QIcon
from PyQt5.QtWidgets import QApplication, QWidget, QMainWindow, QPushButton, QAction, QMessageBox
from PyQt5.QtWidgets import QCheckBox, QProgressBar, QComboBox, QLabel, QStyleFactory
from PyQt5.QtWidgets import QFontDialog
class window(QMainWindow):
def __init__(self):
super(window, self).__init__()
self.setGeometry(50, 50, 500, 300)
self.setWindowTitle('pyqt5 Tut')
# self.setWindowIcon(QIcon('pic.png'))
extractAction = QAction('&Get to the choppah', self)
extractAction.setShortcut('Ctrl+Q')
extractAction.setStatusTip('leave the app')
extractAction.triggered.connect(self.close_application)
self.statusBar()
mainMenu = self.menuBar()
fileMenu = mainMenu.addMenu('&File')
fileMenu.addAction(extractAction)
extractAction = QAction(QIcon('pic.png'), 'flee the scene', self)
extractAction.triggered.connect(self.close_application)
self.toolBar = self.addToolBar('extraction')
self.toolBar.addAction(extractAction)
fontChoice = QAction('Font', self)
fontChoice.triggered.connect(self.font_choice)
# self.toolBar = self.addToolBar('Font')
self.toolBar.addAction(fontChoice)
self.home()
def font_choice(self):
font, valid = QFontDialog.getFont()
if valid:
self.styleChoice.setFont(font)
def home(self):
btn = QPushButton('quit', self)
btn.clicked.connect(self.close_application)
btn.resize(btn.sizeHint())
btn.move(0, 100)
checkBox = QCheckBox('Enlarge window', self)
        # checkBox.toggle()  # uncomment if the box should start checked
checkBox.move(0, 50)
checkBox.stateChanged.connect(self.enlarge_window)
self.progress = QProgressBar(self)
self.progress.setGeometry(200, 80, 250, 20)
self.btn = QPushButton('download', self)
self.btn.move(200, 120)
self.btn.clicked.connect(self.download)
self.styleChoice = QLabel('Windows', self)
comboBox = QComboBox(self)
comboBox.addItem('motif')
comboBox.addItem('Windows')
comboBox.addItem('cde')
comboBox.addItem('Plastique')
comboBox.addItem('Cleanlooks')
comboBox.addItem('windowsvista')
comboBox.move(25, 250)
self.styleChoice.move(25, 150)
comboBox.activated[str].connect(self.style_choice)
self.show()
def style_choice(self, text):
self.styleChoice.setText(text)
QApplication.setStyle(QStyleFactory.create(text))
def download(self):
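        # Simulate a download by busy-looping on a counter and pushing the
        # value to the progress bar.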
self.completed = 0
while self.completed < 100:
self.completed += 0.0001
self.progress.setValue(self.completed)
def enlarge_window(self, state):
if state == Qt.Checked:
self.setGeometry(50, 50, 1000, 600)
else:
            self.setGeometry(50, 50, 500, 300)
def close_application(self):
choice = QMessageBox.question(self, 'Message',
"Are you sure to quit?", QMessageBox.Yes |
QMessageBox.No, QMessageBox.No)
if choice == QMessageBox.Yes:
print('quit application')
sys.exit()
else:
pass
if __name__ == "__main__": # had to add this otherwise app crashed
def run():
app = QApplication(sys.argv)
Gui = window()
sys.exit(app.exec_())
run()
|
{
"content_hash": "45b9fbd414e022ade6c72f397cbde43d",
"timestamp": "",
"source": "github",
"line_count": 118,
"max_line_length": 97,
"avg_line_length": 30.652542372881356,
"alnum_prop": 0.615427149571468,
"repo_name": "kenwaldek/pythonprogramming",
"id": "ba5e0f29665f20018bce7f9fb2f4d382401f00ce",
"size": "4051",
"binary": false,
"copies": "1",
"ref": "refs/heads/pythonprogramming",
"path": "pyqt5_lesson_11.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "44435"
}
],
"symlink_target": ""
}
|
"""SCons.Scanner.RC
This module implements the dependency scanner for RC (Windows
resource-definition script) files.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Scanner/RC.py 2009/09/04 16:33:07 david"
import SCons.Node.FS
import SCons.Scanner
import re
def RCScan():
"""Return a prototype Scanner instance for scanning RC source files"""
    res_re = r'^(?:\s*#\s*(?:include)|' \
             r'.*?\s+(?:ICON|BITMAP|CURSOR|HTML|FONT|MESSAGETABLE|TYPELIB|REGISTRY|D3DFX)' \
             r'\s*.*?)' \
             r'\s*(<|"| )([^>"\s]+)(?:[>" ])*$'
resScanner = SCons.Scanner.ClassicCPP( "ResourceScanner",
"$RCSUFFIXES",
"CPPPATH",
res_re )
return resScanner
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
{
"content_hash": "31c7bc7657c106deb3558c1a89db98c1",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 90,
"avg_line_length": 37.38181818181818,
"alnum_prop": 0.669260700389105,
"repo_name": "cournape/numscons",
"id": "2498d59dee0895200b0c03d009b2ab40c48f548a",
"size": "2056",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "numscons/scons-local/scons-local-1.2.0/SCons/Scanner/RC.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "1275"
},
{
"name": "FORTRAN",
"bytes": "146"
},
{
"name": "Python",
"bytes": "2033297"
},
{
"name": "Shell",
"bytes": "421"
}
],
"symlink_target": ""
}
|
import json
import os
import re
import sys
from unittest import mock
from urllib.parse import urlparse
import django
from django import test
from django.conf import settings
from django.test.client import RequestFactory
from django.test.utils import override_settings
from django.urls import reverse
import pytest
from lxml import etree
from unittest.mock import patch
from pyquery import PyQuery as pq
from olympia import amo, core
from olympia.access import acl
from olympia.access.models import Group, GroupUser
from olympia.addons.models import Addon, AddonUser, get_random_slug
from olympia.amo.sitemap import get_sitemap_path
from olympia.amo.tests import (
APITestClient,
TestCase,
WithDynamicEndpointsAndTransactions,
check_links,
reverse_ns,
user_factory,
)
from olympia.amo.views import handler500
from olympia.users.models import UserProfile
from olympia.zadmin.models import set_config
@pytest.mark.django_db
@pytest.mark.parametrize('locale_pair', settings.LANGUAGES)
def test_locale_switcher(client, locale_pair):
response = client.get('/{}/developers/'.format(locale_pair[0]))
assert response.status_code == 200
class Test403(TestCase):
fixtures = ['base/users']
def setUp(self):
super(Test403, self).setUp()
assert self.client.login(email='clouserw@gmail.com')
def test_403_no_app(self):
response = self.client.get('/en-US/admin/', follow=True)
assert response.status_code == 403
self.assertTemplateUsed(response, 'amo/403.html')
def test_403_app(self):
response = self.client.get('/en-US/android/admin/', follow=True)
assert response.status_code == 403
self.assertTemplateUsed(response, 'amo/403.html')
class Test404(TestCase):
def test_404_no_app(self):
"""Make sure a 404 without an app doesn't turn into a 500."""
# That could happen if helpers or templates expect APP to be defined.
url = reverse('amo.monitor')
response = self.client.get(url + 'nonsense')
assert response.status_code == 404
self.assertTemplateUsed(response, 'amo/404.html')
def test_404_app_links(self):
res = self.client.get('/en-US/android/xxxxxxx')
assert res.status_code == 404
self.assertTemplateUsed(res, 'amo/404.html')
links = pq(res.content)('[role=main] ul a[href^="/en-US/android"]')
assert links.length == 4
def test_404_api_v3(self):
response = self.client.get('/api/v3/lol')
assert response.status_code == 404
data = json.loads(response.content)
assert data['detail'] == 'Not found.'
def test_404_api_v4(self):
response = self.client.get('/api/v4/lol')
assert response.status_code == 404
data = json.loads(response.content)
assert data['detail'] == 'Not found.'
class Test500(TestCase):
def test_500_logged_in(self):
self.client.login(email=user_factory().email)
response = self.client.get('/services/500')
assert response.status_code == 500
self.assertTemplateUsed(response, 'amo/500.html')
content = response.content.decode('utf-8')
assert 'data-anonymous="false"' in content
assert 'Log in' not in content
def test_500_logged_out(self):
response = self.client.get('/services/500')
assert response.status_code == 500
self.assertTemplateUsed(response, 'amo/500.html')
content = response.content.decode('utf-8')
assert 'data-anonymous="true"' in content
assert 'Log in' in content
def test_500_api(self):
# Simulate an early API 500 not caught by DRF
from olympia.api.middleware import APIRequestMiddleware
request = RequestFactory().get('/api/v4/addons/addon/lol/')
APIRequestMiddleware().process_exception(request, Exception())
response = handler500(request)
assert response.status_code == 500
assert response['Content-Type'] == 'application/json'
data = json.loads(response.content)
assert data['detail'] == 'Internal Server Error'
@override_settings(MIDDLEWARE=())
def test_500_early_exception_no_middlewares(self):
        # Simulate an early 500 causing middleware breakage - we should still
# be able to display the 500.
response = self.client.get('/services/500')
assert response.status_code == 500
self.assertTemplateUsed(response, 'amo/500.html')
content = response.content.decode('utf-8')
assert 'data-anonymous="true"' in content
assert 'Log in' not in content # No session, can't show login process.
class TestCommon(TestCase):
fixtures = ('base/users', 'base/addon_3615')
def setUp(self):
super(TestCommon, self).setUp()
self.url = reverse('apps.appversions')
def login(self, user=None, get=False):
email = '%s@mozilla.com' % user
super(TestCommon, self).login(email)
if get:
return UserProfile.objects.get(email=email)
def test_tools_regular_user(self):
self.login('regular')
response = self.client.get(self.url, follow=True)
assert not response.context['request'].user.is_developer
expected = [
('Tools', '#'),
('Submit a New Add-on', reverse('devhub.submit.agreement')),
('Submit a New Theme', reverse('devhub.submit.agreement')),
('Developer Hub', reverse('devhub.index')),
('Manage API Keys', reverse('devhub.api_key')),
]
check_links(expected, pq(response.content)('#aux-nav .tools a'), verify=False)
def test_tools_developer(self):
# Make them a developer.
user = self.login('regular', get=True)
AddonUser.objects.create(user=user, addon=Addon.objects.all()[0])
group = Group.objects.create(name='Staff', rules='Admin:Advanced')
GroupUser.objects.create(group=group, user=user)
response = self.client.get(self.url, follow=True)
assert response.context['request'].user.is_developer
expected = [
('Tools', '#'),
('Manage My Submissions', reverse('devhub.addons')),
('Submit a New Add-on', reverse('devhub.submit.agreement')),
('Submit a New Theme', reverse('devhub.submit.agreement')),
('Developer Hub', reverse('devhub.index')),
('Manage API Keys', reverse('devhub.api_key')),
]
check_links(expected, pq(response.content)('#aux-nav .tools a'), verify=False)
def test_tools_reviewer(self):
self.login('reviewer')
response = self.client.get(self.url, follow=True)
request = response.context['request']
assert not request.user.is_developer
assert acl.action_allowed(request, amo.permissions.ADDONS_REVIEW)
expected = [
('Tools', '#'),
('Submit a New Add-on', reverse('devhub.submit.agreement')),
('Submit a New Theme', reverse('devhub.submit.agreement')),
('Developer Hub', reverse('devhub.index')),
('Manage API Keys', reverse('devhub.api_key')),
('Reviewer Tools', reverse('reviewers.dashboard')),
]
check_links(expected, pq(response.content)('#aux-nav .tools a'), verify=False)
def test_tools_developer_and_reviewer(self):
# Make them a developer.
user = self.login('reviewer', get=True)
AddonUser.objects.create(user=user, addon=Addon.objects.all()[0])
response = self.client.get(self.url, follow=True)
request = response.context['request']
assert request.user.is_developer
assert acl.action_allowed(request, amo.permissions.ADDONS_REVIEW)
expected = [
('Tools', '#'),
('Manage My Submissions', reverse('devhub.addons')),
('Submit a New Add-on', reverse('devhub.submit.agreement')),
('Submit a New Theme', reverse('devhub.submit.agreement')),
('Developer Hub', reverse('devhub.index')),
('Manage API Keys', reverse('devhub.api_key')),
('Reviewer Tools', reverse('reviewers.dashboard')),
]
check_links(expected, pq(response.content)('#aux-nav .tools a'), verify=False)
def test_tools_admin(self):
self.login('admin')
response = self.client.get(self.url, follow=True)
assert response.status_code == 200
request = response.context['request']
assert not request.user.is_developer
assert acl.action_allowed(request, amo.permissions.ADDONS_REVIEW)
assert acl.action_allowed(request, amo.permissions.LOCALIZER)
assert acl.action_allowed(request, amo.permissions.ANY_ADMIN)
expected = [
('Tools', '#'),
('Submit a New Add-on', reverse('devhub.submit.agreement')),
('Submit a New Theme', reverse('devhub.submit.agreement')),
('Developer Hub', reverse('devhub.index')),
('Manage API Keys', reverse('devhub.api_key')),
('Reviewer Tools', reverse('reviewers.dashboard')),
('Admin Tools', reverse('admin:index')),
]
check_links(expected, pq(response.content)('#aux-nav .tools a'), verify=False)
def test_tools_developer_and_admin(self):
# Make them a developer.
user = self.login('admin', get=True)
AddonUser.objects.create(user=user, addon=Addon.objects.all()[0])
response = self.client.get(self.url, follow=True)
assert response.status_code == 200
request = response.context['request']
assert request.user.is_developer
assert acl.action_allowed(request, amo.permissions.ADDONS_REVIEW)
assert acl.action_allowed(request, amo.permissions.LOCALIZER)
assert acl.action_allowed(request, amo.permissions.ANY_ADMIN)
expected = [
('Tools', '#'),
('Manage My Submissions', reverse('devhub.addons')),
('Submit a New Add-on', reverse('devhub.submit.agreement')),
('Submit a New Theme', reverse('devhub.submit.agreement')),
('Developer Hub', reverse('devhub.index')),
('Manage API Keys', reverse('devhub.api_key')),
('Reviewer Tools', reverse('reviewers.dashboard')),
('Admin Tools', reverse('admin:index')),
]
check_links(expected, pq(response.content)('#aux-nav .tools a'), verify=False)
class TestOtherStuff(TestCase):
# Tests that don't need fixtures.
@mock.patch.object(settings, 'READ_ONLY', False)
def test_balloons_no_readonly(self):
response = self.client.get('/en-US/firefox/pages/appversions/')
doc = pq(response.content)
assert doc('#site-notice').length == 0
@mock.patch.object(settings, 'READ_ONLY', True)
def test_balloons_readonly(self):
response = self.client.get('/en-US/firefox/pages/appversions/')
doc = pq(response.content)
assert doc('#site-notice').length == 1
def test_heading(self):
def title_eq(url, alt, text):
response = self.client.get(url + 'pages/appversions/', follow=True)
doc = pq(response.content)
assert alt == doc('.site-title img').attr('alt')
assert text == doc('.site-title').text()
title_eq('/firefox/', 'Firefox', 'Add-ons')
@patch(
'olympia.accounts.utils.default_fxa_login_url',
lambda request: 'https://login.com',
)
def test_login_link(self):
r = self.client.get(reverse('apps.appversions'), follow=True)
doc = pq(r.content)
assert 'https://login.com' == (doc('.account.anonymous a')[1].attrib['href'])
def test_tools_loggedout(self):
r = self.client.get(reverse('apps.appversions'), follow=True)
assert pq(r.content)('#aux-nav .tools').length == 0
def test_language_selector(self):
doc = pq(test.Client().get('/en-US/firefox/pages/appversions/').content)
assert doc('form.languages option[selected]').attr('value') == 'en-us'
def test_language_selector_variables(self):
r = self.client.get('/en-US/firefox/pages/appversions/?foo=fooval&bar=barval')
doc = pq(r.content)('form.languages')
assert doc('input[type=hidden][name=foo]').attr('value') == 'fooval'
assert doc('input[type=hidden][name=bar]').attr('value') == 'barval'
@patch.object(settings, 'KNOWN_PROXIES', ['127.0.0.1'])
@patch.object(core, 'set_remote_addr')
def test_remote_addr(self, set_remote_addr_mock):
"""Make sure we're setting REMOTE_ADDR from X_FORWARDED_FOR."""
client = test.Client()
# Send X-Forwarded-For as it shows up in a wsgi request.
client.get(
'/en-US/developers/',
follow=True,
HTTP_X_FORWARDED_FOR='1.1.1.1',
REMOTE_ADDR='127.0.0.1',
)
assert set_remote_addr_mock.call_count == 2
assert set_remote_addr_mock.call_args_list[0] == (('1.1.1.1',), {})
assert set_remote_addr_mock.call_args_list[1] == ((None,), {})
def test_opensearch(self):
client = test.Client()
result = client.get('/en-US/firefox/opensearch.xml')
assert result.get('Content-Type') == 'text/xml'
doc = etree.fromstring(result.content)
e = doc.find('{http://a9.com/-/spec/opensearch/1.1/}ShortName')
assert e.text == 'Firefox Add-ons'
class TestCORS(TestCase):
fixtures = ('base/addon_3615',)
def get(self, url, **headers):
return self.client.get(url, HTTP_ORIGIN='testserver', **headers)
def options(self, url, **headers):
return self.client.options(url, HTTP_ORIGIN='somewhere', **headers)
def test_no_cors(self):
response = self.get(reverse('devhub.index'))
assert response.status_code == 200
assert not response.has_header('Access-Control-Allow-Origin')
assert not response.has_header('Access-Control-Allow-Credentials')
def test_cors_api_v3(self):
url = reverse_ns('addon-detail', api_version='v3', args=(3615,))
assert '/api/v3/' in url
response = self.get(url)
assert response.status_code == 200
assert not response.has_header('Access-Control-Allow-Credentials')
assert response['Access-Control-Allow-Origin'] == '*'
def test_cors_api_v4(self):
url = reverse_ns('addon-detail', api_version='v4', args=(3615,))
assert '/api/v4/' in url
response = self.get(url)
assert response.status_code == 200
assert not response.has_header('Access-Control-Allow-Credentials')
assert response['Access-Control-Allow-Origin'] == '*'
def test_cors_api_v5(self):
        url = reverse_ns('addon-detail', api_version='v5', args=(3615,))
        assert '/api/v5/' in url
response = self.get(url)
assert response.status_code == 200
assert not response.has_header('Access-Control-Allow-Credentials')
assert response['Access-Control-Allow-Origin'] == '*'
def test_cors_preflight(self):
url = reverse_ns('addon-detail', args=(3615,))
response = self.options(url)
assert response.status_code == 200
assert response['Access-Control-Allow-Origin'] == '*'
assert sorted(response['Access-Control-Allow-Headers'].lower().split(', ')) == [
'accept',
'accept-encoding',
'authorization',
'content-type',
'dnt',
'origin',
'user-agent',
'x-country-code',
'x-csrftoken',
'x-requested-with',
]
def test_cors_excludes_accounts_session_endpoint(self):
assert (
re.match(
settings.CORS_URLS_REGEX,
urlparse(reverse_ns('accounts.session')).path,
)
is None
)
class TestContribute(TestCase):
def test_contribute_json(self):
result = self.client.get('/contribute.json')
assert result.status_code == 200
assert result.get('Content-Type') == 'application/json'
class TestRobots(TestCase):
@override_settings(ENGAGE_ROBOTS=True)
def test_disable_collections(self):
"""Make sure /en-US/firefox/collections/ gets disabled"""
url = reverse('collections.list')
response = self.client.get('/robots.txt')
assert response.status_code == 200
assert 'Disallow: %s' % url in response.content.decode('utf-8')
@override_settings(ENGAGE_ROBOTS=True)
def test_allow_mozilla_collections(self):
"""Make sure Mozilla collections are allowed"""
id_url = f"{reverse('collections.list')}{settings.TASK_USER_ID}/"
username_url = f"{reverse('collections.list')}mozilla/"
response = self.client.get('/robots.txt')
assert response.status_code == 200
content = response.content.decode('utf-8')
assert f'Allow: {id_url}' in content
assert f'Disallow: {id_url}$' in content
assert f'Allow: {username_url}' in content
assert f'Disallow: {username_url}$' in content
def test_fake_fxa_authorization_correct_values_passed():
with override_settings(DEBUG=True): # USE_FAKE_FXA_AUTH is already True
url = reverse('fake-fxa-authorization')
response = test.Client().get(url, {'state': 'foobar'})
assert response.status_code == 200
doc = pq(response.content)
form = doc('#fake_fxa_authorization')[0]
assert form.attrib['action'] == reverse('auth:accounts.authenticate')
elm = doc('#fake_fxa_authorization input[name=code]')[0]
assert elm.attrib['value'] == 'fakecode'
elm = doc('#fake_fxa_authorization input[name=state]')[0]
assert elm.attrib['value'] == 'foobar'
elm = doc('#fake_fxa_authorization input[name=fake_fxa_email]')
assert elm # No value yet, should just be present.
def test_fake_fxa_authorization_deactivated():
url = reverse('fake-fxa-authorization')
with override_settings(DEBUG=False, USE_FAKE_FXA_AUTH=False):
response = test.Client().get(url)
assert response.status_code == 404
with override_settings(DEBUG=False, USE_FAKE_FXA_AUTH=True):
response = test.Client().get(url)
assert response.status_code == 404
with override_settings(DEBUG=True, USE_FAKE_FXA_AUTH=False):
response = test.Client().get(url)
assert response.status_code == 404
class TestAtomicRequests(WithDynamicEndpointsAndTransactions):
def setUp(self):
super(TestAtomicRequests, self).setUp()
self.slug = get_random_slug()
def _generate_view(self, method_that_will_be_tested):
        # A view should *not* be an instance method of a class: that prevents
        # attributes from being added, which in turn breaks
        # non_atomic_requests() silently.
# So we generate one by returning a regular function instead.
def actual_view(request):
Addon.objects.create(slug=self.slug)
raise RuntimeError(
'pretend this is an unhandled exception happening in a view.'
)
return actual_view
def test_post_requests_are_wrapped_in_a_transaction(self):
self.endpoint(self._generate_view('POST'))
qs = Addon.objects.filter(slug=self.slug)
assert not qs.exists()
url = reverse('test-dynamic-endpoint')
try:
with self.assertRaises(RuntimeError):
self.client.post(url)
finally:
# Make sure the transaction was rolled back.
assert qs.count() == 0
qs.all().delete()
def test_get_requests_are_not_wrapped_in_a_transaction(self):
self.endpoint(self._generate_view('GET'))
qs = Addon.objects.filter(slug=self.slug)
assert not qs.exists()
url = reverse('test-dynamic-endpoint')
try:
with self.assertRaises(RuntimeError):
self.client.get(url)
finally:
# Make sure the transaction wasn't rolled back.
assert qs.count() == 1
qs.all().delete()
class TestVersion(TestCase):
def test_version_json(self):
result = self.client.get('/__version__')
assert result.status_code == 200
assert result.get('Content-Type') == 'application/json'
content = result.json()
assert content['python'] == '%s.%s' % (
sys.version_info.major,
sys.version_info.minor,
)
assert content['django'] == '%s.%s' % (django.VERSION[0], django.VERSION[1])
class TestSiteStatusAPI(TestCase):
client_class = APITestClient
def setUp(self):
super().setUp()
self.url = reverse_ns('amo-site-status')
def test_response(self):
response = self.client.get(self.url)
assert response.status_code == 200
assert response.data == {
'read_only': False,
'notice': None,
}
set_config('site_notice', 'THIS is NOT Á TEST!')
with override_settings(READ_ONLY=True):
response = self.client.get(self.url)
assert response.data == {
'read_only': True,
'notice': 'THIS is NOT Á TEST!',
}
TEST_SITEMAPS_DIR = os.path.join(
settings.ROOT, 'src', 'olympia', 'amo', 'tests', 'sitemaps'
)
class TestSitemap(TestCase):
@override_settings(SITEMAP_STORAGE_PATH=TEST_SITEMAPS_DIR)
def test_index(self):
result = self.client.get('/sitemap.xml')
assert result.status_code == 200
assert result.get('Content-Type') == 'application/xml'
assert result[settings.XSENDFILE_HEADER] == os.path.normpath(
get_sitemap_path(None, None)
)
assert result.get('Cache-Control') == 'max-age=3600'
@override_settings(SITEMAP_STORAGE_PATH=TEST_SITEMAPS_DIR)
def test_section(self):
result = self.client.get('/sitemap.xml?section=amo')
assert result.status_code == 200
assert result.get('Content-Type') == 'application/xml'
assert result[settings.XSENDFILE_HEADER] == os.path.normpath(
get_sitemap_path('amo', None)
)
assert result.get('Cache-Control') == 'max-age=3600'
# a section with more than one page
result = self.client.get('/sitemap.xml?section=addons&app_name=firefox&p=2')
assert result.status_code == 200
assert result.get('Content-Type') == 'application/xml'
assert result[settings.XSENDFILE_HEADER] == os.path.normpath(
get_sitemap_path('addons', 'firefox', 2)
)
assert result.get('Cache-Control') == 'max-age=3600'
# and for android
result = self.client.get('/sitemap.xml?section=addons&app_name=android')
assert result.status_code == 200
assert result.get('Content-Type') == 'application/xml'
assert result[settings.XSENDFILE_HEADER] == os.path.normpath(
get_sitemap_path('addons', 'android')
)
assert result.get('Cache-Control') == 'max-age=3600'
@override_settings(SITEMAP_DEBUG_AVAILABLE=True)
def test_debug_requests(self):
# index
result = self.client.get('/sitemap.xml?debug')
assert result.status_code == 200
assert result.get('Content-Type') == 'application/xml'
assert (
b'<sitemap><loc>http://testserver/sitemap.xml?section=amo</loc>'
in result.content
)
# a section
result = self.client.get('/sitemap.xml?section=addons&app_name=firefox&debug')
assert result.status_code == 200
assert result.get('Content-Type') == 'application/xml'
# there aren't any addons so no content
assert (
b'<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" '
b'xmlns:xhtml="http://www.w3.org/1999/xhtml">\n\n</urlset>'
in result.content
)
@override_settings(SITEMAP_DEBUG_AVAILABLE=False)
def test_debug_unavailable_on_prod(self):
result = self.client.get('/sitemap.xml?debug')
        # ?debug should be ignored and the request treated as an nginx redirect
assert result.content == b''
assert result[settings.XSENDFILE_HEADER]
@override_settings(SITEMAP_DEBUG_AVAILABLE=True)
def test_exceptions(self):
# check that requesting an out of bounds page number 404s
assert self.client.get('/sitemap.xml?debug&p=10').status_code == 404
        assert self.client.get('/sitemap.xml?debug&section=amo&p=10').status_code == 404
# and a non-integer page number
assert self.client.get('/sitemap.xml?debug&p=a').status_code == 404
assert self.client.get('/sitemap.xml?debug&p=1.3').status_code == 404
# invalid sections should also fail nicely
        assert self.client.get('/sitemap.xml?debug&section=foo').status_code == 404
|
{
"content_hash": "bd796ef9a6a9102f9db9951440437778",
"timestamp": "",
"source": "github",
"line_count": 636,
"max_line_length": 88,
"avg_line_length": 39.30974842767296,
"alnum_prop": 0.6208151673933042,
"repo_name": "bqbn/addons-server",
"id": "7d7c3198f62581c11b90d45e166b5c7fcd178da0",
"size": "25027",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/olympia/amo/tests/test_views.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "810080"
},
{
"name": "Dockerfile",
"bytes": "2868"
},
{
"name": "HTML",
"bytes": "585550"
},
{
"name": "JavaScript",
"bytes": "1071952"
},
{
"name": "Makefile",
"bytes": "827"
},
{
"name": "PLSQL",
"bytes": "1074"
},
{
"name": "PLpgSQL",
"bytes": "2381"
},
{
"name": "Python",
"bytes": "5323934"
},
{
"name": "SQLPL",
"bytes": "559"
},
{
"name": "Shell",
"bytes": "11171"
},
{
"name": "Smarty",
"bytes": "1503"
}
],
"symlink_target": ""
}
|
import os
import json
import sys
from Bio import SeqIO
"""
Add the fasta sequence to the JSON to have a single source of truth
"""
fasta_file = sys.argv[1]
json_file = sys.argv[2]
out_file = sys.argv[3]
# Gather the fasta sequences
fasta_dict = {}
with open(fasta_file, 'r') as fastafh:
for record in SeqIO.parse(fastafh, "fasta"):
fasta_dict[str(record.id)] = str(record.seq)
# Combine the fasta sequences with the other data and print it out
with open(json_file, 'r') as jsonfh:
json_data = json.load(jsonfh)
for k, v in fasta_dict.items():
if k in json_data["O"]:
json_data["O"][k]["seq"] = v
elif k in json_data["H"]:
json_data["H"][k]["seq"] = v
else:
print("{} does not exist".format(k), file=sys.stderr)
with open(out_file, 'w') as jsonofh:
jsonofh.write(json.dumps(json_data))
|
{
"content_hash": "880c0ae3d9bd8a3dbe4752a002671a56",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 67,
"avg_line_length": 23.342105263157894,
"alnum_prop": 0.6144306651634723,
"repo_name": "phac-nml/ecoli_serotyping",
"id": "ce1eb9cfc61f41262e2c8eb38b1a97c76cfae603",
"size": "910",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "helper_scripts/single_json.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "79608"
}
],
"symlink_target": ""
}
|
import logging
from belogging.loader import BeloggingLoader
loader = BeloggingLoader(json=True)
loader.setup()
logger = logging.getLogger('foo')
logger.info('foo')
|
{
"content_hash": "88f5ccc8df6e42a58b61a4cd154d11b3",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 44,
"avg_line_length": 16.8,
"alnum_prop": 0.7797619047619048,
"repo_name": "georgeyk/belogging",
"id": "4fcfa050962452a0421c5e09e36b7bd74002634d",
"size": "168",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/json_example.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "701"
},
{
"name": "Python",
"bytes": "16040"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Application',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('url', models.URLField()),
('icon', models.ImageField(upload_to='lizard_apps/')),
('name', models.CharField(db_index=True, help_text='Descriptive text for labeling the icon', max_length=64)),
],
options={
'ordering': ['name'],
},
),
migrations.CreateModel(
name='ApplicationScreen',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('order', models.IntegerField(blank=True, null=True)),
('application', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='lizard_apps.Application')),
],
options={
'ordering': ['screen', 'order', 'application__name'],
},
),
migrations.CreateModel(
name='Screen',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('slug', models.SlugField(max_length=64, unique=True)),
('applications', models.ManyToManyField(related_name='screens', through='lizard_apps.ApplicationScreen', to='lizard_apps.Application')),
],
options={
'ordering': ['slug'],
},
),
migrations.AddField(
model_name='applicationscreen',
name='screen',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='lizard_apps.Screen'),
),
migrations.AlterUniqueTogether(
name='applicationscreen',
unique_together=set([('screen', 'application')]),
),
]
|
{
"content_hash": "e42a2c6b9805254d8b53550ecbbfa7f1",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 152,
"avg_line_length": 37.91379310344828,
"alnum_prop": 0.55025011368804,
"repo_name": "lizardsystem/lizard-apps",
"id": "3265d4f43408acfa0c802a9d21f4e0b6dac04c62",
"size": "2271",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lizard_apps/migrations/0001_initial.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2538"
},
{
"name": "JavaScript",
"bytes": "4516"
},
{
"name": "Python",
"bytes": "8657"
}
],
"symlink_target": ""
}
|
"""
Replay Parsing
~~~~~~~~~~~~~~
World of Tanks replay parsing and information extraction
"""
import json
import struct
import pickle
from copy import copy
from .constants import WOT_TANKS
def parse_replay(replay_blob):
"""
Parse the replay file and return the extracted information as Python dictionary
"""
num_blocks = struct.unpack('I', replay_blob[4:8])[0]
first_chunk_length = struct.unpack('I', replay_blob[8:12])[0]
first_chunk = replay_blob[12:12 + first_chunk_length]
second_chunk_length = struct.unpack('I', replay_blob[12 + first_chunk_length:12 + first_chunk_length + 4])[0]
second_chunk_start = 12 + first_chunk_length + 4
second_chunk = replay_blob[second_chunk_start:second_chunk_start + second_chunk_length]
try:
first_chunk = json.loads(first_chunk.decode('utf-8'))
except UnicodeDecodeError:
# if we can't decode the first chunk, this is probably not even a wotreplay file
return None
try:
second_chunk = json.loads(second_chunk.decode('utf-8'))
except UnicodeDecodeError:
# Second chunk does not exist if the battle was left before it ended
second_chunk = None
# after the second JSON chunk there is a Python serialized dictionary (pickle)
the_pickle = None
if num_blocks == 3:
try:
pickle_start = second_chunk_start + second_chunk_length + 4
pickle_length = struct.unpack('I',
replay_blob[second_chunk_start +
second_chunk_length:second_chunk_start +
second_chunk_length + 4])[0]
the_pickle = pickle.loads(replay_blob[pickle_start:pickle_start + pickle_length])
except pickle.UnpicklingError:
the_pickle = None
return {'first': first_chunk,
'second': second_chunk,
'pickle': the_pickle}
def players_list(replay_json, team):
""" Return the list of players of a team
:param replay_json:
:param team: 1 for first, 2 for second team
:return:
"""
vehicles = [copy(v) for v in replay_json['second'][1].values() if v['team'] == team]
for v in vehicles:
if v['vehicleType'] and len(v['vehicleType'].split(":")) == 2:
v['vehicleType'] = v['vehicleType'].split(":")[1].replace("_", " ")
else:
# not spotted?
v['vehicleType'] = None
return vehicles
def player_won(replay_json):
own_team = get_own_team(replay_json)
return replay_json['second'][0]['common']['winnerTeam'] == own_team
def get_own_team(replay_json):
player_name = replay_json['first']['playerName']
for v in replay_json['first']['vehicles'].itervalues():
if v['name'] == player_name:
return v['team']
def player_team(replay_json):
""" Returns a list of names of the players on the replay recorder's team """
own_team = get_own_team(replay_json)
return [v['name'] for v in replay_json['first']['vehicles'].values() if v['team'] == own_team]
def is_stronghold(replay_json):
""" Returns whether the replay is from a stronghold battle """
return replay_json['first']['battleType'] == 11
def is_cw(replay_json):
"""
Returns whether the replay is probably from a clan war, i.e.
all players of each team belong to the same clan
:param replay_json:
:return:
"""
team_one = players_list(replay_json, 1)
team_two = players_list(replay_json, 2)
return len(set(p['clanAbbrev'] for p in team_one)) == 1 and len(set(p['clanAbbrev'] for p in team_two)) == 1 \
and guess_clan(replay_json) != guess_enemy_clan(replay_json)
def guess_clan(replay_json):
""" Attempt to guess the friendly clan name from the replay.
Use is_cw(replay_json) before calling this to confirm it was a clan war.
"""
player_name = replay_json['first']['playerName']
for v in replay_json['first']['vehicles'].itervalues():
if v['name'] == player_name:
return v['clanAbbrev']
def guess_enemy_clan(replay_json):
""" Attempt to guess the enemy clan name from the replay.
Use is_cw(replay_json) before calling this to confirm it was a clan war.
:param replay_json:
:return:
"""
own_team = get_own_team(replay_json)
return players_list(replay_json, 1 if own_team == 2 else 2)[0]['clanAbbrev']
def score(replay_json):
own_team = get_own_team(replay_json)
own_team_deaths = 0
enemy_team_deaths = 0
for v in replay_json['second'][0]['vehicles'].itervalues():
v = v[0]
if v['deathReason'] != -1:
if v['team'] == own_team:
own_team_deaths += 1
else:
enemy_team_deaths += 1
return enemy_team_deaths, own_team_deaths
def resources_earned(json_second, player_id):
for v in json_second[0]['vehicles'].itervalues():
v = v[0]
if str(v["accountDBID"]) == str(player_id):
return v["fortResource"]
def player_performance(json_second, vehicles, players):
tank_info_by_player_name = {}
for k, v in json_second[1].iteritems():
if not v['vehicleType']:
# unrevealed enemy tank?
continue
# extract the tank text_id of the player
tank_id = v['vehicleType'].split(':')[1]
tank_info_by_player_name[v['name']] = WOT_TANKS.get(tank_id, {'tier': 10})
perf = dict()
for k, v in vehicles.iteritems():
if isinstance(v, list):
v = v[0] # new replay version format ..
        if str(v['accountDBID']) in players:
player_name = players[str(v['accountDBID'])]['name']
else:
player_name = players[v['accountDBID']]['name']
if player_name not in tank_info_by_player_name:
continue
perf[str(v['accountDBID'])] = {
'tank_info': tank_info_by_player_name[player_name],
'damageDealt': v['damageDealt'],
'potentialDamageReceived': v['potentialDamageReceived'],
'xp': v['xp'],
'kills': v['kills'],
'shots': v['shots'],
'pierced': v['piercings'] if 'piercings' in v else v['pierced'],
'capturePoints': v['capturePoints'],
'droppedCapturePoints': v['droppedCapturePoints'],
'spotted': v['spotted'],
'survived': v['deathReason'] == -1, # no death reason = survived?
'damageAssistedRadio': v['damageAssistedRadio'],
}
return perf
|
{
"content_hash": "e7e690a8d2ae52d5dc3ef3412bdb7758",
"timestamp": "",
"source": "github",
"line_count": 185,
"max_line_length": 114,
"avg_line_length": 35.902702702702705,
"alnum_prop": 0.5897320084311954,
"repo_name": "ceari/whyattend",
"id": "d24a749dc5fda8a9043de362c07d50845d46c3e5",
"size": "6642",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "whyattend/replays.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "9584"
},
{
"name": "HTML",
"bytes": "110189"
},
{
"name": "JavaScript",
"bytes": "38939"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "138633"
},
{
"name": "Ruby",
"bytes": "229"
},
{
"name": "Shell",
"bytes": "586"
}
],
"symlink_target": ""
}
|
"""
.. module:: lesscpy.plib.identifier
:synopsis: Identifier node.
Copyright (c)
See LICENSE for details.
.. moduleauthor:: Johann T. Mariusson <jtm@robot.is>
"""
import re
from .node import Node
from lesscpy.lessc import utility
from lesscpy.lib import reserved
class Identifier(Node):
"""Identifier node. Represents block identifier.
"""
def parse(self, scope):
"""Parse node. Block identifiers are stored as
strings with spaces replaced with ?
args:
scope (Scope): Current scope
raises:
SyntaxError
returns:
self
"""
names = []
name = []
self._subp = (
'@media', '@keyframes',
'@-moz-keyframes', '@-webkit-keyframes',
'@-ms-keyframes'
)
if self.tokens and hasattr(self.tokens, 'parse'):
self.tokens = list(utility.flatten([id.split() + [',']
for id in self.tokens.parse(scope).split(',')]))
self.tokens.pop()
if self.tokens and any(hasattr(t, 'parse') for t in self.tokens):
tmp_tokens = []
for t in self.tokens:
if hasattr(t, 'parse'):
tmp_tokens.append(t.parse(scope))
else:
tmp_tokens.append(t)
self.tokens = list(utility.flatten(tmp_tokens))
if self.tokens and self.tokens[0] in self._subp:
name = list(utility.flatten(self.tokens))
self.subparse = True
else:
self.subparse = False
for n in utility.flatten(self.tokens):
if n == '*':
name.append('* ')
elif n in '>+~':
if name and name[-1] == ' ':
name.pop()
name.append('?%s?' % n)
elif n == ',':
names.append(name)
name = []
else:
name.append(n)
names.append(name)
parsed = self.root(scope, names) if scope else names
# Interpolated selectors need another step, we have to replace variables. Avoid reserved words though
#
# Example: '.@{var}' results in [['.', '@{var}']]
# But: '@media print' results in [['@media', ' ', 'print']]
#
def replace_variables(tokens, scope):
return [scope.swap(t)
                    if (utility.is_variable(t) and t not in reserved.tokens)
else t
for t in tokens]
parsed = [list(utility.flatten(replace_variables(part, scope))) for part in parsed]
self.parsed = [[i for i, j in utility.pairwise(part)
if i != ' ' or (j and '?' not in j)]
for part in parsed]
return self
def root(self, scope, names):
"""Find root of identifier, from scope
args:
scope (Scope): current scope
names (list): identifier name list (, separated identifiers)
returns:
list
"""
parent = scope.scopename
if parent:
parent = parent[-1]
if parent.parsed:
parsed_names = []
for name in names:
ampersand_count = name.count('&')
if ampersand_count:
filtered_parts = []
for part in parent.parsed:
if part and part[0] not in self._subp:
filtered_parts.append(part)
permutations = list(utility.permutations_with_replacement(filtered_parts, ampersand_count))
for permutation in permutations:
parsed = []
for name_part in name:
if name_part == "&":
parent_part = permutation.pop(0)
if parsed and parsed[-1].endswith(']'):
parsed.extend(' ')
if parent_part[-1] == ' ':
parent_part.pop()
parsed.extend(parent_part)
else:
parsed.append(name_part)
parsed_names.append(parsed)
else:
# NOTE(saschpe): Maybe this code can be expressed with permutations too?
for part in parent.parsed:
if part and part[0] not in self._subp:
parsed = []
if name[0] == "@media":
parsed.extend(name)
else:
parsed.extend(part)
if part[-1] != ' ':
parsed.append(' ')
parsed.extend(name)
parsed_names.append(parsed)
else:
parsed_names.append(name)
return parsed_names
return names
def raw(self, clean=False):
"""Raw identifier.
args:
clean (bool): clean name
returns:
str
"""
if clean:
return ''.join(''.join(p) for p in self.parsed).replace('?', ' ')
return '%'.join('%'.join(p) for p in self.parsed).strip().strip('%')
def copy(self):
""" Return copy of self
Returns:
Identifier object
"""
tokens = ([t for t in self.tokens]
if isinstance(self.tokens, list)
else self.tokens)
return Identifier(tokens, 0)
def fmt(self, fills):
"""Format identifier
args:
fills (dict): replacements
returns:
str (CSS)
"""
name = ',$$'.join(''.join(p).strip()
for p in self.parsed)
        name = re.sub(r'\?(.)\?', '%(ws)s\\1%(ws)s', name) % fills
        return name.replace('$$', fills['nl']).replace('  ', ' ')
|
{
"content_hash": "65a1fbfa1446ee780ca6c98bf5a4cc1f",
"timestamp": "",
"source": "github",
"line_count": 169,
"max_line_length": 115,
"avg_line_length": 37.81656804733728,
"alnum_prop": 0.4334219996870599,
"repo_name": "fivethreeo/lesscpy",
"id": "68c55c7ceda4a18747d44e5c732d8f84f464c3f0",
"size": "6414",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "lesscpy/plib/identifier.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "71435"
},
{
"name": "Python",
"bytes": "168003"
}
],
"symlink_target": ""
}
|
from shelve import Shelf
import pytc
from shelf_with_hooks import ShelfWithHooks
class PytcHashShelf(ShelfWithHooks):
DB_CLASS = pytc.HDB
def __init__(self, filename, flag, key_type='str', dump_method='json',
cached=True, writeback=False):
db = self.DB_CLASS()
if flag == 'r':
flags = pytc.BDBOREADER
elif flag == 'c':
flags = pytc.BDBOWRITER | pytc.BDBOREADER | pytc.BDBOCREAT
else:
raise NotImplementedError
db.open(filename, flags)
Shelf.__init__(self, db, -1, writeback)
self._setup_methods(cached, key_type, dump_method)
def __delitem__(self, key):
pass
def __del__(self):
self.close()
def close(self):
self.dict.close()
class PytcBtreeShelf(PytcHashShelf):
DB_CLASS = pytc.BDB
|
{
"content_hash": "9343f00ae00567e103c3fea7d676f29c",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 74,
"avg_line_length": 23.63888888888889,
"alnum_prop": 0.5898942420681551,
"repo_name": "artemrizhov/pymorphy",
"id": "3673715c49d3024e70bd5f6c3c1f837ebbcf05e2",
"size": "867",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pymorphy/backends/shelve_source/pytc_shelve.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "217481"
},
{
"name": "Shell",
"bytes": "143"
}
],
"symlink_target": ""
}
|
"""
Example use of LevelDB operators.
"""
from airflow import models
from airflow.providers.google.leveldb.operators.leveldb import LevelDBOperator
from airflow.utils.dates import days_ago
with models.DAG(
'example_leveldb',
start_date=days_ago(2),
schedule_interval=None,
tags=['example'],
) as dag:
# [START howto_operator_leveldb_get_key]
get_key_leveldb_task = LevelDBOperator(
task_id='get_key_leveldb', leveldb_conn_id='leveldb_default', command='get', key=b'key', dag=dag
)
# [END howto_operator_leveldb_get_key]
# [START howto_operator_leveldb_put_key]
put_key_leveldb_task = LevelDBOperator(
task_id='put_key_leveldb',
leveldb_conn_id='leveldb_default',
command='put',
key=b'another_key',
value=b'another_value',
dag=dag,
)
# [END howto_operator_leveldb_put_key]
get_key_leveldb_task >> put_key_leveldb_task
|
{
"content_hash": "1fe07373ce25c63fc30fe91c82478fdb",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 104,
"avg_line_length": 30.9,
"alnum_prop": 0.6645091693635383,
"repo_name": "dhuang/incubator-airflow",
"id": "e4a732c71d6bff53274048d5d9c815175bef9f63",
"size": "1714",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "airflow/providers/google/leveldb/example_dags/example_leveldb.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "109698"
},
{
"name": "HTML",
"bytes": "264851"
},
{
"name": "JavaScript",
"bytes": "1988427"
},
{
"name": "Mako",
"bytes": "1037"
},
{
"name": "Python",
"bytes": "3357958"
},
{
"name": "Shell",
"bytes": "34442"
}
],
"symlink_target": ""
}
|
"""Database setup and migration commands."""
from oslo.config import cfg
from example.common import utils
CONF = cfg.CONF
CONF.import_opt('backend',
'example.openstack.common.db.api',
group='database')
IMPL = utils.LazyPluggable(
pivot='backend',
config_group='database',
sqlalchemy='example.db.sqlalchemy.migration')
INIT_VERSION = 0
def db_sync(version=None):
"""Migrate the database to `version` or the most recent version."""
return IMPL.db_sync(version=version)
def db_version():
"""Display the current database version."""
return IMPL.db_version()
|
{
"content_hash": "8606a2800c7d67ee5be5e20bee9860e1",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 71,
"avg_line_length": 23.51851851851852,
"alnum_prop": 0.6614173228346457,
"repo_name": "JimJiangX/BoneDragon",
"id": "3c7be0dcd5a04c4dd1d52f2b83200901da3a1ad9",
"size": "1412",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "example/db/migration.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "74985"
},
{
"name": "Shell",
"bytes": "1030"
}
],
"symlink_target": ""
}
|
import uuid
import httpretty
from keystoneclient import exceptions
from keystoneclient.tests.v3 import utils
from keystoneclient.v3 import users
class UserTests(utils.TestCase, utils.CrudTests):
def setUp(self):
super(UserTests, self).setUp()
self.key = 'user'
self.collection_key = 'users'
self.model = users.User
self.manager = self.client.users
def new_ref(self, **kwargs):
kwargs = super(UserTests, self).new_ref(**kwargs)
kwargs.setdefault('description', uuid.uuid4().hex)
kwargs.setdefault('domain_id', uuid.uuid4().hex)
kwargs.setdefault('enabled', True)
kwargs.setdefault('name', uuid.uuid4().hex)
kwargs.setdefault('default_project_id', uuid.uuid4().hex)
return kwargs
@httpretty.activate
def test_add_user_to_group(self):
group_id = uuid.uuid4().hex
ref = self.new_ref()
self.stub_url(httpretty.PUT,
['groups', group_id, self.collection_key, ref['id']],
status=204)
self.manager.add_to_group(user=ref['id'], group=group_id)
self.assertRaises(exceptions.ValidationError,
self.manager.remove_from_group,
user=ref['id'],
group=None)
@httpretty.activate
def test_list_users_in_group(self):
group_id = uuid.uuid4().hex
ref_list = [self.new_ref(), self.new_ref()]
self.stub_entity(httpretty.GET,
['groups', group_id, self.collection_key],
entity=ref_list)
returned_list = self.manager.list(group=group_id)
self.assertTrue(len(returned_list))
        for r in returned_list:
            self.assertTrue(isinstance(r, self.model))
@httpretty.activate
def test_check_user_in_group(self):
group_id = uuid.uuid4().hex
ref = self.new_ref()
self.stub_url(httpretty.HEAD,
['groups', group_id, self.collection_key, ref['id']],
status=204)
self.manager.check_in_group(user=ref['id'], group=group_id)
self.assertRaises(exceptions.ValidationError,
self.manager.check_in_group,
user=ref['id'],
group=None)
@httpretty.activate
def test_remove_user_from_group(self):
group_id = uuid.uuid4().hex
ref = self.new_ref()
self.stub_url(httpretty.DELETE,
['groups', group_id, self.collection_key, ref['id']],
status=204)
self.manager.remove_from_group(user=ref['id'], group=group_id)
self.assertRaises(exceptions.ValidationError,
self.manager.remove_from_group,
user=ref['id'],
group=None)
@httpretty.activate
def test_create_with_project(self):
# Can create a user with the deprecated project option rather than
# default_project_id.
ref = self.new_ref()
self.stub_entity(httpretty.POST, [self.collection_key],
status=201, entity=ref)
req_ref = ref.copy()
req_ref.pop('id')
param_ref = req_ref.copy()
# Use deprecated project_id rather than new default_project_id.
param_ref['project_id'] = param_ref.pop('default_project_id')
params = utils.parameterize(param_ref)
returned = self.manager.create(**params)
self.assertTrue(isinstance(returned, self.model))
for attr in ref:
self.assertEqual(
getattr(returned, attr),
ref[attr],
'Expected different %s' % attr)
self.assertEntityRequestBodyIs(req_ref)
@httpretty.activate
def test_create_with_project_and_default_project(self):
# Can create a user with the deprecated project and default_project_id.
# The backend call should only pass the default_project_id.
ref = self.new_ref()
self.stub_entity(httpretty.POST,
[self.collection_key],
status=201, entity=ref)
req_ref = ref.copy()
req_ref.pop('id')
param_ref = req_ref.copy()
# Add the deprecated project_id in the call, the value will be ignored.
param_ref['project_id'] = 'project'
params = utils.parameterize(param_ref)
returned = self.manager.create(**params)
self.assertTrue(isinstance(returned, self.model))
for attr in ref:
self.assertEqual(
getattr(returned, attr),
ref[attr],
'Expected different %s' % attr)
self.assertEntityRequestBodyIs(req_ref)
@httpretty.activate
def test_update_with_project(self):
# Can update a user with the deprecated project option rather than
# default_project_id.
ref = self.new_ref()
req_ref = ref.copy()
req_ref.pop('id')
param_ref = req_ref.copy()
self.stub_entity(httpretty.PATCH,
[self.collection_key, ref['id']],
status=200, entity=ref)
# Use deprecated project_id rather than new default_project_id.
param_ref['project_id'] = param_ref.pop('default_project_id')
params = utils.parameterize(param_ref)
returned = self.manager.update(ref['id'], **params)
self.assertTrue(isinstance(returned, self.model))
for attr in ref:
self.assertEqual(
getattr(returned, attr),
ref[attr],
'Expected different %s' % attr)
self.assertEntityRequestBodyIs(req_ref)
@httpretty.activate
def test_update_with_project_and_default_project(self, ref=None):
ref = self.new_ref()
req_ref = ref.copy()
req_ref.pop('id')
param_ref = req_ref.copy()
self.stub_entity(httpretty.PATCH,
[self.collection_key, ref['id']],
status=200, entity=ref)
# Add the deprecated project_id in the call, the value will be ignored.
param_ref['project_id'] = 'project'
params = utils.parameterize(param_ref)
returned = self.manager.update(ref['id'], **params)
self.assertTrue(isinstance(returned, self.model))
for attr in ref:
self.assertEqual(
getattr(returned, attr),
ref[attr],
'Expected different %s' % attr)
self.assertEntityRequestBodyIs(req_ref)
|
{
"content_hash": "2e13c21f752d05f0dfa4afbd6013e7de",
"timestamp": "",
"source": "github",
"line_count": 185,
"max_line_length": 79,
"avg_line_length": 35.913513513513514,
"alnum_prop": 0.5684828416616496,
"repo_name": "citrix-openstack-build/python-keystoneclient",
"id": "92135ddaa70bae06432da4b4069c72b47ecd2ca0",
"size": "7268",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "keystoneclient/tests/v3/test_users.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "16002"
},
{
"name": "JavaScript",
"bytes": "7403"
},
{
"name": "Python",
"bytes": "648424"
},
{
"name": "Shell",
"bytes": "11335"
}
],
"symlink_target": ""
}
|
import argparse
import sys
from mypyli import taxstring, kraken
def parse_kraken(kraken_f):
id_dict = {}
krak = kraken.KrakenRecord.parse_kraken_file(kraken_f, iterate=True)
for entry in krak:
if entry.classified:
id_dict[entry.name] = entry.tax_id
else:
id_dict[entry.name] = "unclassified"
return id_dict
def parse_blast6(blast6_f):
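    # Assumed input: BLAST tabular (outfmt 6) lines whose query id looks like
    # 'contig|gene' and whose last column holds one or more taxids separated
    # by ';'. A purely illustrative line:
    #   contig1|gene3<TAB>subject<TAB>98.2<TAB>...<TAB>562;83334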
id_dict = {}
with open(blast6_f, 'r') as IN:
for line in IN:
elements = line[:-1].split("\t")
contig_gene = elements[0]
contig = contig_gene.split("|")[0]
taxid = elements[-1]
# take the first assignment if there are more than one
if ";" in taxid:
taxid = taxid.split(";")[0]
if contig in id_dict:
id_dict[contig][taxid] = id_dict[contig].get(taxid, 0) + 1
else:
id_dict[contig] = {taxid: 1}
# make a final dictionary using the most abundanct taxid for each contig
final_dict = {}
for contig, tid_dicts in id_dict.items():
tids = sorted(tid_dicts, key=lambda tid: tid_dicts[tid], reverse=True)
final_dict[contig] = tids[0]
return final_dict
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("-k", help="kraken output", required=True)
parser.add_argument("-b", help="blast output", required=True)
args = parser.parse_args()
kraken_dict = parse_kraken(args.k)
blast_dict = parse_blast6(args.b)
both_unclass = 0
k_unclass = 0
b_unclass = 0
same_class = 0
lin_divides = {'k': 0, 'p': 0, 'c': 0, 'o': 0, 'f':0, 'g': 0, 's': 0, 'N': 0}
correct_contigs = []
processed = 0
total = len(kraken_dict)
for contig, k_assign in kraken_dict.items():
b_assign = blast_dict.get(contig, "unclassified")
if k_assign == "unclassified":
if b_assign == "unclassified":
both_unclass += 1
else:
k_unclass += 1
else:
if b_assign == "unclassified":
b_unclass += 1
else:
try:
k_tax = taxstring.TaxString(tax=k_assign, is_id=True, lookup=True)
b_tax = taxstring.TaxString(tax=b_assign, is_id=True, lookup=True)
if k_tax.get_tax_string() == b_tax.get_tax_string():
same_class += 1
correct_contigs.append(contig)
else:
divide = taxstring.TaxString.lineage_divides_at(k_tax, b_tax)
lin_divides[divide[0]] += 1
if divide in ["family", "genus", "species"]:
correct_contigs.append(contig)
                except Exception:  # don't let one bad taxonomy lookup abort the loop
print("Exception for contig: {}".format(contig))
        if processed % 500 == 0:
            print("{} of {} ({}%) completed...".format(processed, total, int(processed / total * 100)))
processed += 1
#if stop > 10:
# break
#stop += 1
print("\n"*3)
print(("both_unclass", both_unclass))
print(("k_unclass", k_unclass))
print(("b_unclass", b_unclass))
print(("same_class", same_class))
print("Splits at:")
for k in ['k', 'p', 'c', 'o', 'f', 'g', 's', 'N']:
print(" {}\t{}".format(k, lin_divides[k]))
with open("correct_contigs.txt", 'w') as OUT:
for contig in sorted(correct_contigs):
OUT.write("{}\n".format(contig))
|
{
"content_hash": "3fe5a66bf7edcfd5546eb9123a679518",
"timestamp": "",
"source": "github",
"line_count": 113,
"max_line_length": 102,
"avg_line_length": 32.04424778761062,
"alnum_prop": 0.5197459265396299,
"repo_name": "hunter-cameron/Bioinformatics",
"id": "b81e8fba8ac4eb2acb0b5acbb8df97d16ee9cfc6",
"size": "3623",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "job_scripts/plate_scrapes/kraken/comp_kraken_and_blast.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "8199"
},
{
"name": "Makefile",
"bytes": "105"
},
{
"name": "Perl",
"bytes": "272243"
},
{
"name": "Python",
"bytes": "770358"
},
{
"name": "R",
"bytes": "29460"
},
{
"name": "Shell",
"bytes": "34582"
}
],
"symlink_target": ""
}
|
import os
from setuptools import find_packages
from setuptools import setup
version = '0.01'
here = os.path.abspath(os.path.dirname(__file__))
try:
README = open(os.path.join(here, 'README.md')).read()
except IOError:
README = ''
install_requires = [
'numpy',
'Theano',
]
tests_require = [
'mock',
'pytest',
]
setup(
name="nnGen",
version=version,
description="neural network generator",
    long_description=README,
keywords="",
author="Derek Larson",
author_email="larson.derek.a@gmail.com",
url="https://github.com/dereklarson/nnGen",
license="MIT",
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
extras_require={
'testing': tests_require
}
)
|
{
"content_hash": "f6e07d5535164bb34d1a5324c1f1d7c7",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 57,
"avg_line_length": 20.625,
"alnum_prop": 0.6278787878787879,
"repo_name": "dereklarson/nnGen",
"id": "8cf5d1bd1a1fca2231fbaba3736c962d8e1b4b54",
"size": "825",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "7307"
},
{
"name": "Python",
"bytes": "58026"
}
],
"symlink_target": ""
}
|
import numpy as np
from montemodes.classes.structure import Structure
from montemodes.analysis.symmetry_analysis import get_symmetry_analysis
import montemodes.analysis.distorsion as distorsion
import functions.shape
print functions.shape.get_info(vertices=4)
exit()
def random_alteration_coordinates_box(initial_coordinates, fix_center=None, max_displacement=0.2):
displacement = 2 * max_displacement * (np.random.random(np.array(initial_coordinates).shape) - 0.5)
if fix_center is not None:
displacement[fix_center] *= 0.0
return initial_coordinates + displacement
# Initial perfect tetrahedral structure
initial_coordinates = [[ 0.5784585, 0.7670811, 1.3587379],
[-1.7015514, -0.0389921, -0.0374715],
[ 0.5784290, -1.6512236, -0.0374715],
[ 0.5784585, 0.7670811, -1.4336809],
[ 0.0000000, 0.0000000, 0.0000000]]
atomic_elements = ['O', 'O', 'O', 'O', 'P']
total_proportions = []
for expansion_box in np.arange(0.1, 1.0, 0.1):
#if True:
# expansion_box = 0.8
number_of_samples = 20
structures = []
for i in range(number_of_samples):
coordinates = random_alteration_coordinates_box(initial_coordinates, fix_center=4, max_displacement=expansion_box)
structures.append(Structure(coordinates=np.array(coordinates, dtype=float),
atomic_elements=np.array(atomic_elements, dtype=str)[None].T))
dist_OPO = distorsion.get_distortion_statistic_analysis(structures,
distorsion.get_distortion_indices_angles,
['O', 'P', 'O'],
show_plots=False)
dist_OP = distorsion.get_distortion_statistic_analysis(structures,
distorsion.get_distortion_indices_distances,
['O', 'P'],
show_plots=False)
print 'expansion', expansion_box, dist_OP['average'], dist_OPO['average']
proportion = get_symmetry_analysis(structures,
symmetry_to_analyze=['c 2', 'c 3', 's 4', 'r'],
shape_to_analyze=2,
central_atom=5,
symmetry_threshold=0.15,
cutoff_shape=5.0,
show_plots=False)
total_proportions.append(proportion)
print ' '.join(total_proportions[0].keys())
for proportion in total_proportions:
print ' '.join(np.array(proportion.values(), dtype=str))
|
{
"content_hash": "c06458072e2b4dfbcbc2b633d1371690",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 122,
"avg_line_length": 41.69117647058823,
"alnum_prop": 0.5379188712522046,
"repo_name": "abelcarreras/MonteModes",
"id": "33fcaca68b260cdeed74117d14e409c232320fc0",
"size": "2835",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/symrand.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "74723"
}
],
"symlink_target": ""
}
|
for i, slot in enumerate(obj.material_slots):
|
{
"content_hash": "1896775f8e3dbd119284e8fb7b9b1f1e",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 45,
"avg_line_length": 46,
"alnum_prop": 0.7608695652173914,
"repo_name": "MaxReimann/troen-simulator",
"id": "669c6d1e53a06b00b7957b572fa70809b2d8353c",
"size": "46",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "source/util/change_tex.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "22"
},
{
"name": "C++",
"bytes": "679046"
},
{
"name": "CMake",
"bytes": "62263"
},
{
"name": "GLSL",
"bytes": "18032"
},
{
"name": "JavaScript",
"bytes": "4057"
},
{
"name": "Python",
"bytes": "42425"
}
],
"symlink_target": ""
}
|
import os
from cached_property import cached_property
from airflow.configuration import conf
from airflow.utils.log.file_task_handler import FileTaskHandler
from airflow.utils.log.logging_mixin import LoggingMixin
class S3TaskHandler(FileTaskHandler, LoggingMixin):
"""
S3TaskHandler is a python log handler that handles and reads
task instance logs. It extends airflow FileTaskHandler and
uploads to and reads from S3 remote storage.
"""
def __init__(self, base_log_folder, s3_log_folder, filename_template):
super().__init__(base_log_folder, filename_template)
self.remote_base = s3_log_folder
self.log_relative_path = ''
self._hook = None
self.closed = False
self.upload_on_close = True
@cached_property
def hook(self):
remote_conn_id = conf.get('core', 'REMOTE_LOG_CONN_ID')
try:
from airflow.providers.amazon.aws.hooks.s3 import S3Hook
return S3Hook(remote_conn_id)
except Exception:
self.log.error(
'Could not create an S3Hook with connection id "%s". '
'Please make sure that airflow[aws] is installed and '
'the S3 connection exists.', remote_conn_id
)
def set_context(self, ti):
super().set_context(ti)
# Local location and remote location is needed to open and
# upload local log file to S3 remote storage.
self.log_relative_path = self._render_filename(ti, ti.try_number)
self.upload_on_close = not ti.raw
def close(self):
"""
Close and upload local log file to remote storage S3.
"""
# When application exit, system shuts down all handlers by
# calling close method. Here we check if logger is already
# closed to prevent uploading the log to remote storage multiple
# times when `logging.shutdown` is called.
if self.closed:
return
super().close()
if not self.upload_on_close:
return
local_loc = os.path.join(self.local_base, self.log_relative_path)
remote_loc = os.path.join(self.remote_base, self.log_relative_path)
if os.path.exists(local_loc):
# read log and remove old logs to get just the latest additions
with open(local_loc, 'r') as logfile:
log = logfile.read()
self.s3_write(log, remote_loc)
# Mark closed so we don't double write if close is called twice
self.closed = True
def _read(self, ti, try_number, metadata=None):
"""
Read logs of given task instance and try_number from S3 remote storage.
If failed, read the log from task instance host machine.
:param ti: task instance object
:param try_number: task instance try_number to read logs from
:param metadata: log metadata,
            can be used for streaming log reading and auto-tailing.
"""
# Explicitly getting log relative path is necessary as the given
# task instance might be different than task instance passed in
# in set_context method.
log_relative_path = self._render_filename(ti, try_number)
remote_loc = os.path.join(self.remote_base, log_relative_path)
if self.s3_log_exists(remote_loc):
# If S3 remote file exists, we do not fetch logs from task instance
# local machine even if there are errors reading remote logs, as
# returned remote_log will contain error messages.
remote_log = self.s3_read(remote_loc, return_error=True)
log = '*** Reading remote log from {}.\n{}\n'.format(
remote_loc, remote_log)
return log, {'end_of_log': True}
else:
return super()._read(ti, try_number)
def s3_log_exists(self, remote_log_location):
"""
Check if remote_log_location exists in remote storage
:param remote_log_location: log's location in remote storage
:return: True if location exists else False
"""
try:
return self.hook.get_key(remote_log_location) is not None
except Exception:
pass
return False
def s3_read(self, remote_log_location, return_error=False):
"""
Returns the log found at the remote_log_location. Returns '' if no
logs are found or there is an error.
:param remote_log_location: the log's location in remote storage
:type remote_log_location: str (path)
:param return_error: if True, returns a string error message if an
error occurs. Otherwise returns '' when an error occurs.
:type return_error: bool
"""
try:
return self.hook.read_key(remote_log_location)
except Exception:
msg = 'Could not read logs from {}'.format(remote_log_location)
self.log.exception(msg)
# return error if needed
if return_error:
return msg
def s3_write(self, log, remote_log_location, append=True):
"""
Writes the log to the remote_log_location. Fails silently if no hook
was created.
:param log: the log to write to the remote_log_location
:type log: str
:param remote_log_location: the log's location in remote storage
:type remote_log_location: str (path)
:param append: if False, any existing log file is overwritten. If True,
the new log is appended to any existing logs.
:type append: bool
"""
if append and self.s3_log_exists(remote_log_location):
old_log = self.s3_read(remote_log_location)
log = '\n'.join([old_log, log]) if old_log else log
try:
self.hook.load_string(
log,
key=remote_log_location,
replace=True,
encrypt=conf.getboolean('core', 'ENCRYPT_S3_LOGS'),
)
except Exception:
self.log.exception('Could not write logs to %s', remote_log_location)
|
{
"content_hash": "86f7d9ca186c71239e3e9da71891ad8a",
"timestamp": "",
"source": "github",
"line_count": 156,
"max_line_length": 81,
"avg_line_length": 39.44871794871795,
"alnum_prop": 0.6103347416314592,
"repo_name": "Fokko/incubator-airflow",
"id": "569c34d5cbc0358f65c6c7e712a6e34b0e0e7788",
"size": "6965",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "airflow/utils/log/s3_task_handler.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "13715"
},
{
"name": "Dockerfile",
"bytes": "14170"
},
{
"name": "HTML",
"bytes": "145596"
},
{
"name": "JavaScript",
"bytes": "25233"
},
{
"name": "Mako",
"bytes": "1339"
},
{
"name": "Python",
"bytes": "8787104"
},
{
"name": "Shell",
"bytes": "187296"
},
{
"name": "TSQL",
"bytes": "879"
}
],
"symlink_target": ""
}
|
from sympy import Basic as SympyBasic, I, exp, sqrt
from .core.circuit_algebra import (
CIdentity, CPermutation, Circuit, Concatenation, Feedback, SLH,
SeriesInverse, SeriesProduct, circuit_identity as cid)
from .core.exceptions import CannotSimplify, IncompatibleBlockStructures
from .core.hilbert_space_algebra import (
HilbertSpace, LocalSpace, ProductSpace, TrivialSpace)
from .core.operator_algebra import (
Adjoint, Commutator, IdentityOperator, LocalOperator,
LocalSigma, Operator, OperatorIndexedSum, OperatorPlus, OperatorTimes,
OperatorTrace, PseudoInverse, ScalarTimesOperator, ZeroOperator,
decompose_space, factor_for_trace)
from .library.spin_algebra import (
Jz, Jplus, Jminus, Jpjmcoeff, Jzjmcoeff, Jmjmcoeff)
from .library.fock_operators import (
Destroy, Create, Phase,
Displace, Squeeze)
from .core.scalar_algebra import (
Scalar, ScalarExpression, ScalarValue, ScalarPlus, ScalarTimes,
ScalarPower, ScalarIndexedSum, Zero, One, KroneckerDelta)
from .core.state_algebra import (
BasisKet, Bra, BraKet, CoherentStateKet, State, KetBra, KetIndexedSum,
KetPlus, LocalKet, OperatorTimesKet, ScalarTimesKet, TensorKet, TrivialKet,
ZeroKet, KetSymbol)
from .core.super_operator_algebra import (
IdentitySuperOperator, SPost, SPre, ScalarTimesSuperOperator, SuperAdjoint,
SuperOperator, SuperOperatorPlus, SuperOperatorTimes,
SuperOperatorTimesOperator, ZeroSuperOperator)
from .pattern_matching import pattern, pattern_head, wc
from ..utils.check_rules import check_rules_dict
from ..utils.indices import IndexRangeBase, SymbolicLabelBase
from ..utils.permutations import concatenate_permutations
SCALAR_TYPES = (Scalar, ) + Scalar._val_types
SCALAR_VAL_TYPES = (ScalarValue, ) + Scalar._val_types
# Scalar Rules
def _algebraic_rules_scalar():
"""Set the default algebraic rules for scalars"""
a = wc("a", head=SCALAR_VAL_TYPES)
b = wc("b", head=SCALAR_VAL_TYPES)
x = wc("x", head=SCALAR_TYPES)
y = wc("y", head=SCALAR_TYPES)
z = wc("z", head=SCALAR_TYPES)
indranges__ = wc("indranges__", head=IndexRangeBase)
ScalarTimes._binary_rules.update(check_rules_dict([
('R001', (
pattern_head(a, b),
lambda a, b: a * b)),
('R002', (
pattern_head(x, x),
lambda x: x**2)),
('R003', (
pattern_head(Zero, x),
lambda x: Zero)),
('R004', (
pattern_head(x, Zero),
lambda x: Zero)),
('R005', (
pattern_head(
pattern(ScalarPower, x, y),
pattern(ScalarPower, x, z)),
lambda x, y, z: x**(y+z))),
('R006', (
pattern_head(x, pattern(ScalarPower, x, -1)),
lambda x: One)),
]))
ScalarPower._rules.update(check_rules_dict([
('R001', (
pattern_head(a, b),
lambda a, b: a**b)),
('R002', (
pattern_head(x, 0),
lambda x: One)),
('R003', (
pattern_head(x, 1),
lambda x: x)),
('R004', (
pattern_head(pattern(ScalarPower, x, y), z),
lambda x, y, z: x**(y*z))),
]))
def pull_constfactor_from_sum(x, y, indranges):
bound_symbols = set([r.index_symbol for r in indranges])
if len(x.free_symbols.intersection(bound_symbols)) == 0:
return x * ScalarIndexedSum.create(y, *indranges)
else:
raise CannotSimplify()
ScalarIndexedSum._rules.update(check_rules_dict([
('R001', ( # sum over zero -> zero
pattern_head(Zero, indranges__),
lambda indranges: Zero)),
('R002', ( # pull constant prefactor out of sum
pattern_head(pattern(ScalarTimes, x, y), indranges__),
lambda x, y, indranges:
pull_constfactor_from_sum(x, y, indranges))),
]))
# Operator rules
def _algebraic_rules_operator():
"""Set the default algebraic rules for the operations defined in this
module"""
u = wc("u", head=SCALAR_TYPES)
v = wc("v", head=SCALAR_TYPES)
n = wc("n", head=(int, str, SymbolicLabelBase))
m = wc("m", head=(int, str, SymbolicLabelBase))
A = wc("A", head=Operator)
B = wc("B", head=Operator)
A_plus = wc("A", head=OperatorPlus)
A_times = wc("A", head=OperatorTimes)
ls = wc("ls", head=LocalSpace)
h1 = wc("h1", head=HilbertSpace)
H_ProductSpace = wc("H", head=ProductSpace)
localsigma = wc(
'localsigma', head=LocalSigma, kwargs={'hs': ls})
ra = wc("ra", head=(int, str, SymbolicLabelBase))
rb = wc("rb", head=(int, str, SymbolicLabelBase))
rc = wc("rc", head=(int, str, SymbolicLabelBase))
rd = wc("rd", head=(int, str, SymbolicLabelBase))
indranges__ = wc("indranges__", head=IndexRangeBase)
ScalarTimesOperator._rules.update(check_rules_dict([
('R001', (
pattern_head(1, A),
lambda A: A)),
('R002', (
pattern_head(0, A),
lambda A: ZeroOperator)),
('R003', (
pattern_head(u, ZeroOperator),
lambda u: ZeroOperator)),
('R004', (
pattern_head(u, pattern(ScalarTimesOperator, v, A)),
lambda u, v, A: (u * v) * A)),
('R005', (
pattern_head(-1, A_plus),
lambda A: OperatorPlus.create(*[-1 * op for op in A.args]))),
]))
OperatorTimes._binary_rules.update(check_rules_dict([
('R001', (
pattern_head(pattern(ScalarTimesOperator, u, A), B),
lambda u, A, B: u * (A * B))),
('R002', (
pattern_head(ZeroOperator, B),
lambda B: ZeroOperator)),
('R003', (
pattern_head(A, ZeroOperator),
lambda A: ZeroOperator)),
('R004', (
pattern_head(A, pattern(ScalarTimesOperator, u, B)),
lambda A, u, B: u * (A * B))),
('R005', (
pattern_head(
pattern(LocalSigma, ra, rb, hs=ls),
pattern(LocalSigma, rc, rd, hs=ls)),
lambda ls, ra, rb, rc, rd: (
KroneckerDelta(
BasisKet(rb, hs=ls).index,
BasisKet(rc, hs=ls).index) *
LocalSigma.create(ra, rd, hs=ls)))),
# Harmonic oscillator rules
('R009', (
pattern_head(pattern(Create, hs=ls), localsigma),
lambda ls, localsigma:
sqrt(localsigma.index_j + 1) * localsigma.raise_jk(j_incr=1))),
('R010', (
pattern_head(pattern(Destroy, hs=ls), localsigma),
lambda ls, localsigma:
sqrt(localsigma.index_j) * localsigma.raise_jk(j_incr=-1))),
('R011', (
pattern_head(localsigma, pattern(Destroy, hs=ls)),
lambda ls, localsigma:
sqrt(localsigma.index_k + 1) * localsigma.raise_jk(k_incr=1))),
('R012', (
pattern_head(localsigma, pattern(Create, hs=ls)),
lambda ls, localsigma:
sqrt(localsigma.index_k) * localsigma.raise_jk(k_incr=-1))),
# Normal ordering for harmonic oscillator <=> all a^* to the left, a to
# the right.
('R013', (
pattern_head(pattern(Destroy, hs=ls), pattern(Create, hs=ls)),
lambda ls: IdentityOperator + Create(hs=ls) * Destroy(hs=ls))),
# Oscillator unitary group rules
('R014', (
pattern_head(pattern(Phase, u, hs=ls), pattern(Phase, v, hs=ls)),
lambda ls, u, v: Phase.create(u + v, hs=ls))),
('R015', (
pattern_head(
pattern(Displace, u, hs=ls),
pattern(Displace, v, hs=ls)),
lambda ls, u, v: (
exp((u * v.conjugate() - u.conjugate() * v) / 2) *
Displace.create(u + v, hs=ls)))),
('R016', (
pattern_head(pattern(Destroy, hs=ls), pattern(Phase, u, hs=ls)),
lambda ls, u:
exp(I * u) * Phase.create(u, hs=ls) * Destroy(hs=ls))),
('R017', (
pattern_head(pattern(Destroy, hs=ls), pattern(Displace, u, hs=ls)),
lambda ls, u: Displace.create(u, hs=ls) * (Destroy(hs=ls) + u))),
('R018', (
pattern_head(pattern(Phase, u, hs=ls), pattern(Create, hs=ls)),
lambda ls, u:
exp(I * u) * Create(hs=ls) * Phase.create(u, hs=ls))),
('R019', (
pattern_head(pattern(Displace, u, hs=ls), pattern(Create, hs=ls)),
lambda ls, u: (((Create(hs=ls) - u.conjugate()) *
Displace.create(u, hs=ls))))),
('R020', (
pattern_head(pattern(Phase, u, hs=ls), localsigma),
lambda ls, u, localsigma:
exp(I * u * localsigma.index_j) * localsigma)),
('R021', (
pattern_head(localsigma, pattern(Phase, u, hs=ls)),
lambda ls, u, localsigma:
exp(I * u * localsigma.index_k) * localsigma)),
# Spin rules
('R022', (
pattern_head(pattern(Jplus, hs=ls), localsigma),
lambda ls, localsigma:
Jpjmcoeff(ls, localsigma.index_j, shift=True) *
localsigma.raise_jk(j_incr=1))),
('R023', (
pattern_head(pattern(Jminus, hs=ls), localsigma),
lambda ls, localsigma:
Jmjmcoeff(ls, localsigma.index_j, shift=True) *
localsigma.raise_jk(j_incr=-1))),
('R024', (
pattern_head(pattern(Jz, hs=ls), localsigma),
lambda ls, localsigma:
Jzjmcoeff(ls, localsigma.index_j, shift=True) * localsigma)),
('R025', (
pattern_head(localsigma, pattern(Jplus, hs=ls)),
lambda ls, localsigma:
Jmjmcoeff(ls, localsigma.index_k, shift=True) *
localsigma.raise_jk(k_incr=-1))),
('R026', (
pattern_head(localsigma, pattern(Jminus, hs=ls)),
lambda ls, localsigma:
Jpjmcoeff(ls, localsigma.index_k, shift=True) *
localsigma.raise_jk(k_incr=+1))),
('R027', (
pattern_head(localsigma, pattern(Jz, hs=ls)),
lambda ls, localsigma:
Jzjmcoeff(ls, localsigma.index_k, shift=True) * localsigma)),
# Normal ordering for angular momentum <=> all J_+ to the left, J_z to
# center and J_- to the right
('R028', (
pattern_head(pattern(Jminus, hs=ls), pattern(Jplus, hs=ls)),
lambda ls: -2 * Jz(hs=ls) + Jplus(hs=ls) * Jminus(hs=ls))),
('R029', (
pattern_head(pattern(Jminus, hs=ls), pattern(Jz, hs=ls)),
lambda ls: Jz(hs=ls) * Jminus(hs=ls) + Jminus(hs=ls))),
('R030', (
pattern_head(pattern(Jz, hs=ls), pattern(Jplus, hs=ls)),
lambda ls: Jplus(hs=ls) * Jz(hs=ls) + Jplus(hs=ls))),
]))
Displace._rules.update(check_rules_dict([
('R001', (
pattern_head(0, hs=ls), lambda ls: IdentityOperator))
]))
Phase._rules.update(check_rules_dict([
('R001', (
pattern_head(0, hs=ls), lambda ls: IdentityOperator))
]))
Squeeze._rules.update(check_rules_dict([
('R001', (
pattern_head(0, hs=ls), lambda ls: IdentityOperator))
]))
OperatorTrace._rules.update(check_rules_dict([
('R001', (
pattern_head(A, over_space=TrivialSpace),
lambda A: A)),
('R002', (
pattern_head(ZeroOperator, over_space=h1),
lambda h1: ZeroOperator)),
('R003', (
pattern_head(IdentityOperator, over_space=h1),
lambda h1: h1.dimension * IdentityOperator)),
('R004', (
pattern_head(A_plus, over_space=h1),
lambda h1, A: OperatorPlus.create(
*[OperatorTrace.create(o, over_space=h1)
for o in A.operands]))),
('R005', (
pattern_head(pattern(Adjoint, A), over_space=h1),
lambda h1, A: Adjoint.create(
OperatorTrace.create(A, over_space=h1)))),
('R006', (
pattern_head(pattern(ScalarTimesOperator, u, A), over_space=h1),
lambda h1, u, A: u * OperatorTrace.create(A, over_space=h1))),
('R007', (
pattern_head(A, over_space=H_ProductSpace),
lambda H, A: decompose_space(H, A))),
('R008', (
pattern_head(pattern(Create, hs=ls), over_space=ls),
lambda ls: ZeroOperator)),
('R009', (
pattern_head(pattern(Destroy, hs=ls), over_space=ls),
lambda ls: ZeroOperator)),
('R010', (
pattern_head(pattern(LocalSigma, n, m, hs=ls), over_space=ls),
lambda ls, n, m:
KroneckerDelta(
BasisKet(n, hs=ls).index,
BasisKet(m, hs=ls).index) *
IdentityOperator)),
('R011', (
pattern_head(A, over_space=ls),
lambda ls, A: factor_for_trace(ls, A))),
]))
Commutator._rules.update(check_rules_dict([
('R001', (
pattern_head(A, A), lambda A: ZeroOperator)),
('R002', (
pattern_head(
pattern(ScalarTimesOperator, u, A),
pattern(ScalarTimesOperator, v, B)),
lambda u, v, A, B: u * v * Commutator.create(A, B))),
('R003', (
pattern_head(pattern(ScalarTimesOperator, v, A), B),
lambda v, A, B: v * Commutator.create(A, B))),
('R004', (
pattern_head(A, pattern(ScalarTimesOperator, v, B)),
lambda v, A, B: v * Commutator.create(A, B))),
# special known commutators
('R005', (
pattern_head(pattern(Create, hs=ls), pattern(Destroy, hs=ls)),
lambda ls: ScalarTimesOperator(-1, IdentityOperator))),
# the remaining rules basically defer to OperatorTimes; just writing
# out the commutator will generate something simple
('R006', (
pattern_head(
wc('A', head=(
Create, Destroy, LocalSigma, Phase, Displace)),
wc('B', head=(
Create, Destroy, LocalSigma, Phase, Displace))),
lambda A, B: A * B - B * A)),
('R007', (
pattern_head(
wc('A', head=(LocalSigma, Jplus, Jminus, Jz)),
wc('B', head=(LocalSigma, Jplus, Jminus, Jz))),
lambda A, B: A * B - B * A)),
]))
def pull_constfactor_from_sum(u, A, indranges):
bound_symbols = set([r.index_symbol for r in indranges])
if len(u.free_symbols.intersection(bound_symbols)) == 0:
return u * OperatorIndexedSum.create(A, *indranges)
else:
raise CannotSimplify()
OperatorIndexedSum._rules.update(check_rules_dict([
('R001', ( # sum over zero -> zero
pattern_head(ZeroOperator, indranges__),
lambda indranges: ZeroOperator)),
('R002', ( # pull constant prefactor out of sum
pattern_head(pattern(ScalarTimesOperator, u, A), indranges__),
lambda u, A, indranges:
pull_constfactor_from_sum(u, A, indranges))),
]))
# Super-Operator rules
def _algebraic_rules_superop():
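    """Set the default algebraic rules for super-operators"""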
u = wc("u", head=SCALAR_TYPES)
v = wc("v", head=SCALAR_TYPES)
A = wc("A", head=Operator)
B = wc("B", head=Operator)
C = wc("C", head=Operator)
sA = wc("sA", head=SuperOperator)
sA__ = wc("sA__", head=SuperOperator)
sB = wc("sB", head=SuperOperator)
sA_plus = wc("sA", head=SuperOperatorPlus)
sA_times = wc("sA", head=SuperOperatorTimes)
ScalarTimesSuperOperator._rules.update(check_rules_dict([
('R001', (
pattern_head(1, sA),
lambda sA: sA)),
('R002', (
pattern_head(0, sA),
lambda sA: ZeroSuperOperator)),
('R003', (
pattern_head(u, ZeroSuperOperator),
lambda u: ZeroSuperOperator)),
('R004', (
pattern_head(u, pattern(ScalarTimesSuperOperator, v, sA)),
lambda u, v, sA: (u * v) * sA)),
]))
SuperOperatorTimes._binary_rules.update(check_rules_dict([
('R001', (
pattern_head(pattern(ScalarTimesSuperOperator, u, sA), sB),
lambda u, sA, sB: u * (sA * sB))),
('R002', (
pattern_head(sA, pattern(ScalarTimesSuperOperator, u, sB)),
lambda sA, u, sB: u * (sA * sB))),
('R003', (
pattern_head(pattern(SPre, A), pattern(SPre, B)),
lambda A, B: SPre.create(A*B))),
('R004', (
pattern_head(pattern(SPost, A), pattern(SPost, B)),
lambda A, B: SPost.create(B*A))),
]))
SPre._rules.update(check_rules_dict([
('R001', (
pattern_head(pattern(ScalarTimesOperator, u, A)),
lambda u, A: u * SPre.create(A))),
('R002', (
pattern_head(IdentityOperator),
lambda: IdentitySuperOperator)),
('R003', (
pattern_head(ZeroOperator),
lambda: ZeroSuperOperator)),
]))
SPost._rules.update(check_rules_dict([
('R001', (
pattern_head(pattern(ScalarTimesOperator, u, A)),
lambda u, A: u * SPost.create(A))),
('R002', (
pattern_head(IdentityOperator),
lambda: IdentitySuperOperator)),
('R003', (
pattern_head(ZeroOperator),
lambda: ZeroSuperOperator)),
]))
SuperOperatorTimesOperator._rules.update(check_rules_dict([
('R001', (
pattern_head(sA_plus, B),
lambda sA, B:
OperatorPlus.create(*[o*B for o in sA.operands]))),
('R002', (
pattern_head(IdentitySuperOperator, B),
lambda B: B)),
('R003', (
pattern_head(ZeroSuperOperator, B),
lambda B: ZeroOperator)),
('R004', (
pattern_head(pattern(ScalarTimesSuperOperator, u, sA), B),
lambda u, sA, B: u * (sA * B))),
('R005', (
pattern_head(sA, pattern(ScalarTimesOperator, u, B)),
lambda u, sA, B: u * (sA * B))),
('R006', (
pattern_head(sA, pattern(SuperOperatorTimesOperator, sB, C)),
lambda sA, sB, C: (sA * sB) * C)),
('R007', (
pattern_head(pattern(SPre, A), B),
lambda A, B: A*B)),
('R008', (
pattern_head(
pattern(
SuperOperatorTimes, sA__, wc('sB', head=(SPost, SPre))),
C),
lambda sA, sB, C: (
SuperOperatorTimes.create(*sA) * (sB * C)))),
('R009', (
pattern_head(pattern(SPost, A), B),
lambda A, B: B*A)),
]))
# State rules
def act_locally(op, ket):
ket_on, ket_off = ket.factor_for_space(op.space)
if ket_off != TrivialKet:
return (op * ket_on) * ket_off
raise CannotSimplify()
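# Added comment (hedged): act_locally factors the ket over op.space, so for an
# operator A_1 on H_1 and a product ket psi_1 (x) psi_2 the result is
# (A_1 * psi_1) (x) psi_2; if nothing factors off (ket_off is TrivialKet) the
# helper raises CannotSimplify and the calling rule does not apply.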
def act_locally_times_tensor(op, ket):
local_spaces = op.space.local_factors
for spc in local_spaces:
while spc < ket.space:
op_on, op_off = op.factor_for_space(spc)
ket_on, ket_off = ket.factor_for_space(spc)
if (op_on.space <= ket_on.space and
op_off.space <= ket_off.space and ket_off != TrivialKet):
return (op_on * ket_on) * (op_off * ket_off)
else:
spc = op_on.space * ket_on.space
raise CannotSimplify()
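# Added comment (hedged): act_locally_times_tensor grows a candidate subspace
# from each local factor of op.space until both op and ket split cleanly over
# it, and then returns (op_on * ket_on) * (op_off * ket_off).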
def tensor_decompose_kets(a, b, operation):
full_space = a.space * b.space
local_spaces = full_space.local_factors
for spc in local_spaces:
while spc < full_space:
a_on, a_off = a.factor_for_space(spc)
b_on, b_off = b.factor_for_space(spc)
if (a_on.space == b_on.space and a_off.space == b_off.space and
a_off != TrivialKet):
return operation(a_on, b_on) * operation(a_off, b_off)
else:
spc = a_on.space * b_on.space
raise CannotSimplify()
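# Added comment (hedged): tensor_decompose_kets applies the same splitting to a
# pair of kets, so that e.g. a bra-ket of product states factors as
#     < a1 (x) a2 | b1 (x) b2 >  ->  <a1|b1> * <a2|b2>
# whenever a and b share the same factorization into subspaces.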
def _algebraic_rules_state():
"""Set the default algebraic rules for the operations defined in this
module"""
u = wc("u", head=SCALAR_TYPES)
v = wc("v", head=SCALAR_TYPES)
n = wc("n", head=(int, str, SymbolicLabelBase))
m = wc("m", head=(int, str, SymbolicLabelBase))
k = wc("k", head=(int, str, SymbolicLabelBase))
A = wc("A", head=Operator)
A__ = wc("A__", head=Operator)
B = wc("B", head=Operator)
A_times = wc("A", head=OperatorTimes)
A_local = wc("A", head=LocalOperator)
B_local = wc("B", head=LocalOperator)
Psi_sym = wc("Psi", head=KetSymbol)
Psi = wc("Psi", head=State)
Phi = wc("Phi", head=State)
Psi_local = wc("Psi", head=LocalKet)
Psi_tensor = wc("Psi", head=TensorKet)
Phi_tensor = wc("Phi", head=TensorKet)
ls = wc("ls", head=LocalSpace)
basisket = wc('basisket', BasisKet, kwargs={'hs': ls})
ket_a = wc('a', BasisKet)
ket_b = wc('b', BasisKet)
indranges__ = wc("indranges__", head=IndexRangeBase)
sum = wc('sum', head=KetIndexedSum)
sum2 = wc('sum2', head=KetIndexedSum)
ScalarTimesKet._rules.update(check_rules_dict([
('R001', (
pattern_head(1, Psi),
lambda Psi: Psi)),
('R002', (
pattern_head(0, Psi),
lambda Psi: ZeroKet)),
('R003', (
pattern_head(u, ZeroKet),
lambda u: ZeroKet)),
('R004', (
pattern_head(u, pattern(ScalarTimesKet, v, Psi)),
lambda u, v, Psi: (u * v) * Psi))
]))
def local_rule(A, B, Psi):
return OperatorTimes.create(*A) * (B * Psi)
OperatorTimesKet._rules.update(check_rules_dict([
('R001', ( # Id * Psi = Psi
pattern_head(IdentityOperator, Psi),
lambda Psi: Psi)),
('R002', ( # 0 * Psi = 0
pattern_head(ZeroOperator, Psi),
lambda Psi: ZeroKet)),
('R003', ( # A * 0 = 0
pattern_head(A, ZeroKet),
lambda A: ZeroKet)),
('R004', ( # A * v * Psi = v * A * Psi (pull out scalar)
pattern_head(A, pattern(ScalarTimesKet, v, Psi)),
lambda A, v, Psi: v * (A * Psi))),
('R005', ( # |n><m| * |k> = delta_mk * |n>
pattern_head(
pattern(LocalSigma, n, m, hs=ls),
pattern(BasisKet, k, hs=ls)),
lambda ls, n, m, k:
KroneckerDelta(
BasisKet(m, hs=ls).index, BasisKet(k, hs=ls).index) *
BasisKet(n, hs=ls))),
# harmonic oscillator
('R006', ( # a^+ |n> = sqrt(n+1) * |n+1>
pattern_head(pattern(Create, hs=ls), basisket),
lambda basisket, ls:
sqrt(basisket.index + 1) * basisket.next())),
('R007', ( # a |n> = sqrt(n) * |n-1>
pattern_head(pattern(Destroy, hs=ls), basisket),
lambda basisket, ls:
sqrt(basisket.index) * basisket.prev())),
('R008', ( # a |alpha> = alpha * |alpha> (eigenstate of annihilator)
pattern_head(
pattern(Destroy, hs=ls),
pattern(CoherentStateKet, u, hs=ls)),
lambda ls, u: u * CoherentStateKet(u, hs=ls))),
# spin
('R009', (
pattern_head(pattern(Jplus, hs=ls), basisket),
lambda basisket, ls:
Jpjmcoeff(basisket.space, basisket.index, shift=True) *
basisket.next())),
('R010', (
pattern_head(pattern(Jminus, hs=ls), basisket),
lambda basisket, ls:
Jmjmcoeff(basisket.space, basisket.index, shift=True) *
basisket.prev())),
('R011', (
pattern_head(pattern(Jz, hs=ls), basisket),
lambda basisket, ls:
Jzjmcoeff(basisket.space, basisket.index, shift=True) *
basisket)),
('R012', (
pattern_head(A_local, Psi_tensor),
lambda A, Psi: act_locally(A, Psi))),
('R013', (
pattern_head(A_times, Psi_tensor),
lambda A, Psi: act_locally_times_tensor(A, Psi))),
('R014', (
pattern_head(A, pattern(OperatorTimesKet, B, Psi)),
lambda A, B, Psi: (
(A * B) * Psi
if (B * Psi) == OperatorTimesKet(B, Psi)
else A * (B * Psi)))),
('R015', (
pattern_head(pattern(OperatorTimes, A__, B_local), Psi_local),
local_rule)),
('R016', (
pattern_head(pattern(ScalarTimesOperator, u, A), Psi),
lambda u, A, Psi: u * (A * Psi))),
('R017', (
pattern_head(
pattern(Displace, u, hs=ls),
pattern(BasisKet, 0, hs=ls)),
lambda ls, u: CoherentStateKet(u, hs=ls))),
('R018', (
pattern_head(
pattern(Displace, u, hs=ls),
pattern(CoherentStateKet, v, hs=ls)),
lambda ls, u, v:
((Displace(u, hs=ls) * Displace(v, hs=ls)) *
BasisKet(0, hs=ls)))),
('R019', (
pattern_head(
pattern(Phase, u, hs=ls), pattern(BasisKet, m, hs=ls)),
lambda ls, u, m: exp(I * u * m) * BasisKet(m, hs=ls))),
('R020', (
pattern_head(
pattern(Phase, u, hs=ls),
pattern(CoherentStateKet, v, hs=ls)),
lambda ls, u, v: CoherentStateKet(v * exp(I * u), hs=ls))),
('R021', (
pattern_head(A, sum),
lambda A, sum: KetIndexedSum.create(A * sum.term, *sum.ranges))),
]))
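    # Added worked example for the ladder-operator rules above (illustrative,
    # assuming a LocalSpace hs with enough levels):
    #     Create(hs=hs)  * BasisKet(2, hs=hs)  ->  sqrt(3) * BasisKet(3, hs=hs)
    #     Destroy(hs=hs) * BasisKet(2, hs=hs)  ->  sqrt(2) * BasisKet(1, hs=hs)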
TensorKet._binary_rules.update(check_rules_dict([
('R001', (
pattern_head(pattern(ScalarTimesKet, u, Psi), Phi),
lambda u, Psi, Phi: u * (Psi * Phi))),
('R002', (
pattern_head(Psi, pattern(ScalarTimesKet, u, Phi)),
lambda Psi, u, Phi: u * (Psi * Phi))),
('R003', ( # delegate to __mul__
pattern_head(sum, sum2),
lambda sum, sum2: sum * sum2)),
('R004', ( # delegate to __mul__
pattern_head(Psi, sum),
lambda Psi, sum: Psi * sum)),
('R005', ( # delegate to __mul__
pattern_head(sum, Psi),
lambda sum, Psi: sum * Psi)),
]))
BraKet._rules.update(check_rules_dict([
# All rules must result in scalars or objects in the TrivialSpace
('R001', (
pattern_head(Phi, ZeroKet),
lambda Phi: Zero)),
('R002', (
pattern_head(ZeroKet, Phi),
lambda Phi: Zero)),
('R003', (
pattern_head(ket_a, ket_b),
lambda a, b: KroneckerDelta(a.index, b.index))),
('R004', (
pattern_head(Psi_sym, Psi_sym),
lambda Psi: One)),
# we're assuming every KetSymbol is normalized. If we ever want
        # to allow non-normalized states, the best thing to do would be to
# add a `norm` attribute
('R005', (
pattern_head(Psi_tensor, Phi_tensor),
lambda Psi, Phi: tensor_decompose_kets(Psi, Phi, BraKet.create))),
('R006', (
pattern_head(pattern(ScalarTimesKet, u, Psi), Phi),
lambda u, Psi, Phi: u.conjugate() * (Psi.adjoint() * Phi))),
('R007', (
pattern_head(pattern(OperatorTimesKet, A, Psi), Phi),
lambda A, Psi, Phi: (Psi.adjoint() * (A.dag() * Phi)))),
('R008', (
pattern_head(Psi, pattern(ScalarTimesKet, u, Phi)),
lambda Psi, u, Phi: u * (Psi.adjoint() * Phi))),
('R009', ( # delegate to __mul__
pattern_head(sum, sum2),
lambda sum, sum2: Bra.create(sum) * sum2)),
('R010', ( # delegate to __mul__
pattern_head(Psi, sum),
lambda Psi, sum: Bra.create(Psi) * sum)),
('R011', ( # delegate to __mul__
pattern_head(sum, Psi),
lambda sum, Psi: Bra.create(sum) * Psi)),
]))
KetBra._rules.update(check_rules_dict([
('R001', (
pattern_head(
pattern(BasisKet, m, hs=ls),
pattern(BasisKet, n, hs=ls)),
lambda ls, m, n: LocalSigma(m, n, hs=ls))),
('R002', (
pattern_head(pattern(CoherentStateKet, u, hs=ls), Phi),
lambda ls, u, Phi: (
Displace(u, hs=ls) * (BasisKet(0, hs=ls) * Phi.adjoint())))),
('R003', (
pattern_head(Phi, pattern(CoherentStateKet, u, hs=ls)),
lambda ls, u, Phi: (
(Phi * BasisKet(0, hs=ls).adjoint()) * Displace(-u, hs=ls)))),
('R004', (
pattern_head(Psi_tensor, Phi_tensor),
lambda Psi, Phi: tensor_decompose_kets(Psi, Phi, KetBra.create))),
('R005', (
pattern_head(pattern(OperatorTimesKet, A, Psi), Phi),
lambda A, Psi, Phi: A * (Psi * Phi.adjoint()))),
('R006', (
pattern_head(Psi, pattern(OperatorTimesKet, A, Phi)),
lambda Psi, A, Phi: (Psi * Phi.adjoint()) * A.adjoint())),
('R007', (
pattern_head(pattern(ScalarTimesKet, u, Psi), Phi),
lambda u, Psi, Phi: u * (Psi * Phi.adjoint()))),
('R008', (
pattern_head(Psi, pattern(ScalarTimesKet, u, Phi)),
lambda Psi, u, Phi: u.conjugate() * (Psi * Phi.adjoint()))),
('R009', ( # delegate to __mul__
pattern_head(sum, sum2),
lambda sum, sum2: sum * Bra.create(sum2))),
('R010', ( # delegate to __mul__
pattern_head(Psi, sum),
lambda Psi, sum: Psi * Bra.create(sum))),
('R011', ( # delegate to __mul__
pattern_head(sum, Psi),
lambda sum, Psi: sum * Bra.create(Psi))),
]))
def pull_constfactor_from_sum(u, Psi, indranges):
bound_symbols = set([r.index_symbol for r in indranges])
if len(u.free_symbols.intersection(bound_symbols)) == 0:
return u * KetIndexedSum.create(Psi, *indranges)
else:
raise CannotSimplify()
KetIndexedSum._rules.update(check_rules_dict([
('R001', ( # sum over zero -> zero
pattern_head(ZeroKet, indranges__),
lambda indranges: ZeroKet)),
('R002', ( # pull constant prefactor out of sum
pattern_head(pattern(ScalarTimesKet, u, Psi), indranges__),
lambda u, Psi, indranges:
pull_constfactor_from_sum(u, Psi, indranges))),
]))
# Circuit rules
def _get_common_block_structure(lhs_bs, rhs_bs):
"""For two block structures ``aa = (a1, a2, ..., an)``, ``bb = (b1, b2,
    ..., bm)``, generate the maximal common block structure so that every block
from aa and bb is contained in exactly one block of the resulting
structure. This is useful for determining how to apply the distributive
law when feeding two concatenated Circuit objects into each other.
Examples:
``(1, 1, 1), (2, 1) -> (2, 1)``
``(1, 1, 2, 1), (2, 1, 2) -> (2, 3)``
Args:
lhs_bs (tuple): first block structure
rhs_bs (tuple): second block structure
"""
# for convenience the arguments may also be Circuit objects
if isinstance(lhs_bs, Circuit):
lhs_bs = lhs_bs.block_structure
if isinstance(rhs_bs, Circuit):
rhs_bs = rhs_bs.block_structure
if sum(lhs_bs) != sum(rhs_bs):
raise IncompatibleBlockStructures(
'Blockstructures have different total channel numbers.')
if len(lhs_bs) == len(rhs_bs) == 0:
return ()
i = j = 1
lsum = 0
while True:
lsum = sum(lhs_bs[:i])
rsum = sum(rhs_bs[:j])
if lsum < rsum:
i += 1
elif rsum < lsum:
j += 1
else:
break
return (lsum, ) + _get_common_block_structure(lhs_bs[i:], rhs_bs[j:])
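# Added worked trace of the second docstring example (illustrative):
#   lhs_bs = (1, 1, 2, 1), rhs_bs = (2, 1, 2)
#   the partial sums first meet at lsum == rsum == 2, giving a common block of
#   width 2; recursing on (2, 1) vs (1, 2) they next meet at 3, giving a block
#   of width 3; the common block structure is therefore (2, 3).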
def _tensor_decompose_series(lhs, rhs):
"""Simplification method for lhs << rhs
Decompose a series product of two reducible circuits with compatible block
structures into a concatenation of individual series products between
subblocks. This method raises CannotSimplify when rhs is a CPermutation in
order not to conflict with other _rules.
"""
if isinstance(rhs, CPermutation):
raise CannotSimplify()
lhs_structure = lhs.block_structure
rhs_structure = rhs.block_structure
res_struct = _get_common_block_structure(lhs_structure, rhs_structure)
if len(res_struct) > 1:
blocks, oblocks = (
lhs.get_blocks(res_struct),
rhs.get_blocks(res_struct))
parallel_series = [SeriesProduct.create(lb, rb)
for (lb, rb) in zip(blocks, oblocks)]
return Concatenation.create(*parallel_series)
raise CannotSimplify()
def _factor_permutation_for_blocks(cperm, rhs):
"""Simplification method for cperm << rhs.
Decompose a series product of a channel permutation and a reducible circuit
with appropriate block structure by decomposing the permutation into a
    permutation within each block of rhs, a block permutation, and a residual
    part. This allows for achieving something close to a normal form for
    circuit expressions.
"""
rbs = rhs.block_structure
if rhs == cid(rhs.cdim):
return cperm
if len(rbs) > 1:
residual_lhs, transformed_rhs, carried_through_lhs \
= cperm._factorize_for_rhs(rhs)
if residual_lhs == cperm:
raise CannotSimplify()
return SeriesProduct.create(residual_lhs, transformed_rhs,
carried_through_lhs)
raise CannotSimplify()
def _pull_out_perm_lhs(lhs, rest, out_port, in_port):
"""Pull out a permutation from the Feedback of a SeriesProduct with itself.
Args:
lhs (CPermutation): The permutation circuit
rest (tuple): The other SeriesProduct operands
out_port (int): The feedback output port index
in_port (int): The feedback input port index
Returns:
Circuit: The simplified circuit
"""
out_inv, lhs_red = lhs._factor_lhs(out_port)
return lhs_red << Feedback.create(SeriesProduct.create(*rest),
out_port=out_inv, in_port=in_port)
def _pull_out_unaffected_blocks_lhs(lhs, rest, out_port, in_port):
"""In a self-Feedback of a series product, where the left-most operand is
reducible, pull all non-trivial blocks outside of the feedback.
Args:
lhs (Circuit): The reducible circuit
rest (tuple): The other SeriesProduct operands
out_port (int): The feedback output port index
in_port (int): The feedback input port index
Returns:
Circuit: The simplified circuit
"""
_, block_index = lhs.index_in_block(out_port)
bs = lhs.block_structure
nbefore, nblock, nafter = (sum(bs[:block_index]),
bs[block_index],
sum(bs[block_index + 1:]))
before, block, after = lhs.get_blocks((nbefore, nblock, nafter))
if before != cid(nbefore) or after != cid(nafter):
outer_lhs = before + cid(nblock - 1) + after
inner_lhs = cid(nbefore) + block + cid(nafter)
return outer_lhs << Feedback.create(
SeriesProduct.create(inner_lhs, *rest),
out_port=out_port, in_port=in_port)
elif block == cid(nblock):
outer_lhs = before + cid(nblock - 1) + after
return outer_lhs << Feedback.create(
SeriesProduct.create(*rest),
out_port=out_port, in_port=in_port)
raise CannotSimplify()
def _pull_out_perm_rhs(rest, rhs, out_port, in_port):
"""Similar to :func:`_pull_out_perm_lhs` but on the RHS of a series
product self-feedback."""
in_im, rhs_red = rhs._factor_rhs(in_port)
return (Feedback.create(
SeriesProduct.create(*rest),
out_port=out_port, in_port=in_im) << rhs_red)
def _pull_out_unaffected_blocks_rhs(rest, rhs, out_port, in_port):
"""Similar to :func:`_pull_out_unaffected_blocks_lhs` but on the RHS of a
series product self-feedback.
"""
_, block_index = rhs.index_in_block(in_port)
rest = tuple(rest)
bs = rhs.block_structure
(nbefore, nblock, nafter) = (sum(bs[:block_index]),
bs[block_index],
sum(bs[block_index + 1:]))
before, block, after = rhs.get_blocks((nbefore, nblock, nafter))
if before != cid(nbefore) or after != cid(nafter):
outer_rhs = before + cid(nblock - 1) + after
inner_rhs = cid(nbefore) + block + cid(nafter)
return Feedback.create(SeriesProduct.create(*(rest + (inner_rhs,))),
out_port=out_port, in_port=in_port) << outer_rhs
elif block == cid(nblock):
outer_rhs = before + cid(nblock - 1) + after
return Feedback.create(SeriesProduct.create(*rest),
out_port=out_port, in_port=in_port) << outer_rhs
raise CannotSimplify()
def _series_feedback(series, out_port, in_port):
"""Invert a series self-feedback twice to get rid of unnecessary
permutations."""
series_s = series.series_inverse().series_inverse()
if series_s == series:
raise CannotSimplify()
return series_s.feedback(out_port=out_port, in_port=in_port)
def _algebraic_rules_circuit():
"""Set the default algebraic rules for the operations defined in this
module"""
A_CPermutation = wc("A", head=CPermutation)
B_CPermutation = wc("B", head=CPermutation)
C_CPermutation = wc("C", head=CPermutation)
D_CPermutation = wc("D", head=CPermutation)
A_Concatenation = wc("A", head=Concatenation)
B_Concatenation = wc("B", head=Concatenation)
A_SeriesProduct = wc("A", head=SeriesProduct)
A_Circuit = wc("A", head=Circuit)
B_Circuit = wc("B", head=Circuit)
C_Circuit = wc("C", head=Circuit)
A__Circuit = wc("A__", head=Circuit)
B__Circuit = wc("B__", head=Circuit)
C__Circuit = wc("C__", head=Circuit)
A_SLH = wc("A", head=SLH)
B_SLH = wc("B", head=SLH)
j_int = wc("j", head=int)
k_int = wc("k", head=int)
SeriesProduct._binary_rules.update(check_rules_dict([
('R001', (
pattern_head(A_CPermutation, B_CPermutation),
lambda A, B: A.series_with_permutation(B))),
('R002', (
pattern_head(A_SLH, B_SLH),
lambda A, B: A.series_with_slh(B))),
('R003', (
pattern_head(A_Circuit, B_Circuit),
lambda A, B: _tensor_decompose_series(A, B))),
('R004', (
pattern_head(A_CPermutation, B_Circuit),
lambda A, B: _factor_permutation_for_blocks(A, B))),
('R005', (
pattern_head(A_Circuit, pattern(SeriesInverse, A_Circuit)),
lambda A: cid(A.cdim))),
('R006', (
pattern_head(pattern(SeriesInverse, A_Circuit), A_Circuit),
lambda A: cid(A.cdim))),
]))
Concatenation._binary_rules.update(check_rules_dict([
('R001', (
pattern_head(A_SLH, B_SLH),
lambda A, B: A.concatenate_slh(B))),
('R002', (
pattern_head(A_CPermutation, B_CPermutation),
lambda A, B: CPermutation.create(
concatenate_permutations(A.permutation, B.permutation)))),
('R003', (
pattern_head(A_CPermutation, CIdentity),
lambda A: CPermutation.create(
concatenate_permutations(A.permutation, (0,))))),
('R004', (
pattern_head(CIdentity, B_CPermutation),
lambda B: CPermutation.create(
concatenate_permutations((0,), B.permutation)))),
('R005', (
pattern_head(
pattern(SeriesProduct, A__Circuit, B_CPermutation),
pattern(SeriesProduct, C__Circuit, D_CPermutation)),
lambda A, B, C, D: (
(SeriesProduct.create(*A) + SeriesProduct.create(*C)) <<
(B + D)))),
('R006', (
pattern_head(
pattern(SeriesProduct, A__Circuit, B_CPermutation), C_Circuit),
lambda A, B, C: (
(SeriesProduct.create(*A) + C) << (B + cid(C.cdim))))),
('R007', (
pattern_head(
A_Circuit, pattern(SeriesProduct, B__Circuit, C_CPermutation)),
lambda A, B, C: ((A + SeriesProduct.create(*B)) <<
(cid(A.cdim) + C)))),
]))
Feedback._rules.update(check_rules_dict([
('R001', (
pattern_head(A_SeriesProduct, out_port=j_int, in_port=k_int),
lambda A, j, k: _series_feedback(A, out_port=j, in_port=k))),
('R002', (
pattern_head(
pattern(SeriesProduct, A_CPermutation, B__Circuit),
out_port=j_int, in_port=k_int),
lambda A, B, j, k: _pull_out_perm_lhs(A, B, j, k))),
('R003', (
pattern_head(
pattern(SeriesProduct, A_Concatenation, B__Circuit),
out_port=j_int, in_port=k_int),
lambda A, B, j, k: _pull_out_unaffected_blocks_lhs(A, B, j, k))),
('R004', (
pattern_head(
pattern(SeriesProduct, A__Circuit, B_CPermutation),
out_port=j_int, in_port=k_int),
lambda A, B, j, k: _pull_out_perm_rhs(A, B, j, k))),
('R005', (
pattern_head(
pattern(SeriesProduct, A__Circuit, B_Concatenation),
out_port=j_int, in_port=k_int),
lambda A, B, j, k: _pull_out_unaffected_blocks_rhs(A, B, j, k))),
]))
def _algebraic_rules():
_algebraic_rules_scalar()
_algebraic_rules_operator()
_algebraic_rules_superop()
_algebraic_rules_state()
_algebraic_rules_circuit()
_algebraic_rules()
|
{
"content_hash": "d95d88c61815de660389766c57c081e6",
"timestamp": "",
"source": "github",
"line_count": 1124,
"max_line_length": 79,
"avg_line_length": 37.60943060498221,
"alnum_prop": 0.5368438483192581,
"repo_name": "mabuchilab/QNET",
"id": "8ea2dba7fac56a106549bf6b504939c3724dd3a2",
"size": "42273",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "src/qnet/algebra/_rules.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "3915"
},
{
"name": "Python",
"bytes": "1100786"
}
],
"symlink_target": ""
}
|
from .celery import app as celery_app
__version__ = "0.1.0"
|
{
"content_hash": "d49b22d9c33dfd6176e57e630cd3236d",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 37,
"avg_line_length": 20.333333333333332,
"alnum_prop": 0.6557377049180327,
"repo_name": "fladi/qraz",
"id": "f4cff8859c6696a2ee9188817f9d837e222cfa01",
"size": "108",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/qraz/__init__.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "1219"
},
{
"name": "HTML",
"bytes": "10836"
},
{
"name": "JavaScript",
"bytes": "3103"
},
{
"name": "Python",
"bytes": "39682"
}
],
"symlink_target": ""
}
|
import json
from .traversable import Traversable
def load(*args, **kwargs):
j = json.load(*args, **kwargs)
return Traversable(j)
def loads(*args, **kwargs):
j = json.loads(*args, **kwargs)
return Traversable(j)
def dump(obj, *args, **kwargs):
if isinstance(obj, Traversable):
obj = obj._obj
return json.dump(obj, *args, **kwargs)
def dumps(obj, *args, **kwargs):
if isinstance(obj, Traversable):
obj = obj._obj
return json.dumps(obj, *args, **kwargs)
def from_dict(jdict):
"""
Return a JSane Traversable object from a dict.
"""
return Traversable(jdict)
def from_object(obj):
"""
Return a JSane Traversable object from any object (e.g. a list).
"""
return Traversable(obj)
def new(kind=dict):
return Traversable(kind())
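# Added usage sketch (hedged; assumes these helpers are re-exported by the
# jsane package and that Traversable supports attribute-style traversal):
#
#     import jsane
#     doc = jsane.loads('{"user": {"name": "alice"}}')   # -> Traversable
#     text = jsane.dumps(doc)                            # unwraps to JSON text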
|
{
"content_hash": "813fbc0bd3b972c5d175a58fc07f9e9b",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 68,
"avg_line_length": 19.046511627906977,
"alnum_prop": 0.6275946275946276,
"repo_name": "skorokithakis/jsane",
"id": "eb846de8cdcbb6483b92914bc1fb915e72550fdb",
"size": "819",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "jsane/wrapper.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "15068"
}
],
"symlink_target": ""
}
|
"""Project Euler - Problem 73 Module"""
from fractions import Fraction
from problem0071 import closest_neighbor_fraction
# Smallest Distance if 7 & new_d have no common divisors
# 2: 1/2
# 3: 1/3 & 2/3
# 4: 1/4 (2/4) 3/4
# 5: 1/5 2/5 3/5 4/5
# 6: 1/6 (2/6) (3/6) (4/6) 5/6
# -> Nr of values for d == phi(d)
# Only Count Fractions ]1/3,1/2[
def problem73(limit_d, lower_limit, upper_limit):
"""Problem 73 - Counting fractions in a range"""
result = 0
for d in range(2, limit_d+1):
right_lower = closest_neighbor_fraction(d, lower_limit, 1)
left_upper = closest_neighbor_fraction(d, upper_limit, -1)
for i in range(right_lower, left_upper+1):
# Only add Fractions that have not been added before
if Fraction(i, d).denominator == d: # This is clearly not efficient :)
result += 1
return result
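# Added sanity check (hedged): the Project Euler problem statement lists
# exactly 3 fractions strictly between 1/3 and 1/2 for d <= 8
# (3/8, 2/5 and 3/7), so problem73(8, Fraction(1, 3), Fraction(1, 2))
# should return 3.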
def run():
"""Default Run Method"""
return problem73(12000, Fraction(1, 3), Fraction(1, 2))
if __name__ == '__main__':
print("Result: ", run())
|
{
"content_hash": "21a84835d885c5c5670f5c7bd89b9b99",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 82,
"avg_line_length": 29.571428571428573,
"alnum_prop": 0.6086956521739131,
"repo_name": "rado0x54/project-euler",
"id": "b77fd3849860fecf60b6f58f7d376fb4982318bc",
"size": "1058",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/problem0073.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "88368"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
from nearpy.engine import Engine
|
{
"content_hash": "49eb9e64c8a9750b87d7473ab70ca8a1",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 38,
"avg_line_length": 24.333333333333332,
"alnum_prop": 0.7945205479452054,
"repo_name": "pombredanne/NearPy",
"id": "7dab4edfcd7410952b8d0517f926e4a571e848ad",
"size": "1126",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "nearpy/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "187032"
}
],
"symlink_target": ""
}
|
import asyncio
from unittest.mock import MagicMock
# local imports
from nautilus.network.events.consumers import KafkaBroker
loop = asyncio.get_event_loop()
def async_test(test_function):
"""
This decorator wraps a test function and executes it on the asyncio
event loop.
"""
def function(*args, **kwds):
# make sure the loop is open
# execute the test on the event loop
loop.run_until_complete(test_function(*args, **kwds))
# close the event loop
loop.stop()
return function
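# Added usage sketch (hedged; the test coroutine below is hypothetical):
#
#     @async_test
#     async def test_broker_connects():
#         broker = KafkaBroker()
#         ...
#
# The wrapped function is executed to completion on the module-level event
# loop via run_until_complete before the loop is stopped.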
|
{
"content_hash": "2fb9cead498b2343f5b45869141446a5",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 75,
"avg_line_length": 25.09090909090909,
"alnum_prop": 0.6630434782608695,
"repo_name": "aaivazis/nautilus",
"id": "4a876115ccd7ec7fea48390657f4235c89d94caa",
"size": "571",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/util/decorators.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "15430"
},
{
"name": "Python",
"bytes": "103018"
}
],
"symlink_target": ""
}
|
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class BasecampAccount(ProviderAccount):
def get_avatar_url(self):
return None
def to_str(self):
dflt = super(BasecampAccount, self).to_str()
return self.account.extra_data.get("name", dflt)
class BasecampProvider(OAuth2Provider):
id = "basecamp"
name = "Basecamp"
account_class = BasecampAccount
def get_auth_params(self, request, action):
data = super(BasecampProvider, self).get_auth_params(request, action)
data["type"] = "web_server"
return data
def extract_uid(self, data):
data = data["identity"]
return str(data["id"])
def extract_common_fields(self, data):
data = data["identity"]
return dict(
email=data.get("email_address"),
username=data.get("email_address"),
first_name=data.get("first_name"),
last_name=data.get("last_name"),
name="%s %s" % (data.get("first_name"), data.get("last_name")),
)
provider_classes = [BasecampProvider]
|
{
"content_hash": "02dd7a992fe4734dc091c36667b925f9",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 77,
"avg_line_length": 30.076923076923077,
"alnum_prop": 0.6351236146632566,
"repo_name": "rsalmaso/django-allauth",
"id": "f2600a72afb10a9044421751c626fcc85f6390be",
"size": "1173",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "allauth/socialaccount/providers/basecamp/provider.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Emacs Lisp",
"bytes": "104"
},
{
"name": "HTML",
"bytes": "20404"
},
{
"name": "JavaScript",
"bytes": "3360"
},
{
"name": "Makefile",
"bytes": "396"
},
{
"name": "Python",
"bytes": "923713"
}
],
"symlink_target": ""
}
|
from nova.openstack.common import log as logging
from nova.openstack.common.plugin import plugin
LOG = logging.getLogger(__name__)
class _CallbackNotifier(object):
"""Manages plugin-defined notification callbacks.
For each Plugin, a CallbackNotifier will be added to the
notification driver list. Calls to notify() with appropriate
messages will be hooked and prompt callbacks.
A callback should look like this:
def callback(context, message, user_data)
"""
def __init__(self):
self._callback_dict = {}
def _add_callback(self, event_type, callback, user_data):
callback_list = self._callback_dict.get(event_type, [])
callback_list.append({'function': callback,
'user_data': user_data})
self._callback_dict[event_type] = callback_list
def _remove_callback(self, callback):
for callback_list in self._callback_dict.values():
for entry in callback_list:
if entry['function'] == callback:
callback_list.remove(entry)
def notify(self, context, message):
if message.get('event_type') not in self._callback_dict:
return
for entry in self._callback_dict[message.get('event_type')]:
entry['function'](context, message, entry.get('user_data'))
def callbacks(self):
return self._callback_dict
class CallbackPlugin(plugin.Plugin):
""" Plugin with a simple callback interface.
This class is provided as a convenience for producing a simple
plugin that only watches a couple of events. For example, here's
a subclass which prints a line the first time an instance is created.
class HookInstanceCreation(CallbackPlugin):
def __init__(self, _service_name):
super(HookInstanceCreation, self).__init__()
self._add_callback(self.magic, 'compute.instance.create.start')
def magic(self):
print "An instance was created!"
self._remove_callback(self, self.magic)
"""
def __init__(self, service_name):
super(CallbackPlugin, self).__init__(service_name)
self._callback_notifier = _CallbackNotifier()
self._add_notifier(self._callback_notifier)
def _add_callback(self, callback, event_type, user_data=None):
"""Add callback for a given event notification.
Subclasses can call this as an alternative to implementing
        a full-blown notify notifier.
"""
self._callback_notifier._add_callback(event_type, callback, user_data)
def _remove_callback(self, callback):
"""Remove all notification callbacks to specified function."""
self._callback_notifier._remove_callback(callback)
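# Added note (hedged): after a subclass registers a callback via
# _add_callback(fn, 'compute.instance.create.start'), any notification whose
# message['event_type'] matches that key is routed by _CallbackNotifier.notify()
# to fn(context, message, user_data).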
|
{
"content_hash": "4f4743e890a655defe47dedd51cc95f5",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 78,
"avg_line_length": 35.44871794871795,
"alnum_prop": 0.6517179023508137,
"repo_name": "gspilio/nova",
"id": "c08ee69fbd60b9a3bd20e2daf729d04bfaaad023",
"size": "3402",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "nova/openstack/common/plugin/callbackplugin.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "7403"
},
{
"name": "Python",
"bytes": "9099328"
},
{
"name": "Shell",
"bytes": "17117"
}
],
"symlink_target": ""
}
|
"""Integration tests for the `fluview_meta` endpoint."""
# standard library
import unittest
# third party
import mysql.connector
# first party
from delphi.epidata.client.delphi_epidata import Epidata
class FluviewMetaTests(unittest.TestCase):
"""Tests the `fluview_meta` endpoint."""
@classmethod
def setUpClass(cls):
"""Perform one-time setup."""
# use the local instance of the Epidata API
Epidata.BASE_URL = 'http://delphi_web_epidata/epidata/api.php'
def setUp(self):
"""Perform per-test setup."""
# connect to the `epidata` database and clear the `fluview` table
cnx = mysql.connector.connect(
user='user',
password='pass',
host='delphi_database_epidata',
database='epidata')
cur = cnx.cursor()
cur.execute('truncate table fluview')
cnx.commit()
cur.close()
# make connection and cursor available to test cases
self.cnx = cnx
self.cur = cnx.cursor()
def tearDown(self):
"""Perform per-test teardown."""
self.cur.close()
self.cnx.close()
def test_round_trip(self):
"""Make a simple round-trip with some sample data."""
# insert dummy data
self.cur.execute('''
INSERT INTO
`fluview` (`id`, `release_date`, `issue`, `epiweek`, `region`,
`lag`, `num_ili`, `num_patients`, `num_providers`, `wili`, `ili`,
`num_age_0`, `num_age_1`, `num_age_2`, `num_age_3`, `num_age_4`, `num_age_5`)
VALUES
(0, "2020-04-07", 202021, 202020, "nat", 1, 2, 3, 4, 3.14159, 1.41421,
10, 11, 12, 13, 14, 15),
(0, "2020-04-28", 202022, 202022, "hhs1", 5, 6, 7, 8, 1.11111, 2.22222,
20, 21, 22, 23, 24, 25)
''')
self.cnx.commit()
# make the request
response = Epidata.fluview_meta()
# assert that the right data came back
self.assertEqual(response, {
'result': 1,
'epidata': [{
'latest_update': '2020-04-28',
'latest_issue': 202022,
'table_rows': 2,
}],
'message': 'success',
})
|
{
"content_hash": "b717044578e0150ec730947c64bc52e5",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 85,
"avg_line_length": 27.253333333333334,
"alnum_prop": 0.5929549902152642,
"repo_name": "cmu-delphi/delphi-epidata",
"id": "137e9464a854a7a059003fcab380fe43a12306f0",
"size": "2044",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "integrations/server/test_fluview_meta.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "2852"
},
{
"name": "HTML",
"bytes": "727"
},
{
"name": "JavaScript",
"bytes": "18856"
},
{
"name": "Makefile",
"bytes": "5648"
},
{
"name": "PHP",
"bytes": "131735"
},
{
"name": "Python",
"bytes": "881368"
},
{
"name": "R",
"bytes": "17445"
},
{
"name": "Shell",
"bytes": "2024"
}
],
"symlink_target": ""
}
|
"""
Documents module views
"""
from django.shortcuts import get_object_or_404
from maker.core.rendering import render_to_response
from maker.documents.forms import FolderForm, DocumentForm, FileForm, FilterForm, WebLinkForm, \
MassActionForm
from django.template import RequestContext
from maker.documents.models import Document, Folder, File, WebLink
from maker.core.models import Object
from maker.core.views import user_denied
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from django.db.models import Q
from maker.core.decorators import maker_login_required, handle_response_format
from django.http import HttpResponse
from maker.core.conf import settings
from django.utils.encoding import smart_str
def _get_filter_query(args):
"Creates a generic query to filter Documents, Files and Weblinks based on FilterForm arguments"
query = Q()
for arg in args:
if hasattr(Document, arg) and args[arg]:
append = Q(**{str('document__' + arg + '__id'): long(args[arg])})
append = append | Q(**{str('file__' + arg + '__id'): long(args[arg])})
append = append | Q(**{str('weblink__' + arg + '__id'): long(args[arg])})
query = query & append
return query
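# Added note (illustrative, hedged): for args = {'folder': '3'} the helper
# above builds roughly
#     Q(document__folder__id=3) | Q(file__folder__id=3) | Q(weblink__folder__id=3)
# so the same filter value is applied across all three concrete item types.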
def _get_direct_filter_query(args):
"Creates a query to filter Documents, Files or Weblinks based on FilterForm arguments"
query = Q()
for arg in args:
if hasattr(Document, arg) and args[arg]:
append = Q(**{str(arg + '__id'): long(args[arg])})
query = query & append
return query
def _get_default_context(request):
"Returns default context as a dict()"
folders = Object.filter_by_request(request, Folder.objects, mode="r")
massform = MassActionForm(request.user.get_profile())
context = {'folders': folders,
'massform': massform}
return context
def _process_mass_form(f):
"Pre-process request to handle mass action form for Document items"
def wrap(request, *args, **kwargs):
"Wrap"
user = request.user.get_profile()
if 'massform' in request.POST:
for key in request.POST:
if 'mass-object' in key:
try:
query = Q(object_type='maker.documents.models.Document') | \
Q(object_type='maker.documents.models.File') | \
Q(object_type='maker.documents.models.WebLink')
objects = Object.filter_by_request(request, Object.objects.filter(query))
object = objects.get(pk=request.POST[key])
form = MassActionForm(user, request.POST, instance=object)
if form.is_valid() and user.has_permission(object, mode='w'):
form.save()
except Exception:
pass
if 'mass-file' in key:
try:
file = File.objects.get(pk=request.POST[key])
form = MassActionForm(user, request.POST, instance=file)
if form.is_valid() and user.has_permission(file, mode='w'):
form.save()
except Exception:
pass
if 'mass-weblink' in key:
try:
link = WebLink.objects.get(pk=request.POST[key])
form = MassActionForm(user, request.POST, instance=link)
if form.is_valid() and user.has_permission(link, mode='w'):
form.save()
except Exception:
pass
if 'mass-document' in key:
try:
document = Document.objects.get(pk=request.POST[key])
form = MassActionForm(user, request.POST, instance=document)
if form.is_valid() and user.has_permission(document, mode='w'):
form.save()
except Exception:
pass
return f(request, *args, **kwargs)
wrap.__doc__ = f.__doc__
wrap.__name__ = f.__name__
return wrap
@handle_response_format
@maker_login_required
@_process_mass_form
def index(request, response_format='html'):
"Index page: displays all Documents"
query = Q(object_type='maker.documents.models.Document') | \
Q(object_type='maker.documents.models.File') | \
Q(object_type='maker.documents.models.WebLink')
if request.GET:
query = _get_filter_query(request.GET) & query
objects = Object.filter_by_request(request, Object.objects.filter(query).order_by('-last_updated'))
else:
objects = Object.filter_by_request(request, Object.objects.filter(query).order_by('-last_updated'))
filters = FilterForm(request.user.get_profile(), 'title', request.GET)
context = _get_default_context(request)
context.update({'filters':filters,
'objects':objects})
return render_to_response('documents/index', context,
context_instance=RequestContext(request),
response_format=response_format)
@handle_response_format
@maker_login_required
@_process_mass_form
def index_files(request, response_format='html'):
"Index_files page: displays all Files"
if request.GET:
query = _get_direct_filter_query(request.GET)
files = Object.filter_by_request(request, File.objects.filter(query).order_by('-last_updated'))
else:
files = Object.filter_by_request(request, File.objects.order_by('-last_updated'))
filters = FilterForm(request.user.get_profile(), 'title', request.GET)
context = _get_default_context(request)
context.update({'filters':filters,
'files':files})
return render_to_response('documents/index_files', context,
context_instance=RequestContext(request),
response_format=response_format)
@handle_response_format
@maker_login_required
@_process_mass_form
def index_weblinks(request, response_format='html'):
"Index_weblinks page: displays all WebLinks"
if request.GET:
query = _get_direct_filter_query(request.GET)
links = Object.filter_by_request(request, WebLink.objects.filter(query).order_by('-last_updated'))
else:
links = Object.filter_by_request(request, WebLink.objects.order_by('-last_updated'))
filters = FilterForm(request.user.get_profile(), 'title', request.GET)
context = _get_default_context(request)
context.update({'filters':filters,
'links':links})
return render_to_response('documents/index_weblinks', context,
context_instance=RequestContext(request),
response_format=response_format)
@handle_response_format
@maker_login_required
@_process_mass_form
def index_documents(request, response_format='html'):
"Index_documents page: displays all Documents"
if request.GET:
query = _get_direct_filter_query(request.GET)
documents = Object.filter_by_request(request, Document.objects.filter(query).order_by('-last_updated'))
else:
documents = Object.filter_by_request(request, Document.objects.order_by('-last_updated'))
filters = FilterForm(request.user.get_profile(), 'title', request.GET)
context = _get_default_context(request)
context.update({'filters':filters,
'documents':documents})
return render_to_response('documents/index_documents', context,
context_instance=RequestContext(request),
response_format=response_format)
@handle_response_format
@maker_login_required
def folder_add(request, response_format='html'):
"New folder form"
if request.POST:
if not 'cancel' in request.POST:
folder = Folder()
form = FolderForm(request.user.get_profile(), None, request.POST, instance=folder)
if form.is_valid():
folder = form.save()
folder.set_user_from_request(request)
return HttpResponseRedirect(reverse('documents_folder_view', args=[folder.id]))
else:
return HttpResponseRedirect(reverse('document_index'))
else:
form = FolderForm(request.user.get_profile(), None)
context = _get_default_context(request)
context.update({'form': form})
return render_to_response('documents/folder_add', context,
context_instance=RequestContext(request),
response_format=response_format)
@handle_response_format
@maker_login_required
def folder_add_typed(request, folder_id=None, response_format='html'):
"Folder add to preselected folder"
folder = None
if folder_id:
folder = get_object_or_404(Folder, pk=folder_id)
if not request.user.get_profile().has_permission(folder, mode='x'):
folder = None
if request.POST:
if not 'cancel' in request.POST:
folder = Folder()
form = FolderForm(request.user.get_profile(), folder_id, request.POST, instance=folder)
if form.is_valid():
folder = form.save()
folder.set_user_from_request(request)
return HttpResponseRedirect(reverse('documents_folder_view', args=[folder.id]))
else:
return HttpResponseRedirect(reverse('document_index'))
else:
form = FolderForm(request.user.get_profile(), folder_id)
context = _get_default_context(request)
context.update({'form': form,
'folder':folder})
return render_to_response('documents/folder_add_typed', context,
context_instance=RequestContext(request),
response_format=response_format)
@handle_response_format
@maker_login_required
@_process_mass_form
def folder_view(request, folder_id, response_format='html'):
"Single folder view page"
folder = get_object_or_404(Folder, pk=folder_id)
if not request.user.get_profile().has_permission(folder):
return user_denied(request, message="You don't have access to this Folder")
query = Q(
object_type='maker.documents.models.Document') | Q(
object_type='maker.documents.models.File') | Q(
object_type='maker.documents.models.WebLink')
query = query & (Q(document__folder=folder) | Q(file__folder=folder) | Q(weblink__folder=folder))
if request.GET:
query = query & _get_filter_query(request.GET)
objects = Object.filter_by_request(request, Object.objects.filter(query).order_by('-last_updated'))
# objects = objects.order_by('-last_updated')
else:
objects = Object.filter_by_request(request, Object.objects.filter(query).order_by('-last_updated'))
# objects = objects.order_by('-last_updated')
subfolders = Folder.objects.filter(parent=folder)
filters = FilterForm(request.user.get_profile(), 'title', request.GET)
context = _get_default_context(request)
context.update({'folder': folder,
'objects':objects,
'subfolders':subfolders,
'filters':filters})
return render_to_response('documents/folder_view', context,
context_instance=RequestContext(request),
response_format=response_format)
@handle_response_format
@maker_login_required
def folder_edit(request, folder_id, response_format='html'):
"Folder edit page"
folder = get_object_or_404(Folder, pk=folder_id)
if not request.user.get_profile().has_permission(folder, mode='w'):
return user_denied(request, message="You don't have access to this Folder")
if request.POST:
if not 'cancel' in request.POST:
form = FolderForm(request.user.get_profile(), folder_id, request.POST, instance=folder)
if form.is_valid():
folder = form.save()
return HttpResponseRedirect(reverse('documents_folder_view', args=[folder.id]))
else:
return HttpResponseRedirect(reverse('documents_folder_view', args=[folder.id]))
else:
form = FolderForm(request.user.get_profile(), folder_id, instance=folder)
context = _get_default_context(request)
context.update({'form': form,
'folder': folder})
return render_to_response('documents/folder_edit', context,
context_instance=RequestContext(request),
response_format=response_format)
@handle_response_format
@maker_login_required
def folder_delete(request, folder_id, response_format='html'):
"Folder delete"
folder = get_object_or_404(Folder, pk=folder_id)
if not request.user.get_profile().has_permission(folder, mode='w'):
return user_denied(request, message="You don't have access to this Folder")
if request.POST:
if 'delete' in request.POST:
if 'trash' in request.POST:
folder.trash = True
folder.save()
else:
folder.delete()
return HttpResponseRedirect(reverse('document_index'))
elif 'cancel' in request.POST:
return HttpResponseRedirect(reverse('documents_folder_view', args=[folder.id]))
query = Q(object_type='maker.documents.models.Document') | Q(
object_type='maker.documents.models.File') | Q(
object_type='maker.documents.models.WebLink')
query = query & (Q(document__folder=folder) | Q(file__folder=folder) | Q(weblink__folder=folder))
if request.GET:
query = _get_filter_query(request.GET)
objects = Object.filter_by_request(request, Object.objects.filter(query))
# objects = objects.order_by('-last_updated')
else:
objects = Object.filter_by_request(request, Object.objects.filter(query))
# objects = objects.order_by('-last_updated')
context = _get_default_context(request)
context.update({'folder': folder,
'objects':objects})
return render_to_response('documents/folder_delete', context,
context_instance=RequestContext(request),
response_format=response_format)
@handle_response_format
@maker_login_required
def document_add(request, response_format='html'):
"new document form"
if request.POST:
if not 'cancel' in request.POST:
document = Document()
form = DocumentForm(request.user.get_profile(), None, request.POST, instance=document)
if form.is_valid():
document = form.save()
document.set_user_from_request(request)
return HttpResponseRedirect(reverse('documents_document_view', args=[document.id]))
else:
return HttpResponseRedirect(reverse('document_index'))
else:
form = DocumentForm(request.user.get_profile(), None)
context = _get_default_context(request)
context.update({'form': form})
return render_to_response('documents/document_add', context,
context_instance=RequestContext(request),
response_format=response_format)
@handle_response_format
@maker_login_required
def document_add_typed(request, folder_id=None, response_format='html'):
"Document add to preselected folder"
folder = None
if folder_id:
folder = get_object_or_404(Folder, pk=folder_id)
if not request.user.get_profile().has_permission(folder, mode='x'):
folder = None
document = Document()
if request.POST:
if not 'cancel' in request.POST:
form = DocumentForm(request.user.get_profile(), folder_id, request.POST, instance=document)
if form.is_valid():
document = form.save()
document.set_user_from_request(request)
return HttpResponseRedirect(reverse('documents_document_view', args=[document.id]))
else:
return HttpResponseRedirect(reverse('document_index'))
else:
form = DocumentForm(request.user.get_profile(), folder_id)
context = _get_default_context(request)
context.update({'form': form,
'folder':folder})
return render_to_response('documents/document_add_typed', context,
context_instance=RequestContext(request),
response_format=response_format)
@handle_response_format
@maker_login_required
def document_view(request, document_id, response_format='html'):
"Single document view page"
document = get_object_or_404(Document, pk=document_id)
if not request.user.get_profile().has_permission(document):
return user_denied(request, message="You don't have access to this Document")
context = _get_default_context(request)
context.update({'document':document})
return render_to_response('documents/document_view', context,
context_instance=RequestContext(request),
response_format=response_format)
@handle_response_format
@maker_login_required
def document_edit(request, document_id, response_format='html'):
"Document edit page"
document = get_object_or_404(Document, pk=document_id)
if not request.user.get_profile().has_permission(document, mode='w'):
return user_denied(request, message="You don't have access to this Document")
if request.POST:
if not 'cancel' in request.POST:
form = DocumentForm(request.user.get_profile(), None, request.POST, instance=document)
if form.is_valid():
document = form.save()
return HttpResponseRedirect(reverse('documents_document_view', args=[document.id]))
else:
return HttpResponseRedirect(reverse('documents_document_view', args=[document.id]))
else:
form = DocumentForm(request.user.get_profile(), None, instance=document)
context = _get_default_context(request)
context.update({'form': form,
'document':document})
return render_to_response('documents/document_edit', context,
context_instance=RequestContext(request),
response_format=response_format)
@handle_response_format
@maker_login_required
def document_delete(request, document_id, response_format='html'):
"Document delete"
document = get_object_or_404(Document, pk=document_id)
if not request.user.get_profile().has_permission(document, mode='w'):
return user_denied(request, message="You don't have access to this Document")
if request.POST:
if 'delete' in request.POST:
if 'trash' in request.POST:
document.trash = True
document.save()
else:
document.delete()
return HttpResponseRedirect(reverse('document_index'))
elif 'cancel' in request.POST:
return HttpResponseRedirect(reverse('documents_document_view', args=[document.id]))
context = _get_default_context(request)
context.update({'document':document})
return render_to_response('documents/document_delete', context,
context_instance=RequestContext(request),
response_format=response_format)
@handle_response_format
@maker_login_required
def file_upload(request, response_format='html'):
"New file form"
if request.POST:
if not 'cancel' in request.POST:
file = File()
form = FileForm(request.user.get_profile(), None, request.POST, request.FILES, instance=file)
if form.is_valid():
file = form.save()
file.set_user_from_request(request)
return HttpResponseRedirect(reverse('documents_file_view', args=[file.id]))
else:
return HttpResponseRedirect(reverse('document_index'))
else:
form = FileForm(request.user.get_profile(), None)
context = _get_default_context(request)
context.update({'form': form})
return render_to_response('documents/file_upload', context,
context_instance=RequestContext(request),
response_format=response_format)
@handle_response_format
@maker_login_required
def file_upload_typed(request, folder_id=None, response_format='html'):
"File upload to preselected folder"
folder = None
if folder_id:
folder = get_object_or_404(Folder, pk=folder_id)
if not request.user.get_profile().has_permission(folder, mode='x'):
folder = None
if request.POST:
if not 'cancel' in request.POST:
form = FileForm(request.user.get_profile(), folder_id, request.POST, request.FILES)
if form.is_valid():
file = form.save()
file.set_user_from_request(request)
return HttpResponseRedirect(reverse('documents_file_view', args=[file.id]))
else:
return HttpResponseRedirect(reverse('document_index'))
else:
form = FileForm(request.user.get_profile(), folder_id)
context = _get_default_context(request)
context.update({'form': form,
'folder':folder})
return render_to_response('documents/file_upload_typed', context,
context_instance=RequestContext(request),
response_format=response_format)
@handle_response_format
@maker_login_required
def file_view(request, file_id, response_format='html'):
"Single file view page"
file = get_object_or_404(File, pk=file_id)
if not request.user.get_profile().has_permission(file):
return user_denied(request, message="You don't have access to this File")
if request.GET and 'download' in request.GET:
"Return url to download a file"
fullpath = getattr(settings, 'MEDIA_ROOT', './static/media/')
data = ''
try:
data = open(fullpath + str(file.content)).read()
except IOError:
pass
response = HttpResponse(data, content_type='application/x-download')
response['Content-Disposition'] = 'attachment; filename="%s"' % smart_str(file.content)
return response
context = _get_default_context(request)
context.update({'file':file})
return render_to_response('documents/file_view', context,
context_instance=RequestContext(request),
response_format=response_format)
@handle_response_format
@maker_login_required
def file_delete(request, file_id, response_format='html'):
"File delete"
file = get_object_or_404(File, pk=file_id)
if not request.user.get_profile().has_permission(file, mode='w'):
return user_denied(request, message="You don't have access to this File")
if request.POST:
if 'delete' in request.POST:
if 'trash' in request.POST:
file.trash = True
file.save()
else:
file.delete()
return HttpResponseRedirect(reverse('document_index'))
elif 'cancel' in request.POST:
return HttpResponseRedirect(reverse('documents_file_view', args=[file.id]))
context = _get_default_context(request)
context.update({'file':file})
return render_to_response('documents/file_delete', context,
context_instance=RequestContext(request),
response_format=response_format)
@handle_response_format
@maker_login_required
def file_edit(request, file_id, response_format='html'):
"File edit page"
file = get_object_or_404(File, pk=file_id)
if not request.user.get_profile().has_permission(file, mode='w'):
return user_denied(request, message="You don't have access to this File")
if request.POST:
if not 'cancel' in request.POST:
form = FileForm(request.user.get_profile(), None, request.POST, request.FILES, instance=file)
if form.is_valid():
file = form.save()
return HttpResponseRedirect(reverse('documents_file_view', args=[file.id]))
else:
return HttpResponseRedirect(reverse('documents_file_view', args=[file.id]))
else:
form = FileForm(request.user.get_profile(), None, instance=file)
context = _get_default_context(request)
context.update({'form': form,
'file':file})
return render_to_response('documents/file_edit', context,
context_instance=RequestContext(request),
response_format=response_format)
# Web Links
@handle_response_format
@maker_login_required
def weblink_add(request, response_format='html'):
"New web link form"
if request.POST:
if not 'cancel' in request.POST:
link = WebLink()
form = WebLinkForm(request.user.get_profile(), None, request.POST, instance=link)
if form.is_valid():
link = form.save()
link.set_user_from_request(request)
return HttpResponseRedirect(reverse('documents_weblink_view', args=[link.id]))
else:
return HttpResponseRedirect(reverse('document_index'))
else:
form = WebLinkForm(request.user.get_profile(), None)
context = _get_default_context(request)
    context.update({'form': form})
return render_to_response('documents/weblink_add', context,
context_instance=RequestContext(request),
response_format=response_format)
@handle_response_format
@maker_login_required
def weblink_add_typed(request, folder_id=None, response_format='html'):
"Web link add to preselected folder"
folder = None
if folder_id:
folder = get_object_or_404(Folder, pk=folder_id)
if not request.user.get_profile().has_permission(folder, mode='x'):
folder = None
if request.POST:
if not 'cancel' in request.POST:
link = WebLink()
form = WebLinkForm(request.user.get_profile(), folder_id, request.POST, instance=link)
if form.is_valid():
link = form.save()
link.set_user_from_request(request)
return HttpResponseRedirect(reverse('documents_weblink_view', args=[link.id]))
else:
return HttpResponseRedirect(reverse('document_index'))
else:
form = WebLinkForm(request.user.get_profile(), folder_id)
context = _get_default_context(request)
context.update({'form': form,
'folder':folder})
return render_to_response('documents/weblink_add_typed', context,
context_instance=RequestContext(request),
response_format=response_format)
@handle_response_format
@maker_login_required
def weblink_view(request, weblink_id, response_format='html'):
"Weblink page"
link = get_object_or_404(WebLink, pk=weblink_id)
if not request.user.get_profile().has_permission(link):
return user_denied(request, message="You don't have access to this Web Link")
context = _get_default_context(request)
context.update({'link':link})
return render_to_response('documents/weblink_view', context,
context_instance=RequestContext(request),
response_format=response_format)
@handle_response_format
@maker_login_required
def weblink_edit(request, weblink_id, response_format='html'):
"WebLink edit page"
link = get_object_or_404(WebLink, pk=weblink_id)
if not request.user.get_profile().has_permission(link, mode='w'):
return user_denied(request, message="You don't have access to this Web Link")
if request.POST:
if not 'cancel' in request.POST:
form = WebLinkForm(request.user.get_profile(), None, request.POST, instance=link)
if form.is_valid():
link = form.save()
return HttpResponseRedirect(reverse('documents_weblink_view', args=[link.id]))
else:
return HttpResponseRedirect(reverse('documents_weblink_view', args=[link.id]))
else:
form = WebLinkForm(request.user.get_profile(), None, instance=link)
context = _get_default_context(request)
context.update({'form': form,
'link':link})
return render_to_response('documents/weblink_edit', context,
context_instance=RequestContext(request),
response_format=response_format)
@handle_response_format
@maker_login_required
def weblink_delete(request, weblink_id, response_format='html'):
"WebLink delete"
link = get_object_or_404(WebLink, pk=weblink_id)
if not request.user.get_profile().has_permission(link, mode='w'):
return user_denied(request, message="You don't have access to this Web Link")
if request.POST:
if 'delete' in request.POST:
if 'trash' in request.POST:
link.trash = True
link.save()
else:
link.delete()
return HttpResponseRedirect(reverse('document_index'))
elif 'cancel' in request.POST:
return HttpResponseRedirect(reverse('documents_weblink_view', args=[link.id]))
context = _get_default_context(request)
context.update({'link':link})
return render_to_response('documents/weblink_delete', context,
context_instance=RequestContext(request),
response_format=response_format)
|
{
"content_hash": "1fd635c0b8c68e00e821882274ba4aa4",
"timestamp": "",
"source": "github",
"line_count": 788,
"max_line_length": 111,
"avg_line_length": 39.44796954314721,
"alnum_prop": 0.5978767894482869,
"repo_name": "alejo8591/maker",
"id": "baf44d71d1139554009c4745106c04f4a4bd4a94",
"size": "31197",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "documents/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "1578070"
},
{
"name": "Perl",
"bytes": "164"
},
{
"name": "Python",
"bytes": "2863599"
},
{
"name": "Shell",
"bytes": "3561"
}
],
"symlink_target": ""
}
|
import hsm
from hsm import actor
from hsm import runtime
class Machine(actor.TopState):
def __init__(self):
self._error = None
def on_error(self, error):
self._error = error
self.transition(ErrorState)
@actor.initial_state
class Off(Machine):
def on_start(self):
self.transition(On)
class On(Machine):
def on_stop(self):
self.transition(Off)
@actor.initial_state
class WaitCommand(On):
def on_start_server(self):
self.transition(WaitConnection)
class WaitConnection(On):
def on_stop_server(self):
self.transition(WaitCommand)
class ErrorState(Machine):
def _enter(self):
print "enter %s State, error code = %s" % (self.__class__.__name__, self._error)
if __name__ == '__main__':
print "test simple hsm"
mac = Machine()
st = mac.get_state()
assert(Off == st)
mac.send_start()
runtime.dispatch_all_msg()
st = mac.get_state()
assert(WaitCommand == st)
mac.send_error("ERROR 404")
runtime.dispatch_all_msg()
st = mac.get_state()
assert(ErrorState == st)
|
{
"content_hash": "ec1040e4b3742dd70d7a46560d4e7a7c",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 83,
"avg_line_length": 22.51063829787234,
"alnum_prop": 0.6502835538752363,
"repo_name": "eneabio/hsm",
"id": "7ba376a2b80f8fa142977f708cf2ed11d366f556",
"size": "1058",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hsm/test/simple_hsm.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "39470"
}
],
"symlink_target": ""
}
|
from os_win import exceptions as os_win_exc
from unittest import mock
from oslo_config import cfg
from nova import exception
from nova.objects import migrate_data as migrate_data_obj
from nova.tests.unit import fake_instance
from nova.tests.unit.virt.hyperv import test_base
from nova.virt.hyperv import livemigrationops
from nova.virt.hyperv import serialconsoleops
CONF = cfg.CONF
class LiveMigrationOpsTestCase(test_base.HyperVBaseTestCase):
"""Unit tests for the Hyper-V LiveMigrationOps class."""
def setUp(self):
super(LiveMigrationOpsTestCase, self).setUp()
self.context = 'fake_context'
self._livemigrops = livemigrationops.LiveMigrationOps()
self._livemigrops._livemigrutils = mock.MagicMock()
self._livemigrops._pathutils = mock.MagicMock()
self._livemigrops._block_dev_man = mock.MagicMock()
self._pathutils = self._livemigrops._pathutils
@mock.patch.object(serialconsoleops.SerialConsoleOps,
'stop_console_handler')
@mock.patch('nova.virt.hyperv.vmops.VMOps.copy_vm_dvd_disks')
def _test_live_migration(self, mock_copy_dvd_disk,
mock_stop_console_handler,
side_effect=None,
shared_storage=False,
migrate_data_received=True,
migrate_data_version='1.1'):
mock_instance = fake_instance.fake_instance_obj(self.context)
mock_post = mock.MagicMock()
mock_recover = mock.MagicMock()
mock_copy_logs = self._livemigrops._pathutils.copy_vm_console_logs
fake_dest = mock.sentinel.DESTINATION
mock_check_shared_inst_dir = (
self._pathutils.check_remote_instances_dir_shared)
mock_check_shared_inst_dir.return_value = shared_storage
self._livemigrops._livemigrutils.live_migrate_vm.side_effect = [
side_effect]
if migrate_data_received:
migrate_data = migrate_data_obj.HyperVLiveMigrateData()
if migrate_data_version != '1.0':
migrate_data.is_shared_instance_path = shared_storage
else:
migrate_data = None
self._livemigrops.live_migration(context=self.context,
instance_ref=mock_instance,
dest=fake_dest,
post_method=mock_post,
recover_method=mock_recover,
block_migration=(
mock.sentinel.block_migr),
migrate_data=migrate_data)
if side_effect is os_win_exc.HyperVException:
mock_recover.assert_called_once_with(self.context, mock_instance,
fake_dest,
migrate_data)
mock_post.assert_not_called()
else:
post_call_args = mock_post.call_args_list
self.assertEqual(1, len(post_call_args))
post_call_args_list = post_call_args[0][0]
self.assertEqual((self.context, mock_instance,
fake_dest, mock.sentinel.block_migr),
post_call_args_list[:-1])
# The last argument, the migrate_data object, should be created
# by the callee if not received.
migrate_data_arg = post_call_args_list[-1]
self.assertIsInstance(
migrate_data_arg,
migrate_data_obj.HyperVLiveMigrateData)
self.assertEqual(shared_storage,
migrate_data_arg.is_shared_instance_path)
if not migrate_data_received or migrate_data_version == '1.0':
mock_check_shared_inst_dir.assert_called_once_with(fake_dest)
else:
self.assertFalse(mock_check_shared_inst_dir.called)
mock_stop_console_handler.assert_called_once_with(mock_instance.name)
if not shared_storage:
mock_copy_logs.assert_called_once_with(mock_instance.name,
fake_dest)
mock_copy_dvd_disk.assert_called_once_with(mock_instance.name,
fake_dest)
else:
self.assertFalse(mock_copy_logs.called)
self.assertFalse(mock_copy_dvd_disk.called)
mock_live_migr = self._livemigrops._livemigrutils.live_migrate_vm
mock_live_migr.assert_called_once_with(
mock_instance.name,
fake_dest,
migrate_disks=not shared_storage)
def test_live_migration(self):
self._test_live_migration(migrate_data_received=False)
def test_live_migration_old_migrate_data_version(self):
self._test_live_migration(migrate_data_version='1.0')
def test_live_migration_exception(self):
self._test_live_migration(side_effect=os_win_exc.HyperVException)
def test_live_migration_shared_storage(self):
self._test_live_migration(shared_storage=True)
@mock.patch('nova.virt.hyperv.volumeops.VolumeOps.get_disk_path_mapping')
@mock.patch('nova.virt.hyperv.imagecache.ImageCache.get_cached_image')
@mock.patch('nova.virt.hyperv.volumeops.VolumeOps.connect_volumes')
def _test_pre_live_migration(self, mock_initialize_connection,
mock_get_cached_image,
mock_get_disk_path_mapping,
phys_disks_attached=True):
mock_instance = fake_instance.fake_instance_obj(self.context)
mock_instance.image_ref = "fake_image_ref"
mock_get_disk_path_mapping.return_value = (
mock.sentinel.disk_path_mapping if phys_disks_attached
else None)
bdman = self._livemigrops._block_dev_man
mock_is_boot_from_vol = bdman.is_boot_from_volume
mock_is_boot_from_vol.return_value = None
CONF.set_override('use_cow_images', True)
self._livemigrops.pre_live_migration(
self.context, mock_instance,
block_device_info=mock.sentinel.BLOCK_INFO,
network_info=mock.sentinel.NET_INFO)
check_config = (
self._livemigrops._livemigrutils.check_live_migration_config)
check_config.assert_called_once_with()
mock_is_boot_from_vol.assert_called_once_with(
mock.sentinel.BLOCK_INFO)
mock_get_cached_image.assert_called_once_with(self.context,
mock_instance)
mock_initialize_connection.assert_called_once_with(
mock.sentinel.BLOCK_INFO)
mock_get_disk_path_mapping.assert_called_once_with(
mock.sentinel.BLOCK_INFO)
if phys_disks_attached:
livemigrutils = self._livemigrops._livemigrutils
livemigrutils.create_planned_vm.assert_called_once_with(
mock_instance.name,
mock_instance.host,
mock.sentinel.disk_path_mapping)
def test_pre_live_migration(self):
self._test_pre_live_migration()
def test_pre_live_migration_invalid_disk_mapping(self):
self._test_pre_live_migration(phys_disks_attached=False)
@mock.patch('nova.virt.hyperv.volumeops.VolumeOps.disconnect_volumes')
def _test_post_live_migration(self, mock_disconnect_volumes,
shared_storage=False):
migrate_data = migrate_data_obj.HyperVLiveMigrateData(
is_shared_instance_path=shared_storage)
self._livemigrops.post_live_migration(
self.context, mock.sentinel.instance,
mock.sentinel.block_device_info,
migrate_data)
mock_disconnect_volumes.assert_called_once_with(
mock.sentinel.block_device_info)
mock_get_inst_dir = self._pathutils.get_instance_dir
if not shared_storage:
mock_get_inst_dir.assert_called_once_with(
mock.sentinel.instance.name,
create_dir=False, remove_dir=True)
else:
self.assertFalse(mock_get_inst_dir.called)
def test_post_block_migration(self):
self._test_post_live_migration()
def test_post_live_migration_shared_storage(self):
self._test_post_live_migration(shared_storage=True)
@mock.patch.object(migrate_data_obj, 'HyperVLiveMigrateData')
def test_check_can_live_migrate_destination(self, mock_migr_data_cls):
mock_instance = fake_instance.fake_instance_obj(self.context)
migr_data = self._livemigrops.check_can_live_migrate_destination(
mock.sentinel.context, mock_instance, mock.sentinel.src_comp_info,
mock.sentinel.dest_comp_info)
mock_check_shared_inst_dir = (
self._pathutils.check_remote_instances_dir_shared)
mock_check_shared_inst_dir.assert_called_once_with(mock_instance.host)
self.assertEqual(mock_migr_data_cls.return_value, migr_data)
self.assertEqual(mock_check_shared_inst_dir.return_value,
migr_data.is_shared_instance_path)
@mock.patch('nova.virt.hyperv.vmops.VMOps.plug_vifs')
def test_post_live_migration_at_destination(self, mock_plug_vifs):
self._livemigrops.post_live_migration_at_destination(
self.context, mock.sentinel.instance,
network_info=mock.sentinel.NET_INFO,
block_migration=mock.sentinel.BLOCK_INFO)
mock_plug_vifs.assert_called_once_with(mock.sentinel.instance,
mock.sentinel.NET_INFO)
def test_check_can_live_migrate_destination_exception(self):
mock_instance = fake_instance.fake_instance_obj(self.context)
mock_check = self._pathutils.check_remote_instances_dir_shared
mock_check.side_effect = exception.FileNotFound(file_path='C:\\baddir')
self.assertRaises(
exception.MigrationPreCheckError,
self._livemigrops.check_can_live_migrate_destination,
mock.sentinel.context, mock_instance, mock.sentinel.src_comp_info,
mock.sentinel.dest_comp_info)
|
{
"content_hash": "89c113824de2c349331f11bd856fbb1b",
"timestamp": "",
"source": "github",
"line_count": 224,
"max_line_length": 79,
"avg_line_length": 46.205357142857146,
"alnum_prop": 0.6079227053140097,
"repo_name": "openstack/nova",
"id": "79cb4318c58e0a7c60c2c9b6a4f514100732b188",
"size": "10989",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "nova/tests/unit/virt/hyperv/test_livemigrationops.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "3545"
},
{
"name": "Mako",
"bytes": "1952"
},
{
"name": "Python",
"bytes": "23261880"
},
{
"name": "Shell",
"bytes": "28113"
},
{
"name": "Smarty",
"bytes": "507244"
}
],
"symlink_target": ""
}
|
import logging
from collections import defaultdict
from androguard.decompiler.dad.basic_blocks import (CatchBlock,
Condition,
LoopBlock,
ShortCircuitBlock,
TryBlock)
from androguard.decompiler.dad.graph import Graph
from androguard.decompiler.dad.node import Interval
from androguard.decompiler.dad.util import common_dom
logger = logging.getLogger('dad.control_flow')
def intervals(graph):
'''
Compute the intervals of the graph
Returns
interval_graph: a graph of the intervals of G
interv_heads: a dict of (header node, interval)
'''
interval_graph = Graph() # graph of intervals
heads = [graph.entry] # list of header nodes
interv_heads = {} # interv_heads[i] = interval of header i
processed = dict([(i, False) for i in graph])
edges = defaultdict(list)
while heads:
head = heads.pop(0)
if not processed[head]:
processed[head] = True
interv_heads[head] = Interval(head)
# Check if there is a node which has all its predecessor in the
# current interval. If there is, add that node to the interval and
# repeat until all the possible nodes have been added.
change = True
while change:
change = False
for node in graph.rpo[1:]:
if all(
p in interv_heads[head] for p in graph.all_preds(node)):
change |= interv_heads[head].add_node(node)
# At this stage, a node which is not in the interval, but has one
# of its predecessor in it, is the header of another interval. So
# we add all such nodes to the header list.
for node in graph:
if node not in interv_heads[head] and node not in heads:
if any(
p in interv_heads[head] for p in graph.all_preds(node)):
edges[interv_heads[head]].append(node)
assert(node not in heads)
heads.append(node)
interval_graph.add_node(interv_heads[head])
interv_heads[head].compute_end(graph)
# Edges is a mapping of 'Interval -> [header nodes of interval successors]'
for interval, heads in list(edges.items()):
for head in heads:
interval_graph.add_edge(interval, interv_heads[head])
interval_graph.entry = graph.entry.interval
if graph.exit:
interval_graph.exit = graph.exit.interval
return interval_graph, interv_heads
def derived_sequence(graph):
'''
Compute the derived sequence of the graph G
The intervals of G are collapsed into nodes, intervals of these nodes are
built, and the process is repeated iteratively until we obtain a single
node (if the graph is not irreducible)
'''
deriv_seq = [graph]
deriv_interv = []
single_node = False
while not single_node:
interv_graph, interv_heads = intervals(graph)
deriv_interv.append(interv_heads)
single_node = len(interv_graph) == 1
if not single_node:
deriv_seq.append(interv_graph)
graph = interv_graph
graph.compute_rpo()
return deriv_seq, deriv_interv
def mark_loop_rec(graph, node, s_num, e_num, interval, nodes_in_loop):
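    '''
    Add node to nodes_in_loop and recurse into its predecessors whose number
    lies between the loop header (s_num) and the latch (e_num) and which
    belong to the given interval.
    '''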
if node in nodes_in_loop:
return
nodes_in_loop.append(node)
for pred in graph.all_preds(node):
if s_num < pred.num <= e_num and pred in interval:
mark_loop_rec(graph, pred, s_num, e_num, interval, nodes_in_loop)
def mark_loop(graph, start, end, interval):
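    '''
    Mark the loop delimited by the header of start and the latch of end:
    collect its nodes, flag the header as a loop start and record its latch.
    '''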
logger.debug('MARKLOOP : %s END : %s', start, end)
head = start.get_head()
latch = end.get_end()
nodes_in_loop = [head]
mark_loop_rec(graph, latch, head.num, latch.num, interval, nodes_in_loop)
head.startloop = True
head.latch = latch
return nodes_in_loop
def loop_type(start, end, nodes_in_loop):
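    '''
    Classify the loop as pretest, posttest or endless, depending on whether
    the header (start) and the latch (end) are conditional nodes and whether
    their branches stay inside the loop.
    '''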
if end.type.is_cond:
if start.type.is_cond:
if start.true in nodes_in_loop and start.false in nodes_in_loop:
start.looptype.is_posttest = True
else:
start.looptype.is_pretest = True
else:
start.looptype.is_posttest = True
else:
if start.type.is_cond:
if start.true in nodes_in_loop and start.false in nodes_in_loop:
start.looptype.is_endless = True
else:
start.looptype.is_pretest = True
else:
start.looptype.is_endless = True
def loop_follow(start, end, nodes_in_loop):
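    '''
    Compute the follow of the loop (the node reached when the loop exits) and
    record it on the header and on every node of the loop.
    '''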
follow = None
if start.looptype.is_pretest:
if start.true in nodes_in_loop:
follow = start.false
else:
follow = start.true
elif start.looptype.is_posttest:
if end.true in nodes_in_loop:
follow = end.false
else:
follow = end.true
else:
num_next = float('inf')
for node in nodes_in_loop:
if node.type.is_cond:
if (node.true.num < num_next
and node.true not in nodes_in_loop):
follow = node.true
num_next = follow.num
elif (node.false.num < num_next
and node.false not in nodes_in_loop):
follow = node.false
num_next = follow.num
start.follow['loop'] = follow
for node in nodes_in_loop:
node.follow['loop'] = follow
logger.debug('Start of loop %s', start)
logger.debug('Follow of loop: %s', start.follow['loop'])
def loop_struct(graphs_list, intervals_list):
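    '''
    Find the loops at each level of the derived sequence and mark the
    corresponding nodes of the original (first) graph.
    '''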
first_graph = graphs_list[0]
for i, graph in enumerate(graphs_list):
interval = intervals_list[i]
for head in sorted(list(interval.keys()), key=lambda x: x.num):
loop_nodes = []
for node in graph.all_preds(head):
if node.interval is head.interval:
lnodes = mark_loop(first_graph, head, node, head.interval)
for lnode in lnodes:
if lnode not in loop_nodes:
loop_nodes.append(lnode)
head.get_head().loop_nodes = loop_nodes
def if_struct(graph, idoms):
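    '''
    Structure 2-way conditionals: the follow of a conditional node is the
    highest-numbered node it immediately dominates that has more than one
    incoming edge. Returns the conditionals whose follow could not be found.
    '''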
unresolved = set()
for node in graph.post_order():
if node.type.is_cond:
ldominates = []
for n, idom in idoms.items():
if node is idom and len(graph.reverse_edges.get(n, [])) > 1:
ldominates.append(n)
if len(ldominates) > 0:
n = max(ldominates, key=lambda x: x.num)
node.follow['if'] = n
for x in unresolved.copy():
if node.num < x.num < n.num:
x.follow['if'] = n
unresolved.remove(x)
else:
unresolved.add(node)
return unresolved
def switch_struct(graph, idoms):
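    '''
    Structure switch (n-way) nodes: compute the follow of each switch from the
    immediate dominators and order its cases.
    '''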
unresolved = set()
for node in graph.post_order():
if node.type.is_switch:
m = node
for suc in graph.sucs(node):
if idoms[suc] is not node:
m = common_dom(idoms, node, suc)
ldominates = []
for n, dom in idoms.items():
if m is dom and len(graph.all_preds(n)) > 1:
ldominates.append(n)
if len(ldominates) > 0:
n = max(ldominates, key=lambda x: x.num)
node.follow['switch'] = n
for x in unresolved:
x.follow['switch'] = n
unresolved = set()
else:
unresolved.add(node)
node.order_cases()
# TODO: deal with preds which are in catch
def short_circuit_struct(graph, idom, node_map):
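    '''
    Merge pairs of conditional nodes that encode short-circuit boolean
    expressions (&&, ||) into single ShortCircuitBlock nodes, updating the
    graph, the dominator map and node_map accordingly.
    '''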
def MergeNodes(node1, node2, is_and, is_not):
lpreds = set()
ldests = set()
for node in (node1, node2):
lpreds.update(graph.preds(node))
ldests.update(graph.sucs(node))
graph.remove_node(node)
done.add(node)
lpreds.difference_update((node1, node2))
ldests.difference_update((node1, node2))
entry = graph.entry in (node1, node2)
new_name = '%s+%s' % (node1.name, node2.name)
condition = Condition(node1, node2, is_and, is_not)
new_node = ShortCircuitBlock(new_name, condition)
for old_n, new_n in node_map.items():
if new_n in (node1, node2):
node_map[old_n] = new_node
node_map[node1] = new_node
node_map[node2] = new_node
idom[new_node] = idom[node1]
idom.pop(node1)
idom.pop(node2)
new_node.copy_from(node1)
graph.add_node(new_node)
for pred in lpreds:
pred.update_attribute_with(node_map)
graph.add_edge(node_map.get(pred, pred), new_node)
for dest in ldests:
graph.add_edge(new_node, node_map.get(dest, dest))
if entry:
graph.entry = new_node
return new_node
change = True
while change:
change = False
done = set()
for node in graph.post_order():
if node.type.is_cond and node not in done:
then = node.true
els = node.false
if node in (then, els):
continue
if then.type.is_cond and len(graph.preds(then)) == 1:
if node in (then.true, then.false):
continue
if then.false is els: # node && t
change = True
merged_node = MergeNodes(node, then, True, False)
merged_node.true = then.true
merged_node.false = els
elif then.true is els: # !node || t
change = True
merged_node = MergeNodes(node, then, False, True)
merged_node.true = els
merged_node.false = then.false
elif els.type.is_cond and len(graph.preds(els)) == 1:
if node in (els.false, els.true):
continue
if els.false is then: # !node && e
change = True
merged_node = MergeNodes(node, els, True, True)
merged_node.true = els.true
merged_node.false = then
elif els.true is then: # node || e
change = True
merged_node = MergeNodes(node, els, False, False)
merged_node.true = then
merged_node.false = els.false
done.add(node)
if change:
graph.compute_rpo()
def while_block_struct(graph, node_map):
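    '''
    Wrap every loop header node in a LoopBlock, rewiring its incoming and
    outgoing edges and recording the replacement in node_map.
    '''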
change = False
for node in graph.rpo[:]:
if node.startloop:
change = True
new_node = LoopBlock(node.name, node)
node_map[node] = new_node
new_node.copy_from(node)
entry = node is graph.entry
lpreds = graph.preds(node)
lsuccs = graph.sucs(node)
for pred in lpreds:
graph.add_edge(node_map.get(pred, pred), new_node)
for suc in lsuccs:
graph.add_edge(new_node, node_map.get(suc, suc))
if entry:
graph.entry = new_node
if node.type.is_cond:
new_node.true = node.true
new_node.false = node.false
graph.add_node(new_node)
graph.remove_node(node)
if change:
graph.compute_rpo()
def catch_struct(graph, idoms):
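    '''
    Wrap blocks protected by exception handlers in TryBlock nodes, attach the
    corresponding CatchBlock nodes and compute the follow of each try block.
    '''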
block_try_nodes = {}
node_map = {}
for catch_block in graph.reverse_catch_edges:
if catch_block in graph.catch_edges:
continue
catch_node = CatchBlock(catch_block)
try_block = idoms[catch_block]
try_node = block_try_nodes.get(try_block)
if try_node is None:
block_try_nodes[try_block] = TryBlock(try_block)
try_node = block_try_nodes[try_block]
node_map[try_block] = try_node
for pred in graph.all_preds(try_block):
pred.update_attribute_with(node_map)
if try_block in graph.sucs(pred):
graph.edges[pred].remove(try_block)
graph.add_edge(pred, try_node)
if try_block.type.is_stmt:
follow = graph.sucs(try_block)
if follow:
try_node.follow = graph.sucs(try_block)[0]
else:
try_node.follow = None
elif try_block.type.is_cond:
loop_follow = try_block.follow['loop']
if loop_follow:
try_node.follow = loop_follow
else:
try_node.follow = try_block.follow['if']
elif try_block.type.is_switch:
try_node.follow = try_block.follow['switch']
else: # return or throw
try_node.follow = None
try_node.add_catch_node(catch_node)
for node in graph.nodes:
node.update_attribute_with(node_map)
if graph.entry in node_map:
graph.entry = node_map[graph.entry]
def update_dom(idoms, node_map):
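    '''Make the immediate dominators point to the new nodes from node_map.'''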
for n, dom in idoms.items():
idoms[n] = node_map.get(dom, dom)
def identify_structures(graph, idoms):
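    '''
    Run the whole structuring pipeline on the graph: derived sequence,
    switches, loops, short-circuit conditions, ifs and try/catch blocks.
    '''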
Gi, Li = derived_sequence(graph)
switch_struct(graph, idoms)
loop_struct(Gi, Li)
node_map = {}
short_circuit_struct(graph, idoms, node_map)
update_dom(idoms, node_map)
if_unresolved = if_struct(graph, idoms)
while_block_struct(graph, node_map)
update_dom(idoms, node_map)
loop_starts = []
for node in graph.rpo:
node.update_attribute_with(node_map)
if node.startloop:
loop_starts.append(node)
for node in loop_starts:
loop_type(node, node.latch, node.loop_nodes)
loop_follow(node, node.latch, node.loop_nodes)
for node in if_unresolved:
follows = [n for n in (node.follow['loop'],
node.follow['switch']) if n]
if len(follows) >= 1:
follow = min(follows, key=lambda x: x.num)
node.follow['if'] = follow
catch_struct(graph, idoms)
|
{
"content_hash": "c25183ced6aa35b3e827fdbdc0e179ac",
"timestamp": "",
"source": "github",
"line_count": 420,
"max_line_length": 79,
"avg_line_length": 35.00238095238095,
"alnum_prop": 0.5328889191211482,
"repo_name": "xysec/androguard",
"id": "c8882a0d0bf779e95d9f2c8903bc58a05c8d6a97",
"size": "15376",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "androguard/decompiler/dad/control_flow.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "384130"
},
{
"name": "C++",
"bytes": "57006"
},
{
"name": "Makefile",
"bytes": "6008"
},
{
"name": "Python",
"bytes": "27560597"
}
],
"symlink_target": ""
}
|
import logging
import configparser
from novaclient import client as novaclient
from neutronclient.v2_0 import client as neutronclient
import utils
from neutronapi import NeutronIF
logging.basicConfig(level=logging.INFO)
class Booter():
"""
Boots the corresponding topology.
"""
def __init__(self, topo='fw_lb', *args, **kwargs):
"""
Read from config.ini the configuration for the topologies.
"""
self.topo = topo
self.args = args
self.kwargs = kwargs
self.config = configparser.ConfigParser()
self.config.read('config.ini')
def up(self):
"""
Call the booter to the corresponding topology class.
"""
if self.topo == 'fw_lb':
config = utils.parse_config(self.config['FwLbTopo'])
fw_config = utils.parse_fw_config(self.config['FWaaS'])
FwLbTopo(opts=config,
fw_opts=fw_config,
session=self.kwargs['session'],
token=self.kwargs['token'],
neutron_endpoint=self.kwargs['neutron_endpoint']).up()
class FwLbTopo():
"""
Implements the Firewall and LoadBalancer topology:
- Two or three networks
    - FWaaS and LBaaS
- n instances as servers
- m instances as storage nodes
"""
def __init__(self,
opts=None,
fw_opts=None,
net_names=['net0','net1'],
net_prefixes=['10.0.0.0/24','10.0.1.0/24'],
subnet_names=['subnet0','subnet1'],
dns_nameservers=['8.8.8.8'],
router_name='r',
router_ports=['port0','port1'],
flavors=['tiny_personalizada','tiny_personalizada'],
images=['trusty-server-cloudimg-amd64-cnvr','trusty-server-cloudimg-amd64-cnvr'],
secgroups=['default','default'],
key_names=['my_key','my_key'],
instances=[3,3],
userdata=['userdata/balancer-ud.txt',
'userdata/server-ud.txt',
'userdata/storage-ud.txt'],
fixed_ips=None,
session=None,
token=None,
neutron_endpoint=None,
*args, **kwargs):
"""
Initialize the topology. Authorization with nova and neutron using
a session from keystone for nova and a token for neutron.
"""
self.fw_opts = fw_opts
self.net_names = opts.get('net_names', net_names)
self.net_prefixes = opts.get('net_prefixes', net_prefixes)
self.subnet_names = opts.get('subnet_names', subnet_names)
self.dns_nameservers = opts.get('dns_nameservers', dns_nameservers)
self.router_name = opts.get('router_name', router_name)
self.router_ports = opts.get('router_ports', router_ports)
self.flavors = opts.get('flavors', flavors)
self.images = opts.get('images', images)
self.secgroups = opts.get('secgroups', secgroups)
self.key_names = opts.get('key_names', key_names)
self.instances = opts.get('instances', instances)
self.userdata = opts.get('userdata', userdata)
self.fixed_ips = opts.get('fixed_ips')
if session is None:
raise ValueError('No session provided')
if token is None:
raise ValueError('No token provided')
if neutron_endpoint is None:
raise ValueError('No neutron_endpoint provided')
self.session = session
self.token = token
self.neutron_endpoint = neutron_endpoint
self.nets = []
self.subnets = []
self.servers = []
def _create_net(self,neutron=None, net_name=None):
"""
Create the network using the neutronclient.
"""
net_body = {'network':{
'name': net_name,
'admin_state_up': True
}
}
net = neutron.create_network(body=net_body)
return net['network']
def _create_subnet(self, neutron=None, subnet_name=None,
subnet_prefix=None, net_id=None):
"""
Create the subnet attached to a network using the
neutronclient.
"""
subnet_body = {'subnets':[{
'cidr': subnet_prefix,
'ip_version': 4,
'network_id': net_id,
'name': subnet_name
}
]}
subnet = neutron.create_subnet(body=subnet_body)
body = {'subnet':{
'dns_nameservers': self.dns_nameservers
}}
neutron.update_subnet(subnet['subnets'][0]['id'],
body)
return subnet['subnets'][0]
def _create_router(self, neutron=None, nova=None, net_name=None,
router_name=None, port_names=None):
"""
Create router and add a port to the subnet in the net
specified by the name
"""
if isinstance(router_name, list):
router_name = router_name[0]
net_ids = list()
if isinstance(net_name, list):
for net in net_name:
net = nova.networks.find(label=net)
net_id = net.id
net_ids.append(net_id)
else:
net = nova.networks.find(label=net_name)
net_id = net.id
net_ids.append(net_id)
ext_net = nova.networks.find(label='ExtNet')
ext_net_id = ext_net.id
# net_ids = [net_id, ext_net_id]
request = {'router': {'name': router_name,
'admin_state_up': True}}
router = neutron.create_router(request)
router_id = router['router']['id']
neutron.add_gateway_router(router_id, {'network_id': ext_net_id})
for net_id in net_ids:
subnet_id = None
for subnet in neutron.list_subnets()['subnets']:
if subnet['network_id'] == net_id:
subnet_id = subnet['id']
neutron.add_interface_router(router_id,
{'subnet_id': subnet_id})
def _boot_instance(self, nova=None, image=None, flavor=None, nets=None,
key_name=None, secgroups=None, name=None, userdata=None, fixed_ip=None,
count=1):
"""
        Boot the instance(s) using the novaclient.
"""
image = nova.images.find(name=image)
flavor = nova.flavors.find(name=flavor)
secgroups = [secgroups]
nics = []
if not isinstance(nets,list):
nets = [nets]
for net in nets:
net = nova.networks.find(label=net)
nics.append({'net-id':net.id})
        if fixed_ip is not None:
for i, nic in enumerate(nics):
nic['v4-fixed-ip'] = fixed_ip[i]
        if userdata is None:
            f_userdata = None
        else:
            f_userdata = open(userdata, 'r')
        try:
            instance = nova.servers.create(
                name=name, image=image, flavor=flavor,
                key_name=key_name, nics=nics, max_count=count,
                min_count=count, security_groups=secgroups,
                userdata=f_userdata)
        finally:
            if f_userdata is not None:
                f_userdata.close()
        return instance
def up(self):
"""
Set up the topology.
"""
nova = novaclient.Client('2', session=self.session)
neutron = neutronclient.Client(endpoint_url=self.neutron_endpoint,
token=self.token)
neutron_if = NeutronIF()
# Create nets
logging.info('Creating networks...')
try:
for i in range(len(self.instances)):
self.nets.append(self._create_net(neutron=neutron,
net_name=self.net_names[i]))
except Exception as e:
logging.error('ERROR at creating networks:')
logging.error(e)
else:
logging.info('Success!')
# Create subnets into the created nets
logging.info('Creating subnetworks...')
try:
for i in range(len(self.instances)):
self.subnets.append(self._create_subnet(neutron=neutron,
subnet_name=self.subnet_names[i],
subnet_prefix=self.net_prefixes[i],
net_id=self.nets[i]['id']))
except Exception as e:
logging.error('ERROR at creating subnetworks')
logging.error(e)
else:
logging.info('Success!')
# Create router and connect to net
logging.info('Creating router...')
try:
self._create_router(neutron=neutron, nova=nova,
net_name=self.net_names[0],
router_name=self.router_name[0],
port_names=self.router_ports)
self._create_router(neutron=neutron, nova=nova,
net_name=self.net_names[1:],
router_name=self.router_name[1])
except Exception as e:
logging.error('ERROR at creating router')
logging.error(e)
else:
logging.info('Success!')
# Boot the load balancer instance
logging.info('Booting load balancer instance')
self._boot_instance(nova=nova, image=self.images[0],
flavor=self.flavors[0], nets=self.net_names[:2],
key_name=self.key_names[0],
secgroups=self.secgroups[0],
name='loadbalancer', userdata=self.userdata[0],
count=self.instances[0])
logging.info('Success!')
# Boot the server instances
logging.info("Booting server instances")
try:
self._boot_instance(nova=nova, image=self.images[1],
flavor=self.flavors[1], nets=self.net_names[1:],
key_name=self.key_names[1],
secgroups=self.secgroups[1],
name='server', userdata=self.userdata[1],
count=self.instances[1] )
except Exception as e:
logging.error('ERROR when creating servers')
logging.error(e)
else:
logging.info('Success!')
# Boot the storage instances
logging.info('Booting storage instances')
try:
self._boot_instance(nova=nova, image=self.images[2],
flavor=self.flavors[2], nets=self.net_names[2],
key_name=self.key_names[2],
secgroups=self.secgroups[2],
name='persist_1', count=1,
userdata=self.userdata[2],
fixed_ip=[self.fixed_ips[2]])
self._boot_instance(nova=nova, image=self.images[2],
flavor=self.flavors[2], nets=self.net_names[2],
key_name=self.key_names[2],
secgroups=self.secgroups[2],
name='persist_2', count=1,
userdata=self.userdata[3],
fixed_ip=[self.fixed_ips[3]])
except Exception as e:
logging.error('ERROR when creating storage instances')
logging.error(e)
else:
logging.info('Success!')
# Allocate a floating IP and associate to balancer instance
logging.info('Allocating IP')
try:
for server in nova.servers.list():
if server.name == 'loadbalancer':
id_ = server.id
nova.floating_ips.create(pool='ExtNet')
floating_ips = nova.floating_ips.list()
floating_ip = floating_ips[0].ip
nova.servers.add_floating_ip(server=id_,
address=floating_ip)
except Exception as e:
logging.error('ERROR when allocating IP')
logging.error(e)
else:
logging.info('Success!')
# Create Firewall
logging.info('Creating FWaaS')
try:
for fw_rule in self.fw_opts['rules']:
neutron_if.firewall_rule_create(fw_rule)
neutron_if.firewall_policy_create(name=self.fw_opts['policy_name'],
fw_rules=self.fw_opts['policy_rules'])
neutron_if.firewall_create(name=self.fw_opts['fw_name'],
fw_policy=self.fw_opts['policy_name'],
router=self.fw_opts['fw_router'])
except Exception as e:
logging.error('ERROR at creating FWaaS')
logging.error(e)
else:
logging.info('Success!')
|
{
"content_hash": "6ed402d5f1b3103198ac5a693c44b1bf",
"timestamp": "",
"source": "github",
"line_count": 341,
"max_line_length": 89,
"avg_line_length": 36.32551319648094,
"alnum_prop": 0.5393557762170017,
"repo_name": "nachtkatze/openstack-controller",
"id": "53ce2f774db699f92473d2e9e649c0e74cdf2586",
"size": "12387",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "boot.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "25851"
}
],
"symlink_target": ""
}
|
import pymongo
from scrapy.exceptions import DropItem
from scrapy.conf import settings
from scrapy import log
class MongoDBPipeline(object):
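    """Store validated items in the MongoDB collection configured in settings."""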
def __init__(self):
        connection = pymongo.MongoClient(
settings['MONGODB_SERVER'], settings['MONGODB_PORT'])
db = connection[settings['MONGODB_DB']]
self.collection = db[settings['MONGODB_COLLECTION']]
def process_item(self, item, spider):
valid = True
for data in item:
            # here we only check that each field's value is not empty,
            # but we could do any crazy validation we want
            if not item[data]:
valid = False
raise DropItem(
"Missing %s course from %s" % (data, item['url']))
if valid:
self.collection.insert(dict(item))
log.msg("Item written to MongoDB database %s/%s" %
(settings['MONGODB_DB'], settings['MONGODB_COLLECTION']),
level=log.DEBUG, spider=spider)
return item
|
{
"content_hash": "e8bcd5efb10b447db552f8df64f4e9b0",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 77,
"avg_line_length": 34.233333333333334,
"alnum_prop": 0.5813047711781889,
"repo_name": "benregn/itu-courses",
"id": "a579805b2f2ddb73064f5fe52f84965cda249ba6",
"size": "1027",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "itu/pipelines.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "19681"
},
{
"name": "Shell",
"bytes": "6734"
}
],
"symlink_target": ""
}
|
from .counter import Counter # noqa
from .functions import deep_update # noqa
from .functions import ensure_string, ensure_tuple # noqa
from .functions import fix_trailing_slash, remove_trailing_slash # noqa
from .functions import get_class_name, get_instance_name, get_method_name # noqa
from .functions import is_regex # noqa
from . import json2 # noqa
|
{
"content_hash": "32c6e9f5a60fdc1ac4bfec6383d0dc63",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 82,
"avg_line_length": 52.142857142857146,
"alnum_prop": 0.7589041095890411,
"repo_name": "ondrejkajinek/pyGrim",
"id": "fb923e0361e58c340873980a3f63e0ee4f51099f",
"size": "381",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pygrim/components/utils/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jinja",
"bytes": "3437"
},
{
"name": "Makefile",
"bytes": "3972"
},
{
"name": "Python",
"bytes": "171412"
},
{
"name": "Shell",
"bytes": "164"
}
],
"symlink_target": ""
}
|
"""Support for monitoring the Transmission BitTorrent client API."""
import logging
from homeassistant.const import CONF_NAME, STATE_IDLE
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity
from .const import DOMAIN, SENSOR_TYPES, STATE_ATTR_TORRENT_INFO
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Transmission sensors."""
tm_client = hass.data[DOMAIN][config_entry.entry_id]
name = config_entry.data[CONF_NAME]
dev = []
for sensor_type in SENSOR_TYPES:
dev.append(
TransmissionSensor(
sensor_type,
tm_client,
name,
SENSOR_TYPES[sensor_type][0],
SENSOR_TYPES[sensor_type][1],
)
)
async_add_entities(dev, True)
class TransmissionSensor(Entity):
"""Representation of a Transmission sensor."""
def __init__(
self, sensor_type, tm_client, client_name, sensor_name, unit_of_measurement
):
"""Initialize the sensor."""
self._name = sensor_name
self._state = None
self._tm_client = tm_client
self._unit_of_measurement = unit_of_measurement
self._data = None
self.client_name = client_name
self.type = sensor_type
self.unsub_update = None
@property
def name(self):
"""Return the name of the sensor."""
return f"{self.client_name} {self._name}"
@property
def unique_id(self):
"""Return the unique id of the entity."""
return f"{self._tm_client.api.host}-{self.name}"
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def should_poll(self):
"""Return the polling requirement for this sensor."""
return False
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._unit_of_measurement
@property
def available(self):
"""Could the device be accessed during the last update call."""
return self._tm_client.api.available
@property
def device_state_attributes(self):
"""Return the state attributes, if any."""
if self._tm_client.api.started_torrent_dict and self.type == "started_torrents":
return {STATE_ATTR_TORRENT_INFO: self._tm_client.api.started_torrent_dict}
return None
async def async_added_to_hass(self):
"""Handle entity which will be added."""
self.unsub_update = async_dispatcher_connect(
self.hass,
self._tm_client.api.signal_update,
self._schedule_immediate_update,
)
@callback
def _schedule_immediate_update(self):
self.async_schedule_update_ha_state(True)
    async def async_will_remove_from_hass(self):
"""Unsubscribe from update dispatcher."""
if self.unsub_update:
self.unsub_update()
self.unsub_update = None
def update(self):
"""Get the latest data from Transmission and updates the state."""
self._data = self._tm_client.api.data
if self.type == "completed_torrents":
self._state = self._tm_client.api.get_completed_torrent_count()
elif self.type == "started_torrents":
self._state = self._tm_client.api.get_started_torrent_count()
if self.type == "current_status":
if self._data:
upload = self._data.uploadSpeed
download = self._data.downloadSpeed
if upload > 0 and download > 0:
self._state = "Up/Down"
elif upload > 0 and download == 0:
self._state = "Seeding"
elif upload == 0 and download > 0:
self._state = "Downloading"
else:
self._state = STATE_IDLE
else:
self._state = None
if self._data:
if self.type == "download_speed":
mb_spd = float(self._data.downloadSpeed)
mb_spd = mb_spd / 1024 / 1024
self._state = round(mb_spd, 2 if mb_spd < 0.1 else 1)
elif self.type == "upload_speed":
mb_spd = float(self._data.uploadSpeed)
mb_spd = mb_spd / 1024 / 1024
self._state = round(mb_spd, 2 if mb_spd < 0.1 else 1)
elif self.type == "active_torrents":
self._state = self._data.activeTorrentCount
elif self.type == "paused_torrents":
self._state = self._data.pausedTorrentCount
elif self.type == "total_torrents":
self._state = self._data.torrentCount
|
{
"content_hash": "fdd01415b2007ddf285c4bd8edcc5538",
"timestamp": "",
"source": "github",
"line_count": 144,
"max_line_length": 88,
"avg_line_length": 34.05555555555556,
"alnum_prop": 0.5793230016313213,
"repo_name": "postlund/home-assistant",
"id": "0db731d6f01322654d78071aed47b10c295d460c",
"size": "4904",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "homeassistant/components/transmission/sensor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "20215859"
},
{
"name": "Shell",
"bytes": "6663"
}
],
"symlink_target": ""
}
|
"""Base class for TopicModel's REST calls
https://bigml.com/api/topicmodels
"""
try:
import simplejson as json
except ImportError:
import json
from bigml.api_handlers.resourcehandler import ResourceHandlerMixin
from bigml.api_handlers.resourcehandler import check_resource_type, \
resource_is_ready
from bigml.constants import TOPIC_MODEL_PATH
class TopicModelHandlerMixin(ResourceHandlerMixin):
"""This class is used by the BigML class as
    a mixin that provides the REST calls for topic models. It should not
be instantiated independently.
"""
def __init__(self):
"""Initializes the TopicModelHandler. This class is intended to be
used as a mixin on ResourceHandler, that inherits its
attributes and basic method from BigMLConnection, and must not be
instantiated independently.
"""
self.topic_model_url = self.url + TOPIC_MODEL_PATH
def create_topic_model(self, datasets, args=None, wait_time=3, retries=10):
"""Creates a Topic Model from a `dataset` or a list o `datasets`.
"""
create_args = self._set_create_from_datasets_args(
datasets, args=args, wait_time=wait_time, retries=retries)
body = json.dumps(create_args)
return self._create(self.topic_model_url, body)
def get_topic_model(self, topic_model, query_string='',
shared_username=None, shared_api_key=None):
"""Retrieves a Topic Model.
The topic_model parameter should be a string containing the
topic model ID or the dict returned by create_topic_model.
As the topic model is an evolving object that is processed
until it reaches the FINISHED or FAULTY state, the function will
return a dict that encloses the topic model values and state info
available at the time it is called.
If this is a shared topic model, the username and sharing api key
must also be provided.
"""
check_resource_type(topic_model, TOPIC_MODEL_PATH,
message="A Topic Model id is needed.")
return self.get_resource(topic_model,
query_string=query_string,
shared_username=shared_username,
shared_api_key=shared_api_key)
def topic_model_is_ready(self, topic_model, **kwargs):
"""Checks whether a topic model's status is FINISHED.
"""
check_resource_type(topic_model, TOPIC_MODEL_PATH,
message="A topic model id is needed.")
resource = self.get_topic_model(topic_model, **kwargs)
return resource_is_ready(resource)
def list_topic_models(self, query_string=''):
"""Lists all your Topic Models.
"""
return self._list(self.topic_model_url, query_string)
def update_topic_model(self, topic_model, changes):
"""Updates a Topic Model.
"""
check_resource_type(topic_model, TOPIC_MODEL_PATH,
message="A topic model id is needed.")
return self.update_resource(topic_model, changes)
def delete_topic_model(self, topic_model):
"""Deletes a Topic Model.
"""
check_resource_type(topic_model, TOPIC_MODEL_PATH,
message="A topic model id is needed.")
return self.delete_resource(topic_model)
def clone_topic_model(self, topic_model,
args=None, wait_time=3, retries=10):
"""Creates a cloned topic model from an existing `topic model`
"""
create_args = self._set_clone_from_args(
topic_model, "topicmodel", args=args, wait_time=wait_time,
retries=retries)
body = json.dumps(create_args)
return self._create(self.topic_model_url, body)
|
{
"content_hash": "3e2401bfd422a4db850a388255f5c0b6",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 79,
"avg_line_length": 36.61682242990654,
"alnum_prop": 0.6186830015313936,
"repo_name": "jaor/python",
"id": "6a1d0bb96fbe28355d202b2177f4b1d1a1c605a2",
"size": "4552",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "bigml/api_handlers/topicmodelhandler.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1799772"
}
],
"symlink_target": ""
}
|
"""This module contains objects representing Telegram bot command scopes."""
from typing import Any, Union, Optional, TYPE_CHECKING, Dict, Type
from telegram import TelegramObject, constants
from telegram.utils.types import JSONDict
if TYPE_CHECKING:
from telegram import Bot
class BotCommandScope(TelegramObject):
"""Base class for objects that represent the scope to which bot commands are applied.
Currently, the following 7 scopes are supported:
* :class:`telegram.BotCommandScopeDefault`
* :class:`telegram.BotCommandScopeAllPrivateChats`
* :class:`telegram.BotCommandScopeAllGroupChats`
* :class:`telegram.BotCommandScopeAllChatAdministrators`
* :class:`telegram.BotCommandScopeChat`
* :class:`telegram.BotCommandScopeChatAdministrators`
* :class:`telegram.BotCommandScopeChatMember`
Objects of this class are comparable in terms of equality. Two objects of this class are
considered equal, if their :attr:`type` is equal. For subclasses with additional attributes,
the notion of equality is overridden.
Note:
Please see the `official docs`_ on how Telegram determines which commands to display.
.. _`official docs`: https://core.telegram.org/bots/api#determining-list-of-commands
.. versionadded:: 13.7
Args:
type (:obj:`str`): Scope type.
Attributes:
type (:obj:`str`): Scope type.
"""
__slots__ = ('type', '_id_attrs')
DEFAULT = constants.BOT_COMMAND_SCOPE_DEFAULT
""":const:`telegram.constants.BOT_COMMAND_SCOPE_DEFAULT`"""
ALL_PRIVATE_CHATS = constants.BOT_COMMAND_SCOPE_ALL_PRIVATE_CHATS
""":const:`telegram.constants.BOT_COMMAND_SCOPE_ALL_PRIVATE_CHATS`"""
ALL_GROUP_CHATS = constants.BOT_COMMAND_SCOPE_ALL_GROUP_CHATS
""":const:`telegram.constants.BOT_COMMAND_SCOPE_ALL_GROUP_CHATS`"""
ALL_CHAT_ADMINISTRATORS = constants.BOT_COMMAND_SCOPE_ALL_CHAT_ADMINISTRATORS
""":const:`telegram.constants.BOT_COMMAND_SCOPE_ALL_CHAT_ADMINISTRATORS`"""
CHAT = constants.BOT_COMMAND_SCOPE_CHAT
""":const:`telegram.constants.BOT_COMMAND_SCOPE_CHAT`"""
CHAT_ADMINISTRATORS = constants.BOT_COMMAND_SCOPE_CHAT_ADMINISTRATORS
""":const:`telegram.constants.BOT_COMMAND_SCOPE_CHAT_ADMINISTRATORS`"""
CHAT_MEMBER = constants.BOT_COMMAND_SCOPE_CHAT_MEMBER
""":const:`telegram.constants.BOT_COMMAND_SCOPE_CHAT_MEMBER`"""
def __init__(self, type: str, **_kwargs: Any):
self.type = type
self._id_attrs = (self.type,)
@classmethod
def de_json(cls, data: Optional[JSONDict], bot: 'Bot') -> Optional['BotCommandScope']:
"""Converts JSON data to the appropriate :class:`BotCommandScope` object, i.e. takes
care of selecting the correct subclass.
Args:
data (Dict[:obj:`str`, ...]): The JSON data.
bot (:class:`telegram.Bot`): The bot associated with this object.
Returns:
The Telegram object.
"""
data = cls._parse_data(data)
if not data:
return None
_class_mapping: Dict[str, Type['BotCommandScope']] = {
cls.DEFAULT: BotCommandScopeDefault,
cls.ALL_PRIVATE_CHATS: BotCommandScopeAllPrivateChats,
cls.ALL_GROUP_CHATS: BotCommandScopeAllGroupChats,
cls.ALL_CHAT_ADMINISTRATORS: BotCommandScopeAllChatAdministrators,
cls.CHAT: BotCommandScopeChat,
cls.CHAT_ADMINISTRATORS: BotCommandScopeChatAdministrators,
cls.CHAT_MEMBER: BotCommandScopeChatMember,
}
if cls is BotCommandScope:
return _class_mapping.get(data['type'], cls)(**data, bot=bot)
return cls(**data)
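    # Illustrative example (the bot object here is hypothetical):
    #   BotCommandScope.de_json({'type': 'chat', 'chat_id': 123}, bot)
    # dispatches on 'type' and returns a BotCommandScopeChat with chat_id == 123.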
class BotCommandScopeDefault(BotCommandScope):
"""Represents the default scope of bot commands. Default commands are used if no commands with
a `narrower scope`_ are specified for the user.
.. _`narrower scope`: https://core.telegram.org/bots/api#determining-list-of-commands
.. versionadded:: 13.7
Attributes:
type (:obj:`str`): Scope type :attr:`telegram.BotCommandScope.DEFAULT`.
"""
__slots__ = ()
def __init__(self, **_kwargs: Any):
super().__init__(type=BotCommandScope.DEFAULT)
class BotCommandScopeAllPrivateChats(BotCommandScope):
"""Represents the scope of bot commands, covering all private chats.
.. versionadded:: 13.7
Attributes:
type (:obj:`str`): Scope type :attr:`telegram.BotCommandScope.ALL_PRIVATE_CHATS`.
"""
__slots__ = ()
def __init__(self, **_kwargs: Any):
super().__init__(type=BotCommandScope.ALL_PRIVATE_CHATS)
class BotCommandScopeAllGroupChats(BotCommandScope):
"""Represents the scope of bot commands, covering all group and supergroup chats.
.. versionadded:: 13.7
Attributes:
type (:obj:`str`): Scope type :attr:`telegram.BotCommandScope.ALL_GROUP_CHATS`.
"""
__slots__ = ()
def __init__(self, **_kwargs: Any):
super().__init__(type=BotCommandScope.ALL_GROUP_CHATS)
class BotCommandScopeAllChatAdministrators(BotCommandScope):
"""Represents the scope of bot commands, covering all group and supergroup chat administrators.
.. versionadded:: 13.7
Attributes:
type (:obj:`str`): Scope type :attr:`telegram.BotCommandScope.ALL_CHAT_ADMINISTRATORS`.
"""
__slots__ = ()
def __init__(self, **_kwargs: Any):
super().__init__(type=BotCommandScope.ALL_CHAT_ADMINISTRATORS)
class BotCommandScopeChat(BotCommandScope):
"""Represents the scope of bot commands, covering a specific chat.
Objects of this class are comparable in terms of equality. Two objects of this class are
considered equal, if their :attr:`type` and :attr:`chat_id` are equal.
.. versionadded:: 13.7
Args:
chat_id (:obj:`str` | :obj:`int`): Unique identifier for the target chat or username of the
target supergroup (in the format ``@supergroupusername``)
Attributes:
type (:obj:`str`): Scope type :attr:`telegram.BotCommandScope.CHAT`.
chat_id (:obj:`str` | :obj:`int`): Unique identifier for the target chat or username of the
target supergroup (in the format ``@supergroupusername``)
"""
__slots__ = ('chat_id',)
def __init__(self, chat_id: Union[str, int], **_kwargs: Any):
super().__init__(type=BotCommandScope.CHAT)
self.chat_id = (
chat_id if isinstance(chat_id, str) and chat_id.startswith('@') else int(chat_id)
)
self._id_attrs = (self.type, self.chat_id)
class BotCommandScopeChatAdministrators(BotCommandScope):
"""Represents the scope of bot commands, covering all administrators of a specific group or
supergroup chat.
Objects of this class are comparable in terms of equality. Two objects of this class are
considered equal, if their :attr:`type` and :attr:`chat_id` are equal.
.. versionadded:: 13.7
Args:
chat_id (:obj:`str` | :obj:`int`): Unique identifier for the target chat or username of the
target supergroup (in the format ``@supergroupusername``)
Attributes:
type (:obj:`str`): Scope type :attr:`telegram.BotCommandScope.CHAT_ADMINISTRATORS`.
chat_id (:obj:`str` | :obj:`int`): Unique identifier for the target chat or username of the
target supergroup (in the format ``@supergroupusername``)
"""
__slots__ = ('chat_id',)
def __init__(self, chat_id: Union[str, int], **_kwargs: Any):
super().__init__(type=BotCommandScope.CHAT_ADMINISTRATORS)
self.chat_id = (
chat_id if isinstance(chat_id, str) and chat_id.startswith('@') else int(chat_id)
)
self._id_attrs = (self.type, self.chat_id)
class BotCommandScopeChatMember(BotCommandScope):
"""Represents the scope of bot commands, covering a specific member of a group or supergroup
chat.
Objects of this class are comparable in terms of equality. Two objects of this class are
considered equal, if their :attr:`type`, :attr:`chat_id` and :attr:`user_id` are equal.
.. versionadded:: 13.7
Args:
chat_id (:obj:`str` | :obj:`int`): Unique identifier for the target chat or username of the
target supergroup (in the format ``@supergroupusername``)
user_id (:obj:`int`): Unique identifier of the target user.
Attributes:
type (:obj:`str`): Scope type :attr:`telegram.BotCommandScope.CHAT_MEMBER`.
chat_id (:obj:`str` | :obj:`int`): Unique identifier for the target chat or username of the
target supergroup (in the format ``@supergroupusername``)
user_id (:obj:`int`): Unique identifier of the target user.
"""
__slots__ = ('chat_id', 'user_id')
def __init__(self, chat_id: Union[str, int], user_id: int, **_kwargs: Any):
super().__init__(type=BotCommandScope.CHAT_MEMBER)
self.chat_id = (
chat_id if isinstance(chat_id, str) and chat_id.startswith('@') else int(chat_id)
)
self.user_id = int(user_id)
self._id_attrs = (self.type, self.chat_id, self.user_id)
|
{
"content_hash": "5f38796345cc2ae1d0a3a16cb02bd2ba",
"timestamp": "",
"source": "github",
"line_count": 244,
"max_line_length": 99,
"avg_line_length": 37.442622950819676,
"alnum_prop": 0.6609019264448336,
"repo_name": "tzpBingo/github-trending",
"id": "283f7682b2044a560b860556acb6444dacbeb851",
"size": "9969",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "codespace/python/telegram/botcommandscope.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Go",
"bytes": "11470"
},
{
"name": "HTML",
"bytes": "1543"
},
{
"name": "Python",
"bytes": "49985109"
},
{
"name": "Shell",
"bytes": "18039"
}
],
"symlink_target": ""
}
|
from django.urls import include, re_path
from .versioning import __versions__
apipatterns = [
re_path(
rf"^{version.__version__}/",
include(
(f"{version.__name__}.urls", "bananas"),
namespace=version.__version__,
),
)
for version in __versions__
]
urlpatterns = [re_path(r"^", include((apipatterns, "bananas"), namespace="bananas"))]
|
{
"content_hash": "35b858ca9fca242f775d4b3f2a1e6bfb",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 85,
"avg_line_length": 23.41176470588235,
"alnum_prop": 0.5678391959798995,
"repo_name": "5monkeys/django-bananas",
"id": "2fcca266b4ddcb01162f728d981272cea545c77c",
"size": "398",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/bananas/admin/api/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "39504"
},
{
"name": "Dockerfile",
"bytes": "590"
},
{
"name": "HTML",
"bytes": "8098"
},
{
"name": "JavaScript",
"bytes": "6506"
},
{
"name": "Makefile",
"bytes": "1508"
},
{
"name": "Python",
"bytes": "146427"
}
],
"symlink_target": ""
}
|
import json
from django.contrib import messages as contrib_messages
from django.contrib.auth.models import User
from django.http import (HttpResponseRedirect, HttpResponse,
HttpResponseBadRequest)
from django.views.decorators.http import require_POST
from django.shortcuts import get_object_or_404, redirect, render
from mobility.decorators import mobile_template
from multidb.pinning import mark_as_write
from kitsune.sumo.utils import is_ratelimited
from statsd import statsd
from tower import ugettext as _, ungettext
from kitsune.access.decorators import login_required
from kitsune.messages import MESSAGES_PER_PAGE
from kitsune.messages.forms import MessageForm, ReplyForm
from kitsune.messages.models import InboxMessage, OutboxMessage
from kitsune.messages.utils import send_message
from kitsune.sumo.urlresolvers import reverse
from kitsune.sumo.utils import paginate
@login_required
@mobile_template('messages/{mobile/}inbox.html')
def inbox(request, template):
user = request.user
messages = InboxMessage.objects.filter(to=user).order_by('-created')
count = messages.count()
messages = paginate(
request, messages, per_page=MESSAGES_PER_PAGE, count=count)
return render(request, template, {'msgs': messages})
@login_required
@mobile_template('messages/{mobile/}read.html')
def read(request, template, msgid):
message = get_object_or_404(InboxMessage, pk=msgid, to=request.user)
was_new = message.unread
if was_new:
message.update(read=True)
initial = {'to': message.sender, 'in_reply_to': message.pk}
form = ReplyForm(initial=initial)
response = render(request, template, {
'message': message, 'form': form})
if was_new:
response = mark_as_write(response)
return response
@login_required
@mobile_template('messages/{mobile/}read-outbox.html')
def read_outbox(request, template, msgid):
message = get_object_or_404(OutboxMessage, pk=msgid, sender=request.user)
return render(request, template, {
'message': _add_recipients(message)})
@login_required
@mobile_template('messages/{mobile/}outbox.html')
def outbox(request, template):
user = request.user
messages = OutboxMessage.objects.filter(sender=user).order_by('-created')
count = messages.count()
messages = paginate(
request, messages, per_page=MESSAGES_PER_PAGE, count=count)
for msg in messages.object_list:
_add_recipients(msg)
return render(request, template, {'msgs': messages})
@login_required
@mobile_template('messages/{mobile/}new.html')
def new_message(request, template):
"""Send a new private message."""
to = request.GET.get('to')
if to:
try:
for username in to.split(','):
User.objects.get(username=username)
except User.DoesNotExist:
contrib_messages.add_message(
request, contrib_messages.ERROR,
_('Invalid username provided. Enter a new username below.'))
return HttpResponseRedirect(reverse('messages.new'))
message = request.GET.get('message')
form = MessageForm(request.POST or None, initial={'to': to, 'message': message})
if (request.method == 'POST' and form.is_valid() and
            not is_ratelimited(request, 'private-message-day', '50/d')):
send_message(form.cleaned_data['to'], form.cleaned_data['message'],
request.user)
if form.cleaned_data['in_reply_to']:
irt = form.cleaned_data['in_reply_to']
try:
m = InboxMessage.objects.get(pk=irt, to=request.user)
m.update(replied=True)
except InboxMessage.DoesNotExist:
pass
contrib_messages.add_message(request, contrib_messages.SUCCESS,
_('Your message was sent!'))
return HttpResponseRedirect(reverse('messages.inbox'))
return render(request, template, {'form': form})
@login_required
def bulk_action(request, msgtype='inbox'):
"""Apply action to selected messages."""
msgids = request.POST.getlist('id')
if len(msgids) == 0:
contrib_messages.add_message(request, contrib_messages.ERROR,
_("No messages selected. Please try again."))
else:
if 'delete' in request.POST:
return delete(request, msgtype=msgtype)
elif 'mark_read' in request.POST and msgtype == 'inbox':
messages = InboxMessage.objects.filter(pk__in=msgids,
to=request.user)
messages.update(read=True)
elif 'mark_unread' in request.POST and msgtype == 'inbox':
messages = InboxMessage.objects.filter(pk__in=msgids,
to=request.user)
messages.update(read=False)
return redirect('messages.%s' % msgtype)
@login_required
@mobile_template('messages/{mobile/}delete.html')
def delete(request, template, msgid=None, msgtype='inbox'):
if msgid:
msgids = [msgid]
else:
try:
msgids = [int(m) for m in request.POST.getlist('id')]
except ValueError:
return HttpResponseBadRequest()
if msgtype == 'inbox':
messages = InboxMessage.objects.filter(pk__in=msgids, to=request.user)
else:
messages = OutboxMessage.objects.filter(pk__in=msgids,
sender=request.user)
if request.method == 'POST' and 'confirmed' in request.POST:
if messages.count() != len(msgids):
contrib_messages.add_message(request, contrib_messages.ERROR,
_("Messages didn't add up. Try again."))
else:
messages.delete()
msg = ungettext(u'The message was deleted!',
u'The messages were deleted!',
len(msgids))
contrib_messages.add_message(request, contrib_messages.SUCCESS,
msg)
if request.is_ajax():
            return HttpResponse(json.dumps([{'message': str(m)} for m in messages]))
return HttpResponseRedirect(reverse('messages.{t}'.format(t=msgtype)))
if msgtype == 'outbox':
for message in messages:
_add_recipients(message)
return render(request, template, {
'msgs': messages, 'msgid': msgid, 'msgtype': msgtype})
@require_POST
@login_required
def preview_async(request):
"""Ajax preview of posts."""
statsd.incr('forums.preview')
m = OutboxMessage(sender=request.user,
message=request.POST.get('content', ''))
return render(request, 'messages/includes/message_preview.html', {
'message': m})
def _add_recipients(msg):
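    """Annotate the message with its recipient count and, if there is exactly
    one recipient, with that recipient."""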
msg.recipients = msg.to.count()
if msg.recipients == 1:
msg.recipient = msg.to.all()[0]
else:
msg.recipient = None
return msg
|
{
"content_hash": "21f2007e741b5dfdfbc022c83e9343a6",
"timestamp": "",
"source": "github",
"line_count": 197,
"max_line_length": 84,
"avg_line_length": 35.619289340101524,
"alnum_prop": 0.6300413282029357,
"repo_name": "Osmose/kitsune",
"id": "f6e5765456f8478108f4c9555e32179520db5953",
"size": "7017",
"binary": false,
"copies": "13",
"ref": "refs/heads/master",
"path": "kitsune/messages/views.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ApacheConf",
"bytes": "2694"
},
{
"name": "CSS",
"bytes": "287014"
},
{
"name": "HTML",
"bytes": "618218"
},
{
"name": "JavaScript",
"bytes": "770369"
},
{
"name": "Python",
"bytes": "2805461"
},
{
"name": "Shell",
"bytes": "12519"
},
{
"name": "Smarty",
"bytes": "1918"
}
],
"symlink_target": ""
}
|
import sys
import numpy
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
ext_modules = [
Extension("nerven.epoc._parse",
sources=["src/nerven/epoc/_parse.pyx"],
include_dirs=[".", numpy.get_include()]),
]
setup(name='nerven',
version='0.1',
author='Sharif Olorin',
author_email='sio@tesser.org',
requires=[
'wxmpl',
'numpy',
],
cmdclass={'build_ext' : build_ext},
ext_modules=ext_modules,
package_dir={'' : 'src'},
packages=['nerven', 'nerven.epoc', 'nerven.writer'],
package_data={'nerven' : ['img/*.png']},
scripts=['src/nerven_gui'],
data_files=[('bin', ['src/nerven_gui'])],
)
|
{
"content_hash": "b657736782c0f9c131fd363fe7ec8794",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 58,
"avg_line_length": 25.733333333333334,
"alnum_prop": 0.5803108808290155,
"repo_name": "fractalcat/nerven",
"id": "36a1a048eabeab6406cdc96b0c32f4dc5191b891",
"size": "772",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "159782"
},
{
"name": "Makefile",
"bytes": "361"
},
{
"name": "Python",
"bytes": "40751"
}
],
"symlink_target": ""
}
|
import warnings
import pytest
import numpy as np
from numpy import testing as npt
import erfa
from astropy import units as u
from astropy.time import Time
from astropy.coordinates.builtin_frames import ICRS, AltAz
from astropy.coordinates.builtin_frames.utils import get_jd12
from astropy.coordinates import EarthLocation
from astropy.coordinates import SkyCoord
from astropy.utils import iers
from astropy.coordinates.angle_utilities import golden_spiral_grid
# These fixtures are used in test_iau_fullstack
@pytest.fixture(scope="function")
def fullstack_icrs():
rep = golden_spiral_grid(size=1000)
return ICRS(rep)
@pytest.fixture(scope="function")
def fullstack_fiducial_altaz(fullstack_icrs):
altazframe = AltAz(location=EarthLocation(lat=0*u.deg, lon=0*u.deg, height=0*u.m),
obstime=Time('J2000'))
with warnings.catch_warnings(): # Ignore remote_data warning
warnings.simplefilter('ignore')
result = fullstack_icrs.transform_to(altazframe)
return result
@pytest.fixture(scope="function", params=['J2000.1', 'J2010'])
def fullstack_times(request):
return Time(request.param)
@pytest.fixture(scope="function", params=[(0, 0, 0), (23, 0, 0), (-70, 0, 0), (0, 100, 0), (23, 0, 3000)])
def fullstack_locations(request):
    return EarthLocation(lat=request.param[0]*u.deg, lon=request.param[1]*u.deg,
                         height=request.param[2]*u.m)
@pytest.fixture(scope="function",
params=[(0*u.bar, 0*u.deg_C, 0, 1*u.micron),
(1*u.bar, 0*u.deg_C, 0*u.one, 1*u.micron),
(1*u.bar, 10*u.deg_C, 0, 1*u.micron),
(1*u.bar, 0*u.deg_C, 50*u.percent, 1*u.micron),
(1*u.bar, 0*u.deg_C, 0, 21*u.cm)])
def fullstack_obsconditions(request):
return request.param
def _erfa_check(ira, idec, astrom):
"""
This function does the same thing the astropy layer is supposed to do, but
all in erfa
"""
cra, cdec = erfa.atciq(ira, idec, 0, 0, 0, 0, astrom)
az, zen, ha, odec, ora = erfa.atioq(cra, cdec, astrom)
alt = np.pi/2-zen
cra2, cdec2 = erfa.atoiq('A', az, zen, astrom)
ira2, idec2 = erfa.aticq(cra2, cdec2, astrom)
dct = locals()
del dct['astrom']
return dct
def test_iau_fullstack(fullstack_icrs, fullstack_fiducial_altaz,
fullstack_times, fullstack_locations,
fullstack_obsconditions):
"""
Test the full transform from ICRS <-> AltAz
"""
# create the altaz frame
altazframe = AltAz(obstime=fullstack_times, location=fullstack_locations,
pressure=fullstack_obsconditions[0],
temperature=fullstack_obsconditions[1],
relative_humidity=fullstack_obsconditions[2],
obswl=fullstack_obsconditions[3])
aacoo = fullstack_icrs.transform_to(altazframe)
# compare aacoo to the fiducial AltAz - should always be different
assert np.all(np.abs(aacoo.alt - fullstack_fiducial_altaz.alt) > 50*u.milliarcsecond)
assert np.all(np.abs(aacoo.az - fullstack_fiducial_altaz.az) > 50*u.milliarcsecond)
# if the refraction correction is included, we *only* do the comparisons
    # where altitude >5 degrees. The SOFA guides imply that below 5 is where
    # accuracy gets more problematic, and testing reveals that alt<~0
# gives garbage round-tripping, and <10 can give ~1 arcsec uncertainty
if fullstack_obsconditions[0].value == 0:
# but if there is no refraction correction, check everything
msk = slice(None)
tol = 5*u.microarcsecond
else:
msk = aacoo.alt > 5*u.deg
# most of them aren't this bad, but some of those at low alt are offset
# this much. For alt > 10, this is always better than 100 masec
tol = 750*u.milliarcsecond
# now make sure the full stack round-tripping works
icrs2 = aacoo.transform_to(ICRS())
adras = np.abs(fullstack_icrs.ra - icrs2.ra)[msk]
addecs = np.abs(fullstack_icrs.dec - icrs2.dec)[msk]
assert np.all(adras < tol), f'largest RA change is {np.max(adras.arcsec * 1000)} mas, > {tol}'
assert np.all(addecs < tol), f'largest Dec change is {np.max(addecs.arcsec * 1000)} mas, > {tol}'
# check that we're consistent with the ERFA alt/az result
iers_tab = iers.earth_orientation_table.get()
xp, yp = u.Quantity(iers_tab.pm_xy(fullstack_times)).to_value(u.radian)
lon = fullstack_locations.geodetic[0].to_value(u.radian)
lat = fullstack_locations.geodetic[1].to_value(u.radian)
height = fullstack_locations.geodetic[2].to_value(u.m)
jd1, jd2 = get_jd12(fullstack_times, 'utc')
pressure = fullstack_obsconditions[0].to_value(u.hPa)
temperature = fullstack_obsconditions[1].to_value(u.deg_C)
# Relative humidity can be a quantity or a number.
relative_humidity = u.Quantity(fullstack_obsconditions[2], u.one).value
obswl = fullstack_obsconditions[3].to_value(u.micron)
astrom, eo = erfa.apco13(jd1, jd2,
fullstack_times.delta_ut1_utc,
lon, lat, height,
xp, yp,
pressure, temperature, relative_humidity,
obswl)
erfadct = _erfa_check(fullstack_icrs.ra.rad, fullstack_icrs.dec.rad, astrom)
npt.assert_allclose(erfadct['alt'], aacoo.alt.radian, atol=1e-7)
npt.assert_allclose(erfadct['az'], aacoo.az.radian, atol=1e-7)
def test_fiducial_roundtrip(fullstack_icrs, fullstack_fiducial_altaz):
"""
Test the full transform from ICRS <-> AltAz
"""
aacoo = fullstack_icrs.transform_to(fullstack_fiducial_altaz)
# make sure the round-tripping works
icrs2 = aacoo.transform_to(ICRS())
npt.assert_allclose(fullstack_icrs.ra.deg, icrs2.ra.deg)
npt.assert_allclose(fullstack_icrs.dec.deg, icrs2.dec.deg)
def test_future_altaz():
"""
While this does test the full stack, it is mostly meant to check that a
warning is raised when attempting to get to AltAz in the future (beyond
IERS tables)
"""
from astropy.utils.exceptions import AstropyWarning
# this is an ugly hack to get the warning to show up even if it has already
# appeared
from astropy.coordinates.builtin_frames import utils
if hasattr(utils, '__warningregistry__'):
utils.__warningregistry__.clear()
location = EarthLocation(lat=0*u.deg, lon=0*u.deg)
t = Time('J2161')
# check that these message(s) appear among any other warnings. If tests are run with
# --remote-data then the IERS table will be an instance of IERS_Auto which is
# assured of being "fresh". In this case getting times outside the range of the
# table does not raise an exception. Only if using IERS_B (which happens without
# --remote-data, i.e. for all CI testing) do we expect another warning.
with pytest.warns(AstropyWarning, match=r"Tried to get polar motions for "
"times after IERS data is valid.*") as found_warnings:
SkyCoord(1*u.deg, 2*u.deg).transform_to(AltAz(location=location, obstime=t))
if isinstance(iers.earth_orientation_table.get(), iers.IERS_B):
messages_found = ["(some) times are outside of range covered by IERS "
"table." in str(w.message) for w in found_warnings]
assert any(messages_found)
|
{
"content_hash": "4e9928db78e3427bb3d003a0de3b5161",
"timestamp": "",
"source": "github",
"line_count": 178,
"max_line_length": 106,
"avg_line_length": 41.96629213483146,
"alnum_prop": 0.6550200803212851,
"repo_name": "StuartLittlefair/astropy",
"id": "0dac266b2d23a46584894777ea516aa4534f5630",
"size": "7535",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "astropy/coordinates/tests/test_iau_fullstack.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "11034753"
},
{
"name": "C++",
"bytes": "47001"
},
{
"name": "Cython",
"bytes": "78631"
},
{
"name": "HTML",
"bytes": "1172"
},
{
"name": "Lex",
"bytes": "183333"
},
{
"name": "M4",
"bytes": "18757"
},
{
"name": "Makefile",
"bytes": "52457"
},
{
"name": "Python",
"bytes": "12224600"
},
{
"name": "Shell",
"bytes": "17024"
},
{
"name": "TeX",
"bytes": "853"
}
],
"symlink_target": ""
}
|
from __future__ import print_function
import argparse
import boto3
import botocore
import os
import sys
from aws_sudo import CommandParser
from six.moves import configparser, input
def sudo(cmd_args):
profile_config = read_config(cmd_args.profile)
credentials = {}
profile_config['session_timeout'] = cmd_args.session_timeout
profile_config['mfa_code'] = cmd_args.mfa_code
credentials = get_credentials(profile_config)
    if cmd_args.mode == 'in_place':
        update_credentials(cmd_args.profile, credentials)
    elif cmd_args.mode == 'export':
        print_exports(credentials)
    elif cmd_args.mode == 'proxy':
proxy_command(cmd_args.command, cmd_args.command_args, credentials)
def read_config(profile):
"""This reads our config files automatically, and combines config and
credentials files for us"""
profiles = botocore.session.get_session().full_config.get('profiles', {})
# Checks for the passed in profile, mostly for sanity
if profile not in profiles:
print("Profile '%s' does not exist in the config file." % profile)
quit(2)
# save profile name for future use
profiles[profile]['profile_name'] = profile
return profiles[profile]
def print_exports(credentials):
    # Set AWS/Boto environment variables before executing target command
print('export', end=' ')
print('AWS_ACCESS_KEY_ID=' + (credentials['AccessKeyId']), end=' ')
print('AWS_SECRET_ACCESS_KEY=' + (credentials['SecretAccessKey']), end=' ')
print('AWS_SESSION_TOKEN=' + (credentials['SessionToken']), end=' ')
print('AWS_SECURITY_TOKEN=' + (credentials['SessionToken']), end='')
def update_credentials(profile, credentials):
credentials_file = os.path.expanduser('~/.aws/credentials')
config = configparser.ConfigParser()
config.read(credentials_file)
# Create profile section in credentials file
if not config.has_section(profile):
config.add_section(profile)
# Set access credentials
# `aws_security_token` is used by boto
# `aws_session_token` is used by aws cli
config.set(
profile, 'aws_access_key_id', credentials['AccessKeyId'])
config.set(
profile, 'aws_secret_access_key', credentials['SecretAccessKey'])
config.set(
profile, 'aws_session_token', credentials['SessionToken'])
config.set(
profile, 'aws_security_token', credentials['SessionToken'])
# Update credentials file
with open(credentials_file, 'w') as credentials_file:
config.write(credentials_file)
print(
"# Aws credentials file got updated with temporary access for profile %s"
% profile
)
def proxy_command(command, command_args, credentials):
# Unset variables for sanity sake
os.unsetenv('AWS_DEFAULT_PROFILE')
os.unsetenv('AWS_PROFILE')
os.unsetenv('AWS_ACCESS_KEY_ID')
os.unsetenv('AWS_SECRET_ACCESS_KEY')
os.unsetenv('AWS_SESSION_TOKEN')
os.unsetenv('AWS_SECURITY_TOKEN')
    # Set AWS/Boto environment variables before executing target command
os.putenv('AWS_ACCESS_KEY_ID', (credentials['AccessKeyId']))
os.putenv('AWS_SECRET_ACCESS_KEY', (credentials['SecretAccessKey']))
os.putenv('AWS_SESSION_TOKEN', (credentials['SessionToken']))
os.putenv('AWS_SECURITY_TOKEN', (credentials['SessionToken']))
command_status = os.system(command + " " + " ".join(command_args))
exit(os.WEXITSTATUS(command_status))
def get_credentials(profile_config):
if 'role_arn' in profile_config:
# Assume role with or without MFA token
session = get_session(profile_config)
return assume_role(session, profile_config)
elif 'mfa_serial' in profile_config:
# This is normal AMI with MFA token
session = get_session(profile_config)
return login_with_mfa(session, profile_config)
elif 'source_profile' in profile_config or\
'aws_access_key_id' not in profile_config:
# This is most likely EC2 instance role
session = get_session(profile_config)
credentials = session.get_credentials().get_frozen_credentials()
return {
'AccessKeyId': credentials.access_key,
'SecretAccessKey': credentials.secret_key,
'SessionToken': str(credentials.token)
}
else:
return {
'AccessKeyId': profile_config['aws_access_key_id'],
'SecretAccessKey': profile_config['aws_secret_access_key'],
'SessionToken': ''
}
def get_session(profile_config):
session_profile = profile_config['profile_name']
if 'source_profile' in profile_config:
session_profile = profile_config['source_profile']
if 'region' in profile_config:
os.putenv('AWS_DEFAULT_REGION', profile_config['region'])
os.putenv('AWS_REGION', profile_config['region'])
# Create a session using profile or EC2 Instance Role
# To use Instance Role set `source_profile` to empty string in aws profile
# configuration file
session = boto3.Session(profile_name=session_profile)
return session
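# Illustrative sketch (not part of the original source): a hypothetical ~/.aws/config
# showing the profile layout that get_session() and get_credentials() expect. All
# account IDs, role ARNs and profile names below are made up.
#
#   [profile admin]
#   role_arn = arn:aws:iam::123456789012:role/admin
#   source_profile = default
#   mfa_serial = arn:aws:iam::123456789012:mfa/alice
#   region = eu-west-1
#
#   [profile instance]
#   # per the comment in get_session(), an empty source_profile tells this tool
#   # to fall back to the EC2 instance role credentials
#   source_profile =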
def login_with_mfa(session, profile_config):
# Assume role using STS client
sts_client = session.client('sts')
credentials = sts_client.get_session_token(
DurationSeconds=profile_config['session_timeout'],
SerialNumber=profile_config['mfa_serial'],
TokenCode=profile_config['mfa_code']
)
return credentials['Credentials']
def assume_role(session, profile_config):
role_arn = profile_config['role_arn']
# Assume role using STS client
sts_client = session.client('sts')
if 'mfa_serial' in profile_config:
if profile_config['mfa_code'] is None:
            profile_config['mfa_code'] = input("Enter MFA token: ")
assumedRoleObject = sts_client.assume_role(
RoleArn=role_arn,
RoleSessionName="AssumeRoleSession",
DurationSeconds=profile_config['session_timeout'],
SerialNumber=profile_config['mfa_serial'],
TokenCode=profile_config['mfa_code']
)
else:
assumedRoleObject = sts_client.assume_role(
RoleArn=role_arn,
RoleSessionName="AssumeRoleSession",
DurationSeconds=profile_config['session_timeout']
)
return assumedRoleObject['Credentials']
def main():
sudo(CommandParser.CommandParser().get_arguments())
|
{
"content_hash": "e3996aebf5f5bfc2a2dddeec7fd72fce",
"timestamp": "",
"source": "github",
"line_count": 187,
"max_line_length": 81,
"avg_line_length": 34.37967914438503,
"alnum_prop": 0.6680665733395551,
"repo_name": "voytek-solutions/aws-sudo",
"id": "c9c19191e90cb9144dc37be04d55ff86a5c92ca2",
"size": "6572",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "aws_sudo/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "779"
},
{
"name": "Python",
"bytes": "13521"
},
{
"name": "Shell",
"bytes": "668"
}
],
"symlink_target": ""
}
|
__author__ = 'ossama'
|
{
"content_hash": "63591e32cdf3cbbac1cb306a258fd45e",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 21,
"avg_line_length": 21,
"alnum_prop": 0.5714285714285714,
"repo_name": "Shabaka/grafo",
"id": "589cd72666538f5319b148ff659495acb693aa32",
"size": "21",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "grafo/utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "12000"
}
],
"symlink_target": ""
}
|
import sqlite3
_db = sqlite3.connect('dashboard.db')
c = _db.cursor()
# Create table
c.execute(
'''
CREATE TABLE ticketData
(
baseurl,
ticketid,
status
)
    ''')
# persist the table definition and release the connection
_db.commit()
_db.close()
|
{
"content_hash": "6c18977decb757b2df29fb4ed0178951",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 37,
"avg_line_length": 11.125,
"alnum_prop": 0.6179775280898876,
"repo_name": "beeldengeluid/divedashboard-ui",
"id": "ed5b4d0029109fbff7ac6e199fc04725df622e8e",
"size": "178",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/initdb.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "296244"
},
{
"name": "Dockerfile",
"bytes": "272"
},
{
"name": "HTML",
"bytes": "22686"
},
{
"name": "JavaScript",
"bytes": "2870"
},
{
"name": "Python",
"bytes": "27524"
},
{
"name": "Ruby",
"bytes": "700"
}
],
"symlink_target": ""
}
|
import logging
from collections import namedtuple
from django.conf import settings
from django.db import IntegrityError
import six
from celery.result import GroupResult
from corehq.util.metrics import metrics_counter
from soil.exceptions import TaskFailedError
TaskProgress = namedtuple('TaskProgress',
['current', 'total', 'percent', 'error', 'error_message'])
class STATES(object):
missing = -1
not_started = 0
started = 1
success = 2
failed = 3
class TaskStatus(namedtuple('TaskStatus', ['result', 'error', 'state', 'progress'])):
def missing(self):
return self.state == STATES.missing
def not_started(self):
return self.state == STATES.not_started
def started(self):
return self.state == STATES.started
def success(self):
return self.state == STATES.success
def failed(self):
return self.state == STATES.failed
def get_task_progress(task):
error = False
error_message = ''
try:
if not task:
info = None
else:
info = task.info
except (TypeError, NotImplementedError):
current = total = percent = None
logging.exception("No celery result backend?")
else:
if info is None:
current = total = percent = None
elif isinstance(info, Exception):
current = total = percent = 100
error = True
error_message = "%s: %s" % (type(info).__name__, info)
else:
current = info.get('current')
total = info.get('total')
if total == 0:
percent = 100
else:
percent = current * 100 // total if total and current is not None else 0
return TaskProgress(
current=current,
total=total,
percent=percent,
error=error,
error_message=error_message,
)
def set_task_progress(task, current, total, src='unknown'):
metrics_counter('commcare.celery.set_task_progress', tags={
'src': src
})
update_task_state(task, 'PROGRESS', {'current': current, 'total': total})
class TaskProgressManager(object):
"""
A context manager that mediates calls to `set_task_progress`
and only flushes updates when progress % changes by 1/resolution or more
(conceptual "pixel size" on progress bar)
and flushes on __exit__
"""
def __init__(self, task, src='unknown_via_progress_manager', resolution=100):
self.task = task
self._resolution = resolution
self._value = {'current': None, 'total': None}
self._src = src
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.flush()
def set_progress(self, current, total):
new_value = {'current': current, 'total': total}
if self._should_flush(new_value):
self._value = new_value
self.flush()
def _should_flush(self, new_value):
return self._quantized_value(**self._value) != self._quantized_value(**new_value)
def _quantized_value(self, current, total):
return self._resolution * current // total if current and total else None
def flush(self):
set_task_progress(self.task, src=self._src, **self._value)
def update_task_state(task, state, meta):
try:
if task:
task.update_state(state=state, meta=meta)
except (TypeError, NotImplementedError):
pass
except IntegrityError:
# Not called in task context just pass
pass
def get_multiple_task_progress(task):
current = sum(int(result.ready()) for result in task.results)
total = len(task.subtasks)
percent = current * 100 // total if total and current is not None else 0
return TaskProgress(
current=current,
total=total,
percent=percent,
error=None,
error_message=None,
)
def get_task_status(task, is_multiple_download_task=False):
context_result = None
context_error = None
is_ready = False
failed = False
if not task:
progress = get_task_progress(None)
context_result = False
context_error = []
elif is_multiple_download_task:
if task.ready():
context_result, context_error = _get_download_context_multiple_tasks(task)
progress = get_multiple_task_progress(task)
else:
if task.failed():
failed = True
result = task.result
if task.successful():
is_ready = True
context_result = result and result.get('messages')
elif result and isinstance(result, Exception):
context_error = six.text_type(result)
if '\t' in context_error:
context_error = [err for err in context_error.split('\t') if err]
elif result and result.get('errors'):
failed = True
context_error = result.get('errors')
progress = get_task_progress(task)
def progress_complete():
return (
getattr(settings, 'CELERY_TASK_ALWAYS_EAGER', False) or
progress and progress.percent == 100 and
not progress.error
)
is_ready = is_ready or progress_complete()
if failed:
state = STATES.failed
if isinstance(task.result, Exception) and not context_error:
context_error = "%s: %s" % (type(task.result).__name__, task.result)
elif is_ready:
state = STATES.success
elif not _is_real_task(task):
state = STATES.missing
elif _is_task_pending(task):
state = STATES.not_started
elif progress.percent is None:
state = STATES.missing
else:
state = STATES.started
return TaskStatus(
state=state,
result=context_result,
error=context_error,
progress=progress,
)
def _is_real_task(task):
# You can look up a task with a made-up ID and it'll give you a meaningless task object
# Make sure the task object you have corresponds to an actual celery task
if task:
# Non-real "tasks" will have all null values except for
# - status: "PENDING"
# - task_id: <task_id>
# If ANYTHING else is set, we give it the benefit of the doubt and call it real
return any(
value is not None
for key, value in task._get_task_meta().items()
if not (
(key == 'status' and value == 'PENDING')
or key == 'task_id'
)
)
else:
return False
def _is_task_pending(task):
if isinstance(task, GroupResult):
return any([async_task.state == 'PENDING' for async_task in task.children])
else:
return task.state == 'PENDING'
def _get_download_context_multiple_tasks(task):
"""for grouped celery tasks, append all results to the context
"""
results = task.results
messages = []
errors = []
for result in results:
try:
task_result = result.get()
except Exception as e: # Celery raises whatever exception was thrown
# in the task when accessing the result
errors.append(e)
else:
try:
messages.append(task_result.get("messages"))
except AttributeError:
messages.append(str(task_result))
return messages, errors
|
{
"content_hash": "d9424b69695996793ca2e7e53cfacba2",
"timestamp": "",
"source": "github",
"line_count": 254,
"max_line_length": 91,
"avg_line_length": 29.381889763779526,
"alnum_prop": 0.5934610746348653,
"repo_name": "dimagi/commcare-hq",
"id": "081a2e96c2771c12f3003aa19fbf104ec9065ea0",
"size": "7463",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "corehq/ex-submodules/soil/progress.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "82928"
},
{
"name": "Dockerfile",
"bytes": "2341"
},
{
"name": "HTML",
"bytes": "2589268"
},
{
"name": "JavaScript",
"bytes": "5889543"
},
{
"name": "Jinja",
"bytes": "3693"
},
{
"name": "Less",
"bytes": "176180"
},
{
"name": "Makefile",
"bytes": "1622"
},
{
"name": "PHP",
"bytes": "2232"
},
{
"name": "PLpgSQL",
"bytes": "66704"
},
{
"name": "Python",
"bytes": "21779773"
},
{
"name": "Roff",
"bytes": "150"
},
{
"name": "Shell",
"bytes": "67473"
}
],
"symlink_target": ""
}
|
import os
import tensorflow
def module_to_path(module):
file = module.__file__
sep = os.sep
return file[file.rfind("site-packages") + len("site-packages") + len(sep):-len("__init__.py") - len(sep)].replace(sep, ".")
root = tensorflow # or tensorflow.compat.v1
module_type = type(tensorflow)
print('\n'.join('%s %s' % (name, module_to_path(module))
for name in dir(root)
if not name.startswith('_')
for module in (getattr(root, name),)
if isinstance(module, module_type)))
|
{
"content_hash": "08fd1133775ad2461d178fac429cd947",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 127,
"avg_line_length": 32.470588235294116,
"alnum_prop": 0.5887681159420289,
"repo_name": "JetBrains/intellij-community",
"id": "3bc79027d3418e62216e8554a0fb614c30cc3e01",
"size": "552",
"binary": false,
"copies": "13",
"ref": "refs/heads/master",
"path": "python/testData/packages/tensorflow/modules.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
from django.db import models
from django.conf import settings
class Event(models.Model):
name = models.CharField(max_length=100)
location = models.CharField(max_length=100)
date = models.DateTimeField()
last_modified = models.DateTimeField(auto_now=True)
def __str__(self):
return '{} at {} on {}'.format(self.name, self.location, self.date)
class EventRole(models.Model):
role = models.CharField(max_length=50)
def __str__(self):
return self.role
class EventAttending(models.Model):
class Meta:
abstract = True
event = models.ForeignKey('Event')
role = models.ForeignKey('EventRole')
def __str__(self):
return 'attending {} as {}'.format(self.event, self.role)
class LeagueMemberEventAttending(EventAttending):
user = models.ForeignKey(settings.AUTH_USER_MODEL)
def __str__(self):
return '{} {}'.format(self.user, super(LeagueMemberEventAttending, self).__str__())
class VisitorEventAttending(EventAttending):
name = models.CharField(max_length=100)
contact_details = models.CharField(max_length=100, blank=True)
def __str__(self):
return 'Visitor {} {}'.format(self.name, super(VisitorEventAttending, self).__str__())
|
{
"content_hash": "838f3ebc9dea2a6415884a77f6c0c779",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 94,
"avg_line_length": 27.733333333333334,
"alnum_prop": 0.6714743589743589,
"repo_name": "leepgent/rdec",
"id": "3d6f799e231704002f8a5d55e59f22c70b3c9f31",
"size": "1248",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rdec/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "16565"
},
{
"name": "Python",
"bytes": "34645"
}
],
"symlink_target": ""
}
|
from solution import NumArray
nums = [-2, 0, 3, -5, 2, -1]
numArray = NumArray(nums)
print(numArray.sumRange(0, 2))
print(numArray.sumRange(2, 5))
print(numArray.sumRange(0, 5))
|
{
"content_hash": "b5db8d482c6ada20d5ca0f4256bbd7d0",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 30,
"avg_line_length": 25.571428571428573,
"alnum_prop": 0.6983240223463687,
"repo_name": "zhlinh/leetcode",
"id": "b5fcf14487dfeb974fcf3f8975a733659e0137e4",
"size": "226",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "0303.Range Sum Query - Immutable/test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "478111"
}
],
"symlink_target": ""
}
|
"""
Description here
"""
import logging as log
import unittest
import pytransmission.popgen.moran as m
import os
import tempfile
log.basicConfig(level=log.DEBUG, format='%(asctime)s %(levelname)s: %(message)s')
class MoranModelTest(unittest.TestCase):
def test_basic_watkins_convergence(self):
pop = 1000
mutation = 0.1
answer = 46.0517 # given by Mathematica 9
res = m.moran_watkins_convergence_to_stationarity(pop, mutation)
log.debug("estimated convergence time: %s answer: %s", res, answer)
self.assertAlmostEqual(answer, res, None, None, 0.01)
def test_mutation_from_theta(self):
theta = [0.25, 0.5, 1.0, 2.0, 3.0, 10.0]
popsize = 100
for t in theta:
mut = m.moran_mutation_rate_from_theta(popsize, t)
self.assertTrue(True,"Not a full test, always passes")
def test_expected_k(self):
theta = [0.25, 0.5, 1.0, 2.0, 3.0, 10.0]
ssize = 50
for t in theta:
(e_k, v_k) = m.moran_expected_traits_at_locus(t, ssize)
self.assertTrue(True, "Not a full test, always passes")
if __name__ == "__main__":
unittest.main()
|
{
"content_hash": "f0811f413dcd30fc8cab5770ffb9697d",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 81,
"avg_line_length": 24.142857142857142,
"alnum_prop": 0.6145393068469992,
"repo_name": "mmadsen/pytransmission",
"id": "e9f6453c3137567f905467e4ff1dbcee768a39e8",
"size": "1392",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/test_moran.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "37935"
}
],
"symlink_target": ""
}
|
import tests.model_control.test_ozone_custom_models_enabled as testmod
testmod.build_model( ['Difference'] , ['LinearTrend'] , ['Seasonal_Second'] , ['SVR'] );
|
{
"content_hash": "16d7d4f57fed5bb46d1d938f40ccd4d4",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 88,
"avg_line_length": 40.25,
"alnum_prop": 0.7142857142857143,
"repo_name": "antoinecarme/pyaf",
"id": "bbfa6940b7136581e123734c1fe56491181edb6b",
"size": "161",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/model_control/detailed/transf_Difference/model_control_one_enabled_Difference_LinearTrend_Seasonal_Second_SVR.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "6773299"
},
{
"name": "Procfile",
"bytes": "24"
},
{
"name": "Python",
"bytes": "54209093"
},
{
"name": "R",
"bytes": "807"
},
{
"name": "Shell",
"bytes": "3619"
}
],
"symlink_target": ""
}
|
#!/usr/bin/env python
import sys
import subprocess
import time
import threading
import traceback
import json
import signal
from bin import eva_var
from bin import eva_utils
conf = eva_var.conf
stat = eva_var.stat
common = eva_var.common
def parse_input():
conf_file = sys.argv[1]
if len(sys.argv) == 3:
conf.g_web_report = False
conf_dict = json.load(open(conf_file, 'r'))
bench_cmd_prefix = './tera_bench --compression_ratio=1 '
for pre, post in conf_dict.iteritems():
if pre == conf.TABLE_NAME:
conf.g_test_conf[conf.TABLE_NAME] = post
if pre == conf.KEY_SIZE:
conf.g_test_conf[conf.KEY_SIZE] = int(post)
bench_cmd_prefix += '--key_size={ks} '.format(ks=post)
if pre == conf.VALUE_SIZE:
conf.g_test_conf[conf.VALUE_SIZE] = int(post)
bench_cmd_prefix += '--value_size={vs} '.format(vs=post)
if pre == conf.KEY_SEED:
conf.g_test_conf[conf.KEY_SEED] = int(post)
bench_cmd_prefix += '--key_seed={ks} '.format(ks=post)
if pre == conf.VALUE_SEED:
conf.g_test_conf[conf.VALUE_SEED] = int(post)
bench_cmd_prefix += '--value_seed={vs} '.format(vs=post)
if pre == conf.MODE:
conf.g_test_conf[conf.MODE] = post
if post == conf.MODE_SEQ_WRITE:
bench_cmd_prefix += '--benchmarks=seq '
common.EMAIL_BLOCK_TITLE = 'Sequential Write'
elif post == conf.MODE_RAND_WRITE:
bench_cmd_prefix += '--benchmarks=random '
common.EMAIL_BLOCK_TITLE = 'Random Write'
elif post == conf.MODE_SCAN:
common.EMAIL_BLOCK_TITLE = 'Scan'
elif post == conf.MODE_READ:
bench_cmd_prefix += '--benchmarks=random '
common.EMAIL_BLOCK_TITLE = 'Read'
elif pre == conf.TABLET_NUM:
conf.g_test_conf[conf.TABLET_NUM] = int(post)
elif pre == conf.SPLIT_SIZE:
conf.g_test_conf[conf.SPLIT_SIZE] = int(post)
elif pre == conf.TS_NUMBER:
conf.g_test_conf[conf.TS_NUMBER] = int(post)
elif pre == conf.WRITE_SPEED_LIMIT:
conf.g_speed_limit = post
elif pre == conf.READ_SPEED_LIMIT:
conf.g_test_conf[conf.READ_SPEED_LIMIT] = int(post)
elif pre == 'random':
bench_cmd_prefix += '--random=' + post + ' '
elif pre == conf.ENTRY_NUM:
num = float(post) * common.MEGA
bench_cmd_prefix += '--num=' + str(int(num)) + ' '
conf.g_test_conf[conf.ENTRY_NUM] = int(num)
elif pre == conf.SCAN_BUFFER:
buffer = float(post)
buffer *= common.MEGA
conf.g_test_conf[conf.SCAN_BUFFER] = str(int(buffer))
elif pre == conf.SCHEMA:
conf.g_test_conf[conf.SCHEMA] = post
conf.g_test_conf[conf.LG_NUM] = len(post)
elif pre == conf.STEP:
conf.g_test_conf[conf.STEP] = post
elif pre == 'web_report_type':
conf.g_web_report_type = post
conf.g_test_conf[conf.ENTRY_SIZE] = conf.g_test_conf[conf.KEY_SIZE] + conf.g_test_conf[conf.VALUE_SIZE]
conf.g_test_conf[conf.WRITE_SPEED_LIMIT] = int(round(float(conf.g_speed_limit) / conf.g_test_conf[conf.TABLET_NUM] * common.MEGA / conf.g_test_conf[conf.ENTRY_SIZE]))
conf.g_test_conf[conf.READ_SPEED_LIMIT] = int(float(conf.g_test_conf[conf.READ_SPEED_LIMIT]) / conf.g_test_conf[conf.TABLET_NUM])
conf.g_test_conf[conf.CF_NUM], conf.g_test_conf[conf.CF] = \
eva_utils.table_manipulate(conf.g_test_conf[conf.TABLE_NAME], conf.CF, conf.g_test_conf[conf.SCHEMA])
if conf.g_test_conf[conf.CF] != '':
bench_cmd_prefix += '--cf=' + conf.g_test_conf[conf.CF] + ' '
if conf.g_test_conf[conf.STEP] == 'True':
bench_cmd_prefix += '--key_step=' + str(common.RANDOM_MAX / conf.g_test_conf[conf.ENTRY_NUM]) + ' '
conf.TERA_BENCH = bench_cmd_prefix
conf.g_datasize = (conf.g_test_conf[conf.CF_NUM] * conf.g_test_conf[conf.TABLET_NUM] *
conf.g_test_conf[conf.ENTRY_NUM] * conf.g_test_conf[conf.ENTRY_SIZE])
if conf.g_test_conf[conf.MODE] == conf.MODE_SEQ_WRITE or conf.g_test_conf[conf.MODE] == conf.MODE_RAND_WRITE:
print '\t%-25s' % 'estimated running time:', get_time_form((conf.g_datasize >> 20) / float(conf.g_speed_limit))
else:
print '\t%-25s' % 'estimated running time:', get_time_form(conf.g_test_conf[conf.ENTRY_NUM] /
conf.g_test_conf[conf.READ_SPEED_LIMIT])
conf.g_datasize = get_data_size(conf.g_datasize)
print '\t%-25s' % 'user data size:', conf.g_datasize
if common.g_logger is not None:
common.g_logger.info('running tera_mark: ' + str(conf.g_test_conf))
def work():
try:
common.g_next_query_time = time.time() + common.QUERY_INTERVAL
common.g_query_thread = threading.Thread(target=eva_utils.query)
common.g_query_event = threading.Event()
run_test()
except:
common.g_logger.info(traceback.print_exc())
def run_test():
common.g_query_thread.setDaemon(True)
common.g_query_thread.start()
common.g_logger.info('running tera_mark with {n} tablets'.format(n=conf.g_test_conf[conf.TABLET_NUM]))
wait_list = []
kill_list = []
start_time = time.time()
if conf.g_test_conf[conf.MODE] == conf.MODE_SEQ_WRITE or\
conf.g_test_conf[conf.MODE] == conf.MODE_RAND_WRITE:
wait_list, kill_list = run_write_test()
elif conf.g_test_conf[conf.MODE] == conf.MODE_SCAN:
wait_list, kill_list = run_scan_test()
elif conf.g_test_conf[conf.MODE] == conf.MODE_READ:
wait_list, kill_list = run_read_test()
count = 0
wait_num = conf.g_test_conf[conf.TABLET_NUM] * 2 / 3 + 1
while count < wait_num:
count = 0
for ret in wait_list:
if ret.poll() is not None:
count += 1
time.sleep(common.QUERY_INTERVAL)
for ret in wait_list:
try:
ret.kill()
except OSError:
pass
end_time = time.time()
for ret in kill_list:
ret.kill()
total_time = get_time_form(end_time - start_time)
common.g_logger.info('done running test: ' + total_time)
common.g_exit = True
common.g_query_event.set()
common.g_query_thread.join()
compute_write_main(total_time)
def compute_write_main(total_time):
try:
eva_utils.compute_ts_stat()
eva_utils.compute_stat()
except ZeroDivisionError:
common.g_logger.error(traceback.print_exc())
mail = open(common.MAIL_PATH, 'a')
web = None
if conf.g_web_report_type != '' and conf.g_web_report:
web = open(common.WEB_PATH, 'a')
desp = 'data={datasize} key={ks} value={vs} lg={lg} cf={cf} run_time={t}'.format(
datasize=conf.g_datasize, ks=get_data_size(conf.g_test_conf[conf.KEY_SIZE]),
vs=get_data_size(conf.g_test_conf[conf.VALUE_SIZE]), lg=conf.g_test_conf[conf.LG_NUM],
cf=conf.g_test_conf[conf.CF_NUM], t=total_time, e=str(common.g_force_exit))
if conf.g_test_conf[conf.MODE] == conf.MODE_SEQ_WRITE or conf.g_test_conf[conf.MODE] == conf.MODE_RAND_WRITE:
desp += 'write_speed={ws}/TS*M schema={s}'.format(
ws=int(conf.g_speed_limit) / int(conf.g_test_conf[conf.TS_NUMBER]),
s=json.dumps(conf.g_test_conf[conf.SCHEMA], separators=(',', ':')))
elif conf.g_test_conf[conf.MODE] == conf.MODE_READ:
desp += 'write_speed={ws}/TS*M read_speed={rs}/TS*Qps schema={s}'.format(
ws=int(conf.g_speed_limit) / int(conf.g_test_conf[conf.TS_NUMBER]),
rs=int(int(conf.g_test_conf[conf.READ_SPEED_LIMIT]) * int(conf.g_test_conf[conf.TABLET_NUM]) /
int(conf.g_test_conf[conf.TS_NUMBER])),
s=json.dumps(conf.g_test_conf[conf.SCHEMA], separators=(',', ':')))
elif conf.g_test_conf[conf.MODE] == conf.MODE_SCAN:
desp += 'write_speed={ws}/TS*M scan_buffer={buffer}/M'.format(
ws=int(conf.g_speed_limit) / int(conf.g_test_conf[conf.TS_NUMBER]),
buffer=float(conf.g_test_conf[conf.SCAN_BUFFER])/common.MEGA)
eva_utils.write_email(mail, web, desp)
def run_write_test():
eva_utils.table_manipulate(conf.g_test_conf[conf.TABLE_NAME], common.CREATE, conf.g_test_conf[conf.SCHEMA])
wait_list = []
for i in range(conf.g_test_conf[conf.TABLET_NUM]):
prefix = '%04d' % i
bench_cmd = conf.TERA_BENCH + " | awk -F '\t' '{print \"" + prefix + """\"$1"\t"$2"\t"$3"\t"$4}' """
if conf.g_test_conf[conf.KV] is True:
bench_cmd = conf.TERA_BENCH + " | awk -F '\t' '{print \"" + prefix + """\"$1"\t"$2}' """
cmd = '{bench} | ./tera_mark --mode=w --tablename={name} --type=async --verify=false --entry_limit={limit}'.\
format(bench=bench_cmd, name=conf.g_test_conf[conf.TABLE_NAME], limit=str(conf.g_test_conf[conf.WRITE_SPEED_LIMIT]))
print cmd
fout = open('../tmp/'+str(i)+'.w.out', 'w')
ferr = open('../tmp/'+str(i)+'.w.err', 'w')
ret = subprocess.Popen(cmd, stdout=fout, stderr=ferr, shell=True)
wait_list.append(ret)
return wait_list, []
def run_scan_test():
wait_list = []
fp = open(common.TMP_DIR + 't.deli', 'r')
deli_str = fp.read()
deli = deli_str.split('\n')
deli = filter(None, deli)
start_end_key = []
key_pair = ["", ""]
for tablet in deli:
key_pair[1] = tablet
start_end_key.append(key_pair)
key_pair = [tablet, ""]
start_end_key.append(key_pair)
for i in range(len(start_end_key)):
cmd = './tera_mark --mode=s --tablename={name} --type=async --verify=false --start_key={skey} --end_key={ekey} --buf_size={buffer}'.\
format(name=conf.g_test_conf[conf.TABLE_NAME], skey=start_end_key[i][0], ekey=start_end_key[i][1], buffer=conf.g_test_conf[conf.SCAN_BUFFER])
print cmd
fout = open('../tmp/'+str(i)+'.r.out', 'w')
ferr = open('../tmp/'+str(i)+'.r.err', 'w')
ret = subprocess.Popen(cmd, stdout=fout, stderr=ferr, shell=True)
wait_list.append(ret)
return wait_list, []
bench_cmd = ''
def run_read_test():
write_ret_list = []
read_ret_list = []
if conf.g_test_conf[conf.WRITE_SPEED_LIMIT] != 0:
for i in range(conf.g_test_conf[conf.TABLET_NUM]):
prefix = '%04d' % i
global bench_cmd
if conf.g_test_conf[conf.KV] is True:
tera_bench = './tera_bench --value_size={vs} --compression_ratio=1 --key_seed=111 --value_seed=111 --key_size={ks} --benchmarks=random --num=10000000'.\
format(ks=conf.g_test_conf[conf.KEY_SIZE], vs=conf.g_test_conf[conf.VALUE_SIZE])
bench_cmd = tera_bench + " | awk -F '\t' '{print \"" + prefix + """\"$1"\t"$2}' """
else:
tera_bench = './tera_bench --value_size={vs} --compression_ratio=1 --key_seed=111 --value_seed=111 --key_size={ks} --benchmarks=random --cf={cf} --num=10000000'.\
format(cf=conf.g_test_conf[conf.CF], ks=conf.g_test_conf[conf.KEY_SIZE], vs=conf.g_test_conf[conf.VALUE_SIZE])
bench_cmd = tera_bench + " | awk -F '\t' '{print \"" + prefix + """\"$1"\t"$2"\t"$3"\t"$4}' """
cmd = '{bench} | ./tera_mark --mode=w --tablename={name} --type=async --verify=false --entry_limit={limit}'.\
format(bench=bench_cmd, name=conf.g_test_conf[conf.TABLE_NAME], limit=str(conf.g_test_conf[conf.WRITE_SPEED_LIMIT]))
print cmd
fout = open('../tmp/'+str(i)+'.w.out', 'w')
ferr = open('../tmp/'+str(i)+'.w.err', 'w')
ret = subprocess.Popen(cmd, stdout=fout, stderr=ferr, shell=True)
write_ret_list.append(ret)
for i in range(conf.g_test_conf[conf.TABLET_NUM]):
prefix = '%04d' % i
bench_cmd = conf.TERA_BENCH[0:conf.TERA_BENCH.rfind('--cf')] + " | awk -F '\t' '{print \"" + prefix + """\"$1}' """
cmd = '{bench} | ./tera_mark --mode=r --tablename={name} --type=async --verify=false --entry_limit={limit}'.\
format(bench=bench_cmd, name=conf.g_test_conf[conf.TABLE_NAME], limit=conf.g_test_conf[conf.READ_SPEED_LIMIT])
print cmd
fout = open('../tmp/'+str(i)+'.r.out', 'w')
ferr = open('../tmp/'+str(i)+'.r.err', 'w')
ret = subprocess.Popen(cmd, stdout=fout, stderr=ferr, shell=True)
read_ret_list.append(ret)
return read_ret_list, write_ret_list
def handler(signum, frame):
common.g_force_exit = True
common.g_exit = True
def get_data_size(size):
size = int(size)
post_fix = ['B', 'K', 'M', 'G', 'T']
sizes = []
tmp = size
try:
for i in range(len(post_fix)):
sizes.append(tmp % 1024)
tmp >>= 10
if tmp == 0:
break
if len(sizes) <= 1:
return str(sizes[0]) + post_fix[0]
else:
largest = len(sizes) - 1
size = sizes[largest] + float(sizes[largest - 1]) / 1024
return '%03.2f' % size + post_fix[largest]
except:
common.g_logger.info(traceback.print_exc())
def get_time_form(total_time):
total_time = int(total_time)
hours = total_time / 3600
mins = (total_time % 3600) / 60
return str(hours) + 'h' + str(mins) + 'm'
def main():
signal.signal(signal.SIGINT, handler)
signal.signal(signal.SIGTERM, handler)
eva_utils.init()
parse_input()
work()
if __name__ == '__main__':
main()
|
{
"content_hash": "43b719dff0c24b08829178b1ce1a6b19",
"timestamp": "",
"source": "github",
"line_count": 312,
"max_line_length": 178,
"avg_line_length": 44.64102564102564,
"alnum_prop": 0.5613153360137851,
"repo_name": "yvxiang/tera",
"id": "f4d1bd28b36245df0c1ab1ef83d691eda8327820",
"size": "14091",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/benchmark/eva/run.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "80009"
},
{
"name": "C++",
"bytes": "3542326"
},
{
"name": "Java",
"bytes": "29553"
},
{
"name": "Makefile",
"bytes": "15260"
},
{
"name": "Protocol Buffer",
"bytes": "28535"
},
{
"name": "Python",
"bytes": "115896"
},
{
"name": "Shell",
"bytes": "34400"
}
],
"symlink_target": ""
}
|
from netforce.model import Model, fields
import time
class CartPromotion(Model):
_name = "ecom.cart.promotion"
_fields = {
"cart_id": fields.Many2One("ecom.cart", "Cart", required=True, on_delete="cascade"),
"promotion_id": fields.Many2One("sale.promotion","Promotion",required=True),
"product_id": fields.Many2One("product","Discount Product"),
"qty": fields.Decimal("Discount Qty"),
"percent": fields.Decimal("Discount Percent"),
"amount": fields.Decimal("Discount Amount"),
"cond_product_id": fields.Many2One("product","Condition Product"),
"cond_qty": fields.Decimal("Condition Qty"),
}
_order = "id"
CartPromotion.register()
|
{
"content_hash": "a9bbbcc3616d300572fb3617b42cc61e",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 92,
"avg_line_length": 37.526315789473685,
"alnum_prop": 0.6479663394109397,
"repo_name": "sidzan/netforce",
"id": "ca63187fbbd6f87ef8e2ec9afeea299c84be9a59",
"size": "1818",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "netforce_ecom/netforce_ecom/models/ecom_cart_promotion.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "248732"
},
{
"name": "HTML",
"bytes": "543196"
},
{
"name": "Java",
"bytes": "11870"
},
{
"name": "JavaScript",
"bytes": "3659528"
},
{
"name": "Makefile",
"bytes": "353"
},
{
"name": "PHP",
"bytes": "2274"
},
{
"name": "Python",
"bytes": "3263548"
},
{
"name": "Shell",
"bytes": "117"
}
],
"symlink_target": ""
}
|
class StopDispatching(StopIteration):
def __init__(self):
self.returnValue = None
class Dispatcher(object):
def __init__(self):
self._listeners = {}
def fire(self, name, *args, **kwargs):
if not self.hasListeners(name):
return
for listener in self._listeners[name]:
listener(*args, **kwargs)
def listen(self, name, listener, priority=None):
if not callable(listener):
raise TypeError('Listener has to be callable')
if name not in self._listeners:
self._listeners[name] = []
self._listeners[name].append(listener)
def __call__(self, *args, **kwargs):
return self.fire(*args, **kwargs)
def hasListeners(self, name):
return name in self._listeners
|
{
"content_hash": "b9acc45d51f627740e3dd2459905663d",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 58,
"avg_line_length": 24.96875,
"alnum_prop": 0.590738423028786,
"repo_name": "mtils/ems",
"id": "5e1abd2a656471adebf4153561e42287ad4070f8",
"size": "800",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ems/event/dispatcher.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "3631"
},
{
"name": "Python",
"bytes": "1924893"
},
{
"name": "QML",
"bytes": "16755"
}
],
"symlink_target": ""
}
|
import re
import debug, tests
from common import TestCommon, TimeoutError
from results import RawResults
class BulkTestCommon(TestCommon):
use_memcpy = None
def get_module_name(self):
return "bulkbench"
def get_build_targets(self, build, machine):
targets = super(BulkTestCommon, self).get_build_targets(build, machine)
targets.append('%s/sbin/%s' %
(machine.get_bootarch(), self.get_module_name()))
return targets
def run(self, build, machine, testdir):
# compute two core IDs on different sockets to benchmark between
sendcore = machine.get_coreids()[0]
recvcore = machine.get_coreids()[machine.get_cores_per_socket()]
# Iterate over all bulk block sizes
for i in [2048]:
debug.log('running %s block size %d' % (self.name, i))
modules = self.get_modules(build, machine)
modules.add_module(self.get_module_name(),
["core=%d" % sendcore, i, "send", self.use_memcpy])
modules.add_module(self.get_module_name(),
["core=%d" % recvcore, i, "recv", self.use_memcpy])
self.boot(machine, modules)
for line in self.collect_data(machine):
yield line
def process_data(self, testdir, rawiter):
results = RawResults('buffersize')
data = []
for line in rawiter:
m = re.match("rawresult (\d+)", line)
if m:
data.append(2048 / int(m.group(1)))
results.add_group("2048", data)
return results
@tests.add_test
class BulkThroughputTest(BulkTestCommon):
''' Bulk transport throughput microbenchmark '''
name = "bulk"
use_memcpy = "nomemcpy"
@tests.add_test
class BulkMemThroughputTest(BulkTestCommon):
''' Bulk transport throughput microbenchmark with memcpy on receiver '''
name = "bulk_memcpy"
use_memcpy = "memcpy"
|
{
"content_hash": "38275355cbf2234939bb47817a8ae392",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 82,
"avg_line_length": 35.357142857142854,
"alnum_prop": 0.601010101010101,
"repo_name": "modeswitch/barrelfish",
"id": "7496ba90054ff575569f25c45a5cadacd89f4948",
"size": "2412",
"binary": false,
"copies": "5",
"ref": "refs/heads/default",
"path": "tools/harness/tests/bulktests.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "998862"
},
{
"name": "C",
"bytes": "46857143"
},
{
"name": "C++",
"bytes": "1745172"
},
{
"name": "Clojure",
"bytes": "7002"
},
{
"name": "Emacs Lisp",
"bytes": "1639"
},
{
"name": "Gnuplot",
"bytes": "3830"
},
{
"name": "Haskell",
"bytes": "141869"
},
{
"name": "Objective-C",
"bytes": "115504"
},
{
"name": "Perl",
"bytes": "2716335"
},
{
"name": "Prolog",
"bytes": "2743156"
},
{
"name": "Python",
"bytes": "5352"
},
{
"name": "Scheme",
"bytes": "4249"
},
{
"name": "Scilab",
"bytes": "5315"
},
{
"name": "Shell",
"bytes": "171692"
},
{
"name": "Tcl",
"bytes": "18591"
},
{
"name": "TeX",
"bytes": "735459"
},
{
"name": "eC",
"bytes": "5079"
}
],
"symlink_target": ""
}
|
'''
Syntax:
variable = input('prompt label')
print('result text', variable)
when run, input() will pause and wait for the user to type a value.
note: differences between python 2 and 3.
in 2, if you want to use user input as a string, you must use raw_input(). version 3 naturally interprets input() as a string
'''
myname = input('please enter your name: ')
myage = input('and your age please?: ')
print('Hello world, my name is', myname, 'and I am', myage, 'years old.')
#formatting
print('Hello world, my name is %s and I am %s years old.' %(myname,myage))
#or
print('Hello world, my name is {} and I am {} years old.'.format(myname,myage))
#formatting, with triple quotes
'''
long messages with line breaks can be printed with triple quotes
'''
#example:
print('''Hello world.
my name is {}
and I am {} years old.'''.format(myname,myage))
#escape characters in string
'''
\t tab. example: 'hello\tworld'
\n prints new line
\\ prints backslash
\" prints double quote
\' prints single quote
r raw text. example: r'hello\tworld' will return 'hello\tworld'
'''
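#extra example (added for illustration): the escape sequences listed above in action
print('hello\tworld')          # tab between the words
print('line one\nline two')    # newline
print('a backslash: \\ and a quote: \"')
print(r'raw text keeps \t and \n as-is')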
#choice & decisions: if, for, while loops, try/except
##############
##############
#condition statements (if true= true, else= false)
'''
== equals
!= not equals #5!=2
< smaller than #2<5
> greater than #5>2
<= smaller or equals
>= greater than or equal
'''
# operators: and or not
'''
and #returns true if all conditions are met
or #returns true if one condition is met
not #inverts a condition: returns true when the condition is false
'''
print(5==5 and 2>1)
#result: true
print(5==6 and 2>1)
#result: false
print(5==6 or 2>1)
#result: true
#if
'''
syntax:
if varible:
decisions
elif varible2:
decisions
elif varible3:
decisions
else:
decisions
note: no () are needed after if, elif, else. also, no {} needed to define start and end of statement
indentation is key. anything indented will be treated as a block of code to execute if true.
'''
#example:
userinput = input('enter 1 or 2: ')
if userinput == "1":
print("hello world")
print("how are you?")
elif userinput == "2":
print("python rocks!")
print("I love python")
else:
print ("you did not enter a valid number")
#result:
#hello world
#how are you?
#inline if
'''
simplified if statement, for 1 condition tasks
'''
#examples:
myint = 11
num1 = 12 if myint==10 else 13
print("this is task a" if myint ==10 else "this is task b")
#for loop
'''
for #loop executes a block of code repeatedly until the condition in the for statement is no longer valid.
#iterable- anything that can be looped over, such as a string.
#Syntax:
for a in iterable:
    print (a)
'''
#example:
pets = ['cats', 'dogs', 'rabbits', 'hamsters']
for mypets in pets:
print (mypets)
# the for statement above takes all members of the pets list and assigns each member to the mypets variable.
#result:
#cats
#dogs
#rabbits
#hamsters
#to also get the list index, use enumerate():
for index, mypets in enumerate(pets):
print (index, mypets)
#result:
#(0, 'cats')
#(1, 'dogs')
#(2, 'rabbits')
#(3, 'hamsters')
#looping over a string.
message = 'hello'
for i in message:
print (i)
#result:
#h
#e
#l
#l
#o
#looping through a sequence of numbers
'''
#syntax
range(start, end, step)
#note: if start is not given, generates from 0
if step is not given, then the numbers generated will be consecutive
end is required, but it is not included in the result
'''
#Examples:
print(list(range(5)))
#Result: [0, 1, 2, 3, 4]
print(list(range(3,10)))
#Result: [3, 4, 5, 6, 7, 8, 9]
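#extra example (added for illustration): the optional step argument of range()
print(list(range(0, 10, 2)))
#Result: [0, 2, 4, 6, 8]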
#example in FOR
for i in range(5):
print (i)
#Result:
#0
#1
#2
#3
#4
#while loop
'''
while # repeatedly executes instructions inside the loop while a certain condition remains valid.
#syntax:
while condition is true:
    do A
'''
#example
counter = 5
while counter > 0:
print ("Counter - ", counter)
counter = counter - 1
#Result:
#Counter -  5
#Counter -  4
#Counter -  3
#Counter -  2
#Counter -  1
#example
counter = 5
while counter < 10:
print ("Counter - ", counter)
counter = counter + 1
#Result:
#Counter -  5
#Counter -  6
#Counter -  7
#Counter -  8
#Counter -  9
'''
###important!###
beware of infinite loops!!!!
always increment or decrement the counter (e.g. counter = counter - 1)
so the while condition can eventually become False and the loop stops.
if not, INFINITY AND BEYOND!!
'''
#Break loop
'''
to exit a loop early, use the break keyword
'''
#nested example:
j=0
for i in range(5):
j = j + 2
print ('i= ', i, ', j = ', j)
if j== 6:
break
# continue in loop
'''
with continue, the rest of the loop AFTER the keyword is skipped for the iteration
'''
#example:
j=0
for i in range(5):
j=j+2
print ('\ni= ', i, ', j = ', j)
if j == 6 :
continue
print('i will be skipped over if j=6')
'''
when j=6 the line after continue keyword is not printed. everything else runs as normal.
'''
# try, except
'''
try, except controls how a program proceeds when an error occurs
#syntax:
try:
do something
except:
do something else when error occurs
'''
#example:
try:
answer=12/0
print (answer)
except:
print ('danger will robinson!!!')
#except executes when an error occurs (dividing 12 by 0)
# for specified error messages, name the exception type after except
try:
    userInput1 = int(input("please enter a number:"))
    userInput2 = int(input("please enter another number:"))
    answer = userInput1/userInput2
    print ("the answer is ", answer)
    myFile = open("missing.txt",'r')
except ValueError:
    print ("error: non number entered")
except ZeroDivisionError:
    print ("error: cannot divide by zero")
except IOError:
    print ("error: file could not be opened")
|
{
"content_hash": "83304fed9bc0e9895926d49c76768455",
"timestamp": "",
"source": "github",
"line_count": 286,
"max_line_length": 120,
"avg_line_length": 18.32867132867133,
"alnum_prop": 0.6785578023655093,
"repo_name": "lexrhodd/py_fun",
"id": "2b99b53312ee5b48796bb68b51238d746cb0382a",
"size": "5378",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Alexland/alex_hello_world.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "19389"
}
],
"symlink_target": ""
}
|
from .iht import *
from .fasta import *
from .zeroSR1 import *
from .proxes import *
from .wavelet import *
from .iswt import *
from .iuwt import *
from .alps import *
from .smooth import *
from .proxes_rank1 import *
from .damp import *
from .bsbl import *
|
{
"content_hash": "72bebdf3b2b3831312aea97ed972db4a",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 27,
"avg_line_length": 21.75,
"alnum_prop": 0.7088122605363985,
"repo_name": "aasensio/pyiacsun",
"id": "f5f86cfc3ae4140a33ffd2c9f37131fe3dae1919",
"size": "261",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyiacsun/sparse/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "838"
},
{
"name": "FORTRAN",
"bytes": "1001471"
},
{
"name": "Forth",
"bytes": "997"
},
{
"name": "Makefile",
"bytes": "7242"
},
{
"name": "Python",
"bytes": "235204"
},
{
"name": "Shell",
"bytes": "945"
}
],
"symlink_target": ""
}
|
import os
from google.appengine.runtime import DeadlineExceededError
from werkzeug import (
BaseResponse, Request
)
from kay.utils.test import Client
from kay.app import get_application
from kay.conf import LazySettings
from kay.ext.testutils.gae_test_base import GAETestBase
class BadUrlsTestCase(GAETestBase):
def setUp(self):
s = LazySettings(settings_module='kay.tests.regressiontests.badurls_settings')
self.app = get_application(settings=s)
try:
self.client = Client(self.app, BaseResponse)
except DeadlineExceededError:
pass
def test_bad_url_map_load(self):
self.assertFalse(getattr(self.app.app, 'url_map', None))
|
{
"content_hash": "17e0d10a8c3cf162cdb6b8e5e352d1c5",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 82,
"avg_line_length": 27.875,
"alnum_prop": 0.7548579970104634,
"repo_name": "gmist/kay-ru",
"id": "bff0bba9d9ab230fffe6a2b676283cfe0fda6139",
"size": "670",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "kay/tests/regressiontests/badurls_test.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "82472"
},
{
"name": "JavaScript",
"bytes": "6131"
},
{
"name": "Python",
"bytes": "2544834"
},
{
"name": "Shell",
"bytes": "137"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals, division, absolute_import
from builtins import * # noqa pylint: disable=unused-import, redefined-builtin
import logging
from datetime import datetime, timedelta
import time
from sqlalchemy import Table, Column, Integer, Float, Unicode, DateTime, Date, func
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.schema import ForeignKey
from sqlalchemy.orm import relation
from dateutil.parser import parse as dateutil_parse
from flexget import db_schema, plugin
from flexget.utils.soup import get_soup
from flexget.event import event
from flexget.utils import requests
from flexget.utils.database import year_property
log = logging.getLogger('api_bluray')
Base = db_schema.versioned_base('api_bluray', 0)
# association tables
genres_table = Table('bluray_movie_genres', Base.metadata,
Column('movie_id', Integer, ForeignKey('bluray_movies.id')),
Column('genre_name', Integer, ForeignKey('bluray_genres.name')))
Base.register_table(genres_table)
BASE_URL = 'http://m.blu-ray.com/'
def bluray_request(endpoint, **params):
full_url = BASE_URL + endpoint
response = requests.get(full_url, params=params)
if response.content:
return response.json(strict=False)
def extract_release_date(release_date):
if not release_date or release_date.lower() == 'no release date':
release_date = 'Dec 31, %s' % datetime.now().year
return dateutil_parse(release_date).date()
class BlurayMovie(Base):
__tablename__ = 'bluray_movies'
id = Column(Integer, primary_key=True, autoincrement=False, nullable=False)
name = Column(Unicode)
url = Column(Unicode)
release_date = Column(Date)
year = year_property('release_date')
runtime = Column(Integer)
overview = Column(Unicode)
country = Column(Unicode)
studio = Column(Unicode)
rating = Column(Float)
bluray_rating = Column(Integer)
certification = Column(Unicode)
_genres = relation('BlurayGenre', secondary=genres_table, backref='movies')
genres = association_proxy('_genres', 'name')
updated = Column(DateTime, default=datetime.now, nullable=False)
def __init__(self, title, year):
if year:
title_year = '{} ({})'.format(title, year)
else:
title_year = title
params = {
'section': 'bluraymovies',
'country': 'ALL',
'keyword': title,
'_': str(int(time.time() * 1000))
}
country_params = {'_': params['_']}
try:
response = bluray_request('quicksearch/search.php', **params)
if not response or 'items' not in response:
raise LookupError('No search results found for {} on blu-ray.com'.format(title_year))
search_results = response['items']
countries = bluray_request('countries.json.php', **country_params) or {}
search_results = sorted(search_results, key=lambda k: extract_release_date(k.get('reldate')))
except requests.RequestException as e:
raise LookupError('Error searching for {} on blu-ray.com: {}'.format(title_year, e))
# Simply take the first result unless year does not match
for result in search_results:
if year and str(year) != result['year']:
continue
self.id = int(result['url'].split('/')[-2])
self.name = result['title']
flag = result['flag']
country_code = flag.split('/')[-1].split('.')[0].lower() # eg. http://some/url/UK.png -> uk
# find country based on flag url, default United States
country = 'United States'
for c in countries['countries']:
if c['c'].lower() == country_code:
country = c['n']
self.country = country
self.release_date = extract_release_date(result.get('reldate'))
self.bluray_rating = int(result['rating']) if result['rating'] else None
# Used for parsing some more data, sadly with soup
self.url = result['url']
movie_info_response = requests.get(self.url).content
movie_info = get_soup(movie_info_response)
# runtime and rating, should be the last span tag with class subheading
bluray_info = movie_info.find('div', attrs={'class': 'bluray'})
bluray_info = bluray_info.find_all('span', attrs={'class': 'subheading'})[-1].text.split('|')
self.studio = bluray_info[0].strip()
for info in bluray_info[1:]:
if 'min' in info:
self.runtime = int(info.replace('min', '').strip())
elif 'Rated' in info:
self.certification = info.replace('Rated', '').strip()
# rating
rating_tag = movie_info.find('div', id='ratingscore')
self.rating = float(rating_tag.text.strip()) if rating_tag else None
# Third onecol_content contains some information we want
onecol_content = movie_info.find_all('div', attrs={'class': 'onecol_content'})[2]
# overview, genres etc
contents = onecol_content.find('div').find('div')
overview_tag = contents.find('p')
self.overview = overview_tag.text.strip() if overview_tag else None
# genres
genres_table = contents.find('table')
if not genres_table:
break
genres_content = genres_table.find_all('tr')
if not genres_content:
break
genres = set()
for genre in genres_content:
genres.add(genre.find('td').text.strip())
self._genres = [BlurayGenre(name=genre) for genre in genres]
break
else:
raise LookupError('No search results found for {} on blu-ray.com'.format(title_year))
class BlurayGenre(Base):
__tablename__ = 'bluray_genres'
name = Column(Unicode, primary_key=True, nullable=False)
class BluraySearchResult(Base):
__tablename__ = 'bluray_search_results'
search = Column(Unicode, primary_key=True)
movie_id = Column(Integer, ForeignKey('bluray_movies.id'), nullable=True)
movie = relation(BlurayMovie)
def __init__(self, search, movie_id=None, movie=None):
self.search = search.lower()
if movie_id:
self.movie_id = movie_id
if movie:
self.movie = movie
class ApiBluray(object):
"""Does lookups to Blu-ray.com and provides movie information. Caches lookups."""
@staticmethod
def lookup(title=None, year=None, only_cached=False, session=None):
if not title:
raise LookupError('No criteria specified for blu-ray.com lookup')
title_year = title + ' ({})'.format(year) if year else title
movie_filter = session.query(BlurayMovie).filter(func.lower(BlurayMovie.name) == title.lower())
if year:
movie_filter = movie_filter.filter(BlurayMovie.year == year)
movie = movie_filter.first()
if not movie:
found = session.query(BluraySearchResult). \
filter(BluraySearchResult.search == title_year.lower()).first()
if found and found.movie:
movie = found.movie
if movie:
# Movie found in cache, check if cache has expired. Shamefully stolen from api_tmdb
refresh_time = timedelta(days=2)
if movie.release_date:
if movie.release_date > datetime.now().date() - timedelta(days=7):
# Movie is less than a week old, expire after 1 day
refresh_time = timedelta(days=1)
else:
age_in_years = (datetime.now().date() - movie.release_date).days / 365
refresh_time += timedelta(days=age_in_years * 5)
if movie.updated < datetime.now() - refresh_time and not only_cached:
log.debug('Cache has expired for %s, attempting to refresh from blu-ray.com.', movie.name)
try:
updated_movie = BlurayMovie(title=title, year=year)
except LookupError as e:
log.error('Error refreshing movie details for %s from blu-ray.com, cached info being used. %s',
title, e)
else:
movie = session.merge(updated_movie)
else:
log.debug('Movie %s information restored from cache.', movie.name)
else:
if only_cached:
raise LookupError('Movie %s not found from cache' % title_year)
# There was no movie found in the cache, do a lookup from blu-ray.com
log.verbose('Searching from blu-ray.com `%s`', title)
# Add/merge movie to db
movie = BlurayMovie(title=title, year=year)
# Add to search results table if necessary
if title.lower() != movie.name.lower():
session.add(BluraySearchResult(search=title_year.lower(), movie_id=movie.id))
session.merge(movie)
if not movie:
raise LookupError('Unable to find movie on blu-ray: {}'.format(title_year))
return movie
@event('plugin.register')
def register_plugin():
plugin.register(ApiBluray, 'api_bluray', api_ver=2, interfaces=[])
|
{
"content_hash": "457f7df681d31a24515c43e45305e2f8",
"timestamp": "",
"source": "github",
"line_count": 244,
"max_line_length": 115,
"avg_line_length": 38.93032786885246,
"alnum_prop": 0.5985893251921255,
"repo_name": "LynxyssCZ/Flexget",
"id": "e8ada464419a9d2206e7ae7b5c29e9447a5c64b7",
"size": "9499",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "flexget/plugins/internal/api_bluray.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "11875"
},
{
"name": "Dockerfile",
"bytes": "1988"
},
{
"name": "HTML",
"bytes": "79800"
},
{
"name": "JavaScript",
"bytes": "263723"
},
{
"name": "Python",
"bytes": "3371493"
},
{
"name": "SRecode Template",
"bytes": "3"
},
{
"name": "Shell",
"bytes": "1576"
}
],
"symlink_target": ""
}
|
"""The test for sensor device automation."""
import pytest
import homeassistant.components.automation as automation
from homeassistant.components.sensor import DOMAIN
from homeassistant.components.sensor.device_condition import ENTITY_CONDITIONS
from homeassistant.const import (
CONF_PLATFORM,
DEVICE_CLASS_BATTERY,
PERCENTAGE,
STATE_UNKNOWN,
)
from homeassistant.helpers import device_registry
from homeassistant.setup import async_setup_component
from tests.common import (
MockConfigEntry,
async_get_device_automation_capabilities,
async_get_device_automations,
async_mock_service,
mock_device_registry,
mock_registry,
)
from tests.components.blueprint.conftest import stub_blueprint_populate # noqa: F401
from tests.testing_config.custom_components.test.sensor import (
DEVICE_CLASSES,
UNITS_OF_MEASUREMENT,
)
@pytest.fixture
def device_reg(hass):
"""Return an empty, loaded, registry."""
return mock_device_registry(hass)
@pytest.fixture
def entity_reg(hass):
"""Return an empty, loaded, registry."""
return mock_registry(hass)
@pytest.fixture
def calls(hass):
"""Track calls to a mock service."""
return async_mock_service(hass, "test", "automation")
async def test_get_conditions(hass, device_reg, entity_reg, enable_custom_integrations):
"""Test we get the expected conditions from a sensor."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
for device_class in DEVICE_CLASSES:
entity_reg.async_get_or_create(
DOMAIN,
"test",
platform.ENTITIES[device_class].unique_id,
device_id=device_entry.id,
)
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
expected_conditions = [
{
"condition": "device",
"domain": DOMAIN,
"type": condition["type"],
"device_id": device_entry.id,
"entity_id": platform.ENTITIES[device_class].entity_id,
}
for device_class in DEVICE_CLASSES
if device_class in UNITS_OF_MEASUREMENT
for condition in ENTITY_CONDITIONS[device_class]
if device_class != "none"
]
conditions = await async_get_device_automations(hass, "condition", device_entry.id)
assert conditions == expected_conditions
async def test_get_conditions_no_state(hass, device_reg, entity_reg):
"""Test we get the expected conditions from a sensor."""
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_ids = {}
for device_class in DEVICE_CLASSES:
entity_ids[device_class] = entity_reg.async_get_or_create(
DOMAIN,
"test",
f"5678_{device_class}",
device_id=device_entry.id,
original_device_class=device_class,
unit_of_measurement=UNITS_OF_MEASUREMENT.get(device_class),
).entity_id
await hass.async_block_till_done()
expected_conditions = [
{
"condition": "device",
"domain": DOMAIN,
"type": condition["type"],
"device_id": device_entry.id,
"entity_id": entity_ids[device_class],
}
for device_class in DEVICE_CLASSES
if device_class in UNITS_OF_MEASUREMENT
for condition in ENTITY_CONDITIONS[device_class]
if device_class != "none"
]
conditions = await async_get_device_automations(hass, "condition", device_entry.id)
assert conditions == expected_conditions
@pytest.mark.parametrize(
"set_state,device_class_reg,device_class_state,unit_reg,unit_state",
[
(False, DEVICE_CLASS_BATTERY, None, PERCENTAGE, None),
(True, None, DEVICE_CLASS_BATTERY, None, PERCENTAGE),
],
)
async def test_get_condition_capabilities(
hass,
device_reg,
entity_reg,
set_state,
device_class_reg,
device_class_state,
unit_reg,
unit_state,
):
"""Test we get the expected capabilities from a sensor condition."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_id = entity_reg.async_get_or_create(
DOMAIN,
"test",
platform.ENTITIES["battery"].unique_id,
device_id=device_entry.id,
original_device_class=device_class_reg,
unit_of_measurement=unit_reg,
).entity_id
if set_state:
hass.states.async_set(
entity_id,
None,
{"device_class": device_class_state, "unit_of_measurement": unit_state},
)
expected_capabilities = {
"extra_fields": [
{
"description": {"suffix": PERCENTAGE},
"name": "above",
"optional": True,
"type": "float",
},
{
"description": {"suffix": PERCENTAGE},
"name": "below",
"optional": True,
"type": "float",
},
]
}
conditions = await async_get_device_automations(hass, "condition", device_entry.id)
assert len(conditions) == 1
for condition in conditions:
capabilities = await async_get_device_automation_capabilities(
hass, "condition", condition
)
assert capabilities == expected_capabilities
async def test_get_condition_capabilities_none(
hass, device_reg, entity_reg, enable_custom_integrations
):
"""Test we get the expected capabilities from a sensor condition."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
conditions = [
{
"condition": "device",
"device_id": "8770c43885354d5fa27604db6817f63f",
"domain": "sensor",
"entity_id": "sensor.beer",
"type": "is_battery_level",
},
{
"condition": "device",
"device_id": "8770c43885354d5fa27604db6817f63f",
"domain": "sensor",
"entity_id": platform.ENTITIES["none"].entity_id,
"type": "is_battery_level",
},
]
expected_capabilities = {}
for condition in conditions:
capabilities = await async_get_device_automation_capabilities(
hass, "condition", condition
)
assert capabilities == expected_capabilities
async def test_if_state_not_above_below(
hass, calls, caplog, enable_custom_integrations
):
"""Test for bad value conditions."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
sensor1 = platform.ENTITIES["battery"]
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {"platform": "event", "event_type": "test_event1"},
"condition": [
{
"condition": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": sensor1.entity_id,
"type": "is_battery_level",
}
],
"action": {"service": "test.automation"},
}
]
},
)
assert "must contain at least one of below, above" in caplog.text
async def test_if_state_above(hass, calls, enable_custom_integrations):
"""Test for value conditions."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
sensor1 = platform.ENTITIES["battery"]
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {"platform": "event", "event_type": "test_event1"},
"condition": [
{
"condition": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": sensor1.entity_id,
"type": "is_battery_level",
"above": 10,
}
],
"action": {
"service": "test.automation",
"data_template": {
"some": "{{ trigger.%s }}"
% "}} - {{ trigger.".join(("platform", "event.event_type"))
},
},
}
]
},
)
await hass.async_block_till_done()
assert hass.states.get(sensor1.entity_id).state == STATE_UNKNOWN
assert len(calls) == 0
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 0
hass.states.async_set(sensor1.entity_id, 9)
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 0
hass.states.async_set(sensor1.entity_id, 11)
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].data["some"] == "event - test_event1"
async def test_if_state_below(hass, calls, enable_custom_integrations):
"""Test for value conditions."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
sensor1 = platform.ENTITIES["battery"]
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {"platform": "event", "event_type": "test_event1"},
"condition": [
{
"condition": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": sensor1.entity_id,
"type": "is_battery_level",
"below": 10,
}
],
"action": {
"service": "test.automation",
"data_template": {
"some": "{{ trigger.%s }}"
% "}} - {{ trigger.".join(("platform", "event.event_type"))
},
},
}
]
},
)
await hass.async_block_till_done()
assert hass.states.get(sensor1.entity_id).state == STATE_UNKNOWN
assert len(calls) == 0
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 0
hass.states.async_set(sensor1.entity_id, 11)
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 0
hass.states.async_set(sensor1.entity_id, 9)
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].data["some"] == "event - test_event1"
async def test_if_state_between(hass, calls, enable_custom_integrations):
"""Test for value conditions."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
sensor1 = platform.ENTITIES["battery"]
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {"platform": "event", "event_type": "test_event1"},
"condition": [
{
"condition": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": sensor1.entity_id,
"type": "is_battery_level",
"above": 10,
"below": 20,
}
],
"action": {
"service": "test.automation",
"data_template": {
"some": "{{ trigger.%s }}"
% "}} - {{ trigger.".join(("platform", "event.event_type"))
},
},
}
]
},
)
await hass.async_block_till_done()
assert hass.states.get(sensor1.entity_id).state == STATE_UNKNOWN
assert len(calls) == 0
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 0
hass.states.async_set(sensor1.entity_id, 9)
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 0
hass.states.async_set(sensor1.entity_id, 11)
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].data["some"] == "event - test_event1"
hass.states.async_set(sensor1.entity_id, 21)
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 1
hass.states.async_set(sensor1.entity_id, 19)
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 2
assert calls[1].data["some"] == "event - test_event1"
|
{
"content_hash": "66723dfa88265e6cf415bd90d3746a87",
"timestamp": "",
"source": "github",
"line_count": 450,
"max_line_length": 88,
"avg_line_length": 33.11555555555555,
"alnum_prop": 0.5497248691450812,
"repo_name": "jawilson/home-assistant",
"id": "5742f7b47c4c0ba144376497f9741beb62fe6f29",
"size": "14902",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/components/sensor/test_device_condition.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2782"
},
{
"name": "Python",
"bytes": "40129467"
},
{
"name": "Shell",
"bytes": "4910"
}
],
"symlink_target": ""
}
|
from google.cloud import aiplatform_v1beta1
def sample_update_dataset():
# Create a client
client = aiplatform_v1beta1.DatasetServiceClient()
# Initialize request argument(s)
dataset = aiplatform_v1beta1.Dataset()
dataset.display_name = "display_name_value"
dataset.metadata_schema_uri = "metadata_schema_uri_value"
dataset.metadata.null_value = "NULL_VALUE"
request = aiplatform_v1beta1.UpdateDatasetRequest(
dataset=dataset,
)
# Make the request
response = client.update_dataset(request=request)
# Handle the response
print(response)
# [END aiplatform_v1beta1_generated_DatasetService_UpdateDataset_sync]
|
{
"content_hash": "05ef079c5f0044338e6867ada1cb4056",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 70,
"avg_line_length": 28.083333333333332,
"alnum_prop": 0.7255192878338279,
"repo_name": "googleapis/python-aiplatform",
"id": "6dd3ad3ce168e7e1280e9cff763d42d72c034890",
"size": "2071",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "samples/generated_samples/aiplatform_v1beta1_generated_dataset_service_update_dataset_sync.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2050"
},
{
"name": "Python",
"bytes": "23977004"
},
{
"name": "Shell",
"bytes": "30668"
}
],
"symlink_target": ""
}
|
"""Tensorflow V1 version of the text vectorization preprocessing layer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.keras.engine import base_preprocessing_layer_v1
from tensorflow.python.keras.layers.preprocessing import index_lookup
from tensorflow.python.ops import lookup_ops
class IndexLookup(index_lookup.IndexLookup,
base_preprocessing_layer_v1.CombinerPreprocessingLayer):
"""IndexLookup layer.
  This layer translates a set of arbitrary strings or integers into an integer
output via a table-based lookup, with optional out-of-vocabulary handling.
If desired, the user can call this layer's adapt() method on a data set.
When this layer is adapted, it will analyze the dataset, determine the
frequency of individual string or integer values, and create a vocabulary
from them. This vocabulary can have unlimited size or be capped, depending on
the configuration options for this layer; if there are more unique values in
the input than the maximum vocabulary size, the most frequent terms will be
used to create the vocabulary.
Attributes:
max_vocab_size: The maximum size of the vocabulary for this layer. If None,
there is no cap on the size of the vocabulary. Note that the vocabulary
does include OOV buckets, so the effective number of unique values in the
vocabulary is (max_vocab_size - num_oov_buckets) when this value is set.
num_oov_buckets: The number of out-of-vocabulary tokens to use; defaults to
1. If this value is more than 1, OOV inputs are hashed to determine their
OOV value; if this value is 0, passing an OOV input will result in a
runtime error.
reserve_zero: Whether to reserve the index '0', which has a special meaning
in the Keras masking system. If True, the output of this layer will be in
the range [1...max_vocab_size+1); if False, the output will be in the
range [0...max_vocab_size). Defaults to True.
mask_inputs: If True, input values of 0 (for integers) and "" (for strings)
will be treated as masked values and assigned an output value of 0. If
this option is set, reserve_zero must also be set. Defaults to False.
"""
def _use_v1_apis(self):
return True
def _static_table_class(self):
return lookup_ops.StaticHashTableV1
|
{
"content_hash": "d6e49ce23f348f8985f3b63b3580f531",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 79,
"avg_line_length": 48.02,
"alnum_prop": 0.7413577675968347,
"repo_name": "annarev/tensorflow",
"id": "c710108dd5ba74f7e5a4f2eb3d4f5d48aaf8ce59",
"size": "3090",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tensorflow/python/keras/layers/preprocessing/index_lookup_v1.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "1286"
},
{
"name": "Batchfile",
"bytes": "9258"
},
{
"name": "C",
"bytes": "341894"
},
{
"name": "C#",
"bytes": "8446"
},
{
"name": "C++",
"bytes": "49343974"
},
{
"name": "CMake",
"bytes": "195286"
},
{
"name": "Dockerfile",
"bytes": "36386"
},
{
"name": "Go",
"bytes": "1253646"
},
{
"name": "HTML",
"bytes": "4681865"
},
{
"name": "Java",
"bytes": "863222"
},
{
"name": "Jupyter Notebook",
"bytes": "2604741"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "52734"
},
{
"name": "Objective-C",
"bytes": "15650"
},
{
"name": "Objective-C++",
"bytes": "99243"
},
{
"name": "PHP",
"bytes": "1357"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "PureBasic",
"bytes": "25356"
},
{
"name": "Python",
"bytes": "41289329"
},
{
"name": "Ruby",
"bytes": "553"
},
{
"name": "Shell",
"bytes": "469612"
},
{
"name": "Smarty",
"bytes": "6976"
}
],
"symlink_target": ""
}
|
__author__ = 'huqinghua'
# coding=gbk
import string, os, commands, time
from PyUI import *
from MsgBox import *
from PyFrameBase import *
def ShowMessageBox(hwnd, title, caption):
mbox1 = PyFrameCreator()
obj = mbox1.CreateForm(hwnd, 'MsgBox', 'MsgBox')
obj.SetText("LblCaption", title)
obj.SetText("txtMsg", caption)
mbox1.ShowModal()
|
{
"content_hash": "450ad82cceb996888c1b0cf22f6655a9",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 52,
"avg_line_length": 23.5,
"alnum_prop": 0.6622340425531915,
"repo_name": "est/py-ui4win",
"id": "4bad4f3ab67c70233d46c1ed24bdfbafef91650e",
"size": "376",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "bin/UICommon.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "648428"
},
{
"name": "C++",
"bytes": "1646895"
},
{
"name": "CMake",
"bytes": "3434"
},
{
"name": "Python",
"bytes": "56423"
},
{
"name": "Shell",
"bytes": "316"
}
],
"symlink_target": ""
}
|
"""This example adds a text add that uses advanced features of upgraded URLS.
The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory. For more information, see the "Caching authentication information"
section of our README.
Tags: AdGroupAdService.mutate
"""
__author__ = 'Mark Saniscalchi'
from googleads import adwords
from googleads import errors
ADGROUP_ID = 'INSERT_ADGROUP_ID_HERE'
def main(client, adgroup_id):
# Initialize appropriate service.
adgroup_ad_service = client.GetService('AdGroupAdService', version='v201506')
# Create the text ad
text_ad = {
'xsi_type': 'TextAd',
'headline': 'Luxury Cruise to Mars',
'description1': 'Visit the Red Planet in style.',
'description2': 'Low-gravity fun for everyone!',
'displayUrl': 'www.example.com',
# Specify a tracking URL for 3rd party tracking provider. You may specify
# one at customer, campaign, ad group, ad, criterion or feed item levels.
'trackingUrlTemplate': ('http://tracker.example.com/?season={_season}'
'&promocode={_promocode}&u={lpurl}'),
'urlCustomParameters': {
'parameters': [
# Since your tracking URL has two custom parameters, provide
# their values too. This can be provided at campaign, ad group,
# ad, criterion, or feed item levels.
{
'key': 'season',
'value': 'christmas'
},
{
'key': 'promocode',
'value': 'NYC123'
}
]
},
# Specify a list of final URLs. This field cannot be set if URL
# field is set, or finalUrls is unset. This may be specified at ad,
# criterion, and feed item levels.
'finalUrls': [
'http://www.example.com/cruise/space/',
'http://www.example.com/locations/mars/'
],
# Specify a list of final mobile URLs. This field cannot be set if URL
# field is set, or finalUrls is unset. This may be specified at ad,
# criterion, and feed item levels.
'finalMobileUrls': [
'http://mobile.example.com/cruise/space/',
'http://mobile.example.com/locations/mars/'
]
}
text_adgroup_ad = {
'adGroupId': adgroup_id,
'ad': text_ad,
# Optional: Set the status.
'status': 'PAUSED'
}
operations = [{
'operator': 'ADD',
'operand': text_adgroup_ad
}]
response = adgroup_ad_service.mutate(operations)
if 'value' in response:
for adgroup_ad in response['value']:
      print ('AdGroupAd with ID %s and display URL \'%s\' was added.'
% (adgroup_ad['ad']['id'], adgroup_ad['ad']['displayUrl']))
print 'Upgraded URL properties:'
print 'Final Urls: %s' % adgroup_ad['ad']['finalUrls']
print 'Final Mobile URLs: %s' % adgroup_ad['ad']['finalMobileUrls']
print ('Tracking URL template: %s'
% adgroup_ad['ad']['trackingUrlTemplate'])
print 'Custom parameters: %s' % adgroup_ad['ad']['urlCustomParameters']
else:
raise errors.GoogleAdsError('Failed to create AdGroupAd.')
if __name__ == '__main__':
# Initialize client object.
adwords_client = adwords.AdWordsClient.LoadFromStorage()
main(adwords_client, ADGROUP_ID)
|
{
"content_hash": "21c8f92b1d7556e1988396cd8f2f0267",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 79,
"avg_line_length": 34.785714285714285,
"alnum_prop": 0.612496333235553,
"repo_name": "cctaylor/googleads-python-lib",
"id": "cca69d0c63fa90249cb4792816724844e7491eae",
"size": "4027",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "examples/adwords/v201506/advanced_operations/add_text_ad_with_upgraded_urls.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "492"
},
{
"name": "HTML",
"bytes": "8336"
},
{
"name": "JavaScript",
"bytes": "504"
},
{
"name": "Python",
"bytes": "2774292"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
import copy
import ecstasy
import oauth2client.file
import pytest
import requests
from collections import namedtuple
import tests.paths
import lnk.googl.stats
import lnk.googl.info
import lnk.config
VERSION = 1
API = 'https://www.googleapis.com/urlshortener'
KEY = 'AIzaSyAoXKM_AMBafkXqmVeqJ82o9B9NPCTvXxc'
def request_stats(url):
response = requests.get('{0}/v{1}/url'.format(API, VERSION),
params=dict(projection='FULL',
shortUrl=url,
key=KEY))
data = response.json()
data['URL'] = url
del data['kind']
del data['id']
return data
@pytest.fixture(scope='module')
def fixture(request):
Fixture = namedtuple('Fixture', [
'stats',
'info',
'category',
'url',
'timespans',
'full',
'analytics',
'first_level',
'second_level',
'forever_data',
'category_data',
'timespans_data'
])
category = {'browsers': 'browsers'}
url = 'http://goo.gl/3U9mIa'
timespans = ['month', 'day']
first_level = ecstasy.beautify(' <+> {0}', ecstasy.Color.Red)
second_level = ecstasy.beautify(' <-> {0}: {1}', ecstasy.Color.Yellow)
full = request_stats(url)
analytics = full['analytics']
forever_data = analytics['allTime']
category_data = forever_data['browsers']
timespans = ['month', 'week']
timespans_data = [analytics[i] for i in timespans]
with lnk.config.Manager('googl', write=True) as manager:
settings = manager['commands']['stats']['settings']
old = settings['timespan']
stats = lnk.googl.stats.Stats(raw=True)
stats.credentials = oauth2client.file.Storage(tests.paths.CREDENTIALS_PATH)
info = lnk.googl.info.Info(raw=True)
info.credentials = oauth2client.file.Storage(tests.paths.CREDENTIALS_PATH)
def finalize():
with lnk.config.Manager('googl', write=True) as manager:
settings = manager['commands']['stats']['settings']
settings['span'] = old
request.addfinalizer(finalize)
return Fixture(stats,
info,
category,
url,
timespans,
full,
analytics,
first_level,
second_level,
forever_data,
category_data,
timespans_data)
def test_format_makes_lines_pretty(fixture):
result = fixture.stats.format('foo', 'bar')
expected = 'Foo: bar'
assert result == expected
def test_format_handles_special_keys_wells(fixture):
result = fixture.stats.format('shortUrlClicks', 'foo')
expected = 'Clicks: foo'
assert result == expected
result = fixture.stats.format('longUrl', 'foo')
expected = 'Expanded: foo'
assert result == expected
def test_get_timespans_removes_duplicates(fixture):
things = [1, 4, 2, 3, 4, 1, 3, 3]
result = fixture.stats.get_timespans(things, False)
expected = set(things)
assert result == expected
def test_get_timespans_picks_default_timespan_if_no_times():
with lnk.config.Manager('googl', write=True) as manager:
settings = manager['commands']['stats']['settings']
settings['timespan'] = 'day'
stats = lnk.googl.stats.Stats(raw=True)
result = stats.get_timespans([], False)
assert result == set(['day'])
def test_get_timespans_handles_default_forever_well():
with lnk.config.Manager('googl', write=True) as manager:
settings = manager['commands']['stats']['settings']
settings['timespan'] = 'forever'
stats = lnk.googl.stats.Stats(raw=True)
result = stats.get_timespans([], False)
assert result == set(['allTime'])
def test_get_timespans_handles_default_two_hours_well():
with lnk.config.Manager('googl', write=True) as manager:
settings = manager['commands']['stats']['settings']
settings['timespan'] = 'two-hours'
stats = lnk.googl.stats.Stats(raw=True)
result = stats.get_timespans([], False)
assert result == set(['twoHours'])
def test_get_timespans_handles_forever_well(fixture):
result = fixture.stats.get_timespans([], True)
assert result == set(['allTime'])
def test_get_timespans_handles_two_hours_well(fixture):
result = fixture.stats.get_timespans(['two-hours'], False)
assert result == set(['twoHours'])
def test_get_timespans_works(fixture):
result = fixture.stats.get_timespans(fixture.timespans, False)
assert result == set(fixture.timespans)
def test_sub_listify_works_in_normal_cases(fixture):
result = fixture.stats.sub_listify(fixture.category,
fixture.category_data,
None,
False)
expected = []
for point in fixture.category_data:
line = fixture.second_level.format(point['id'], point['count'])
expected.append(line)
assert result == expected
def test_sub_listify_limits_well(fixture):
result = fixture.stats.sub_listify(fixture.category,
fixture.category_data,
1,
False)
expected = [fixture.second_level.format(fixture.category_data[0]['id'],
fixture.category_data[0]['count'])]
assert len(result) == 1
assert result == expected
def test_sub_listify_handles_unknown_well(fixture):
data = [dict(id='unknown', count=123)]
result = fixture.stats.sub_listify(fixture.category,
data,
None,
False)
expected = [fixture.second_level.format('Unknown', 123)]
assert result == expected
def test_sub_listify_leaves_countries_short(fixture):
data = [dict(id='DE', count=123)]
result = fixture.stats.sub_listify('countries',
data,
None,
False)
expected = [fixture.second_level.format('DE', 123)]
assert result == expected
def test_sub_listify_expands_countries(fixture):
data = [dict(id='DE', count=123)]
result = fixture.stats.sub_listify('countries',
data,
None,
True)
expected = [fixture.second_level.format('Germany', 123)]
assert result == expected
def test_get_header_handles_forever_well(fixture):
result = fixture.stats.get_header('allTime')
expected = fixture.first_level.format('Since forever:')
assert result == expected
def test_get_header_handles_two_hours_well(fixture):
result = fixture.stats.get_header('twoHours')
expected = fixture.first_level.format('Last two hours:')
assert result == expected
def test_get_header_handles_normal_cases_well(fixture):
result = fixture.stats.get_header('month')
expected = fixture.first_level.format('Last month:')
assert result == expected
def test_request_format_is_correct(fixture):
fixture.stats.queue.put(fixture.url)
result = fixture.stats.request(False)
assert isinstance(result, dict)
assert 'URL' in result
assert (isinstance(result['URL'], str)
or isinstance(result['URL'], unicode))
assert 'analytics' in result
assert isinstance(result['analytics'], dict)
def test_requests_well_with_info(fixture):
fixture.stats.queue.put(fixture.url)
result = fixture.stats.request(True)
assert result == fixture.full
def test_requests_well_without_info(fixture):
fixture.stats.queue.put(fixture.url)
result = fixture.stats.request(False)
expected = fixture.full.copy()
for i in ['status', 'created', 'longUrl']:
del expected[i]
assert result == expected
def test_listify_formats_timespans_well(fixture):
timespans = ['allTime'] + fixture.timespans
result = fixture.stats.listify(fixture.analytics,
fixture.category,
timespans,
False,
None)
expected = [fixture.first_level.format('Since forever:')]
for i in fixture.timespans:
header = fixture.first_level.format('Last {0}:'.format(i))
expected.append(header)
for i in expected:
assert i in result
def test_listify_filters_timespans_well(fixture):
result = fixture.stats.listify(fixture.analytics,
fixture.category,
fixture.timespans,
False,
None)
for i in fixture.timespans:
line = 'Last {0}:'.format(i)
formatted = fixture.first_level.format(line)
assert formatted in result
unwanted = ['Since forever', 'Last day:', 'Last two hours:']
for i in unwanted:
formatted = fixture.first_level.format(i)
assert formatted not in result
def test_listify_handles_clicks_well(fixture):
result = fixture.stats.listify(fixture.analytics,
{'clicks': 'shortUrlClicks'},
[fixture.timespans[0]],
False,
None)
clicks = fixture.analytics[fixture.timespans[0]]['shortUrlClicks']
line = 'Last {0}: {1}'.format(fixture.timespans[0], clicks)
expected = fixture.first_level.format(line)
assert expected in result
def test_lineify_formats_headers_well(fixture):
data = copy.deepcopy(fixture.full)
result = fixture.stats.lineify(data,
fixture.category,
fixture.timespans,
False,
None)
for key, value in fixture.full.items():
if key != 'analytics':
expected = fixture.stats.format(key, value)
assert expected in result
def test_get_stats_works(fixture):
"""
result = []
fixture.stats.get_stats(result,
fixture.category,
fixture.timespans,
True,
False,
None)
assert result[0] == fixture.full
"""
pass
def test_fetch_works_for_single_url(fixture):
result = fixture.stats.fetch(list(fixture.category.keys()),
[],
fixture.timespans,
False,
None,
True,
False,
[fixture.url])
data = copy.deepcopy(fixture.full)
expected = fixture.stats.lineify(data,
fixture.category,
fixture.timespans,
False,
None)
assert sorted(result[0]) == sorted(expected)
def test_fetch_limits_well(fixture):
result = fixture.stats.fetch(list(fixture.category.keys()),
[],
fixture.timespans,
False,
1,
True,
False,
[fixture.url])
data = copy.deepcopy(fixture.full)
expected = fixture.stats.lineify(data,
fixture.category,
fixture.timespans,
False,
1)
assert sorted(result[0]) == sorted(expected)
def test_fetch_works_for_many_urls(fixture):
other = 'http://goo.gl/XBzv0g'
result = fixture.stats.fetch(list(fixture.category.keys()),
[],
fixture.timespans,
False,
None,
True,
False,
[fixture.url, other])
result = sorted(sorted(i) for i in result)
data = copy.deepcopy(fixture.full)
expected = []
first = fixture.stats.lineify(data,
fixture.category,
fixture.timespans,
False,
None)
expected.append(sorted(first))
other_data = request_stats(other)
second = fixture.stats.lineify(other_data,
fixture.category,
fixture.timespans,
False,
None)
expected.append(sorted(second))
assert result == sorted(expected)
def test_fetch_adds_info_well(fixture):
result = fixture.stats.fetch(list(fixture.category.keys()),
[],
fixture.timespans,
False,
1,
True,
False,
[fixture.url])
data = copy.deepcopy(fixture.full)
expected = fixture.stats.lineify(data,
fixture.category,
fixture.timespans,
False,
1)
assert sorted(result[0]) == sorted(expected)
|
{
"content_hash": "712188591de8f64b5a6591a0db4dfb36",
"timestamp": "",
"source": "github",
"line_count": 426,
"max_line_length": 76,
"avg_line_length": 25.812206572769952,
"alnum_prop": 0.6622408148417607,
"repo_name": "goldsborough/lnk",
"id": "6575de4e088257795c46d99b3a9bf2271170c587",
"size": "11043",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/googl/test_googl_stats.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "1860"
},
{
"name": "Python",
"bytes": "255482"
}
],
"symlink_target": ""
}
|
from flask import Blueprint, current_app, send_from_directory
eventum = Blueprint('eventum', __name__)
@eventum.route('/static/<path:filename>', methods=['GET'])
def static(filename):
return send_from_directory(current_app.config['EVENTUM_STATIC_FOLDER'],
filename)
|
{
"content_hash": "12f690c1662fce1f8ba1065c887c4ecf",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 75,
"avg_line_length": 33.77777777777778,
"alnum_prop": 0.6611842105263158,
"repo_name": "danrschlosser/eventum",
"id": "38c977e35c17f7da6f0e1bd6dfb86989b084c99b",
"size": "304",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "eventum/routes/_eventum.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "64540"
},
{
"name": "HTML",
"bytes": "54201"
},
{
"name": "JavaScript",
"bytes": "29243"
},
{
"name": "Python",
"bytes": "214815"
},
{
"name": "Shell",
"bytes": "1845"
}
],
"symlink_target": ""
}
|
"""
pygments.lexers.compiled
~~~~~~~~~~~~~~~~~~~~~~~~
Just export lexer classes previously contained in this module.
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexers.jvm import JavaLexer, ScalaLexer
from pygments.lexers.c_cpp import CLexer, CppLexer
from pygments.lexers.d import DLexer
from pygments.lexers.objective import ObjectiveCLexer, \
ObjectiveCppLexer, LogosLexer
from pygments.lexers.go import GoLexer
from pygments.lexers.rust import RustLexer
from pygments.lexers.c_like import ECLexer, ValaLexer, CudaLexer
from pygments.lexers.pascal import DelphiLexer, Modula2Lexer, AdaLexer
from pygments.lexers.business import CobolLexer, CobolFreeformatLexer
from pygments.lexers.fortran import FortranLexer
from pygments.lexers.prolog import PrologLexer
from pygments.lexers.python import CythonLexer
from pygments.lexers.graphics import GLShaderLexer
from pygments.lexers.ml import OcamlLexer
from pygments.lexers.basic import BlitzBasicLexer, BlitzMaxLexer, MonkeyLexer
from pygments.lexers.dylan import DylanLexer, DylanLidLexer, DylanConsoleLexer
from pygments.lexers.ooc import OocLexer
from pygments.lexers.felix import FelixLexer
from pygments.lexers.nimrod import NimrodLexer
from pygments.lexers.crystal import CrystalLexer
__all__ = []
|
{
"content_hash": "1febcbe61c5464b4da00ae0ee647217a",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 78,
"avg_line_length": 41.24242424242424,
"alnum_prop": 0.8104335047759,
"repo_name": "sonntagsgesicht/regtest",
"id": "13aa39ce2d57100b096f1e5913b1371e04ad941d",
"size": "1361",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": ".aux/venv/lib/python3.9/site-packages/pygments/lexers/compiled.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "13888"
}
],
"symlink_target": ""
}
|
from typing import Tuple, Callable, Optional, List, TypeVar
from ..model import Model
from ..config import registry
from ..types import Floats1d, Floats2d, Floats3d, Floats4d, FloatsXd, Ragged, Padded
# fmt: off
InT = TypeVar("InT", List[Floats1d], List[Floats2d], List[Floats3d], List[Floats4d], Ragged, Padded, FloatsXd)
# fmt: on
@registry.layers("residual.v1")
def residual(layer: Model[InT, InT]) -> Model[InT, InT]:
return Model(
f"residual({layer.name})",
forward,
init=init,
layers=[layer],
dims={
"nO": layer.get_dim("nO") if layer.has_dim("nO") else None,
"nI": layer.get_dim("nI") if layer.has_dim("nI") else None,
},
)
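# Illustrative usage (sketch only; assumes a thinc layer such as Relu whose
# output shape matches its input shape):
#   from thinc.api import Relu
#   model = residual(Relu(nO=128, nI=128))
# The wrapped layer's output is added element-wise to its input, so the
# wrapped layer's nO and nI must agree.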
def forward(model: Model[InT, InT], X: InT, is_train: bool) -> Tuple[InT, Callable]:
def backprop(d_output: InT) -> InT:
dX = backprop_layer(d_output)
if isinstance(d_output, list):
return [d_output[i] + dX[i] for i in range(len(d_output))]
elif isinstance(d_output, Ragged):
return Ragged(d_output.data + dX.data, dX.lengths)
elif isinstance(X, Padded):
dX.data += d_output.data
return dX
else:
return d_output + dX
Y, backprop_layer = model.layers[0](X, is_train)
if isinstance(X, list):
return [X[i] + Y[i] for i in range(len(X))], backprop
elif isinstance(X, Ragged):
return Ragged(X.data + Y.data, X.lengths), backprop
elif isinstance(X, Padded):
Y.data += X.data
return Y, backprop
else:
return X + Y, backprop
def init(
model: Model[InT, InT], X: Optional[InT] = None, Y: Optional[InT] = None
) -> Model[InT, InT]:
first_layer = model.layers[0]
if first_layer.has_dim("nO") is None:
first_layer.initialize(X=X, Y=Y)
else:
first_layer.initialize(X=X)
if first_layer.has_dim("nO"):
model.set_dim("nO", first_layer.get_dim("nO"))
if first_layer.has_dim("nI"):
model.set_dim("nI", first_layer.get_dim("nI"))
return model
|
{
"content_hash": "e6880bf79a168c8c96640d4b844670f0",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 110,
"avg_line_length": 32.328125,
"alnum_prop": 0.5935234412759788,
"repo_name": "spacy-io/thinc",
"id": "612c008c6fce780e1071538e6fe6634e066d0fc2",
"size": "2069",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "thinc/layers/residual.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "259926"
},
{
"name": "C++",
"bytes": "5131"
},
{
"name": "Python",
"bytes": "135654"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
from . import pubnub # noqa
from . import requests # noqa
|
{
"content_hash": "791f06c834652b9e11f3b9a3d7d61c73",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 38,
"avg_line_length": 25,
"alnum_prop": 0.72,
"repo_name": "victorlin/bugbuzz-python",
"id": "d0a5f4005929512d97e15c9614f83b545a0d3356",
"size": "100",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bugbuzz/packages/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "27247"
},
{
"name": "Shell",
"bytes": "88"
}
],
"symlink_target": ""
}
|
from unittest import mock
from nova.objects import external_event as external_event_obj
from nova.tests.unit.objects import test_objects
class _TestInstanceExternalEventObject(object):
def test_make_key(self):
key = external_event_obj.InstanceExternalEvent.make_key('foo', 'bar')
self.assertEqual('foo-bar', key)
def test_make_key_no_tag(self):
key = external_event_obj.InstanceExternalEvent.make_key('foo')
self.assertEqual('foo', key)
def test_key(self):
event = external_event_obj.InstanceExternalEvent(
name='network-changed',
tag='bar')
with mock.patch.object(event, 'make_key') as make_key:
make_key.return_value = 'key'
self.assertEqual('key', event.key)
make_key.assert_called_once_with('network-changed', 'bar')
def test_event_names(self):
for event in external_event_obj.EVENT_NAMES:
external_event_obj.InstanceExternalEvent(name=event, tag='bar')
self.assertRaises(ValueError,
external_event_obj.InstanceExternalEvent,
name='foo', tag='bar')
class TestInstanceExternalEventObject(test_objects._LocalTest,
_TestInstanceExternalEventObject):
pass
class TestRemoteInstanceExternalEventObject(test_objects._RemoteTest,
_TestInstanceExternalEventObject):
pass
|
{
"content_hash": "f11e584c885d6b0282d79057a61fdbdc",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 78,
"avg_line_length": 36.26829268292683,
"alnum_prop": 0.6260928043039677,
"repo_name": "mahak/nova",
"id": "58c45c25497399b7944aed4a1e274390ff157db3",
"size": "2096",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "nova/tests/unit/objects/test_external_event.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "3545"
},
{
"name": "Mako",
"bytes": "1952"
},
{
"name": "Python",
"bytes": "23261880"
},
{
"name": "Shell",
"bytes": "28113"
},
{
"name": "Smarty",
"bytes": "507244"
}
],
"symlink_target": ""
}
|
import re
from random import randrange
def test_phones_on_home_page(app):
index = 0
user_from_home_page = app.user.get_user_list()[index]
user_from_edit_page = app.user.get_user_info_from_edit_page(index)
assert user_from_home_page.all_phones_from_home_page == merge_phones_like_on_home_page(user_from_edit_page)
def test_phones_on_user_view_page(app):
index = 0
user_from_view_page = app.user.get_user_info_from_view_page(index)
user_from_edit_page = app.user.get_user_info_from_edit_page(index)
assert user_from_view_page.homephone == user_from_edit_page.homephone
assert user_from_view_page.workphone == user_from_edit_page.workphone
assert user_from_view_page.mobilephone == user_from_edit_page.mobilephone
assert user_from_view_page.additionalphone == user_from_edit_page.additionalphone
def clear(s):
return re.sub("[() -]", "", s)
def merge_phones_like_on_home_page(user):
return "\n".join(filter(lambda x: x != "",
map(lambda x: clear(x),
filter(lambda x: x is not None,
[user.homephone, user.mobilephone, user.workphone, user.additionalphone]))))
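# Example of the merge: homephone "+1 (234) 56-78", an empty mobilephone and
# workphone "555 01" clean up to "+12345678" and "55501", joined with a newline,
# which is what the home-page comparison in test_phones_on_home_page expects.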
|
{
"content_hash": "78815c15c6a53c3a6a225d118fd4cc7a",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 115,
"avg_line_length": 40.5,
"alnum_prop": 0.6534979423868312,
"repo_name": "VanillaPupa/python_training",
"id": "fc4c3e535d0675aba2ddd4f27ca713590bb7614d",
"size": "1215",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/test_phones.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "35146"
}
],
"symlink_target": ""
}
|
from eve import Eve
from eve_sqlalchemy import SQL as _SQL
from eve_sqlalchemy.validation import ValidatorSQL
from config import config
from .core import db
from .models import Contact
class SQL(_SQL):
driver = db
def create_app(config_name):
app = Eve(validator=ValidatorSQL, data=SQL, settings=config[config_name].EVE_SETTINGS)
app.config.from_object(config[config_name])
db.init_app(app)
return app
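# Illustrative usage (the 'development' key is an assumption about the config
# dict defined in config.py):
#   app = create_app('development')
#   app.run()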
|
{
"content_hash": "6145b54d190d35e0226f2c7f67ab852a",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 90,
"avg_line_length": 20.476190476190474,
"alnum_prop": 0.7418604651162791,
"repo_name": "abkfenris/proof_companion",
"id": "fa3503d67b66d462566308f93833d99fa5e13d51",
"size": "430",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "proof_companion/app.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "6199"
}
],
"symlink_target": ""
}
|
import sys, os
sys.path.insert(0, os.path.abspath('../readthedocs'))
import settings.sqlite
from django.core.management import setup_environ
setup_environ(settings.sqlite)
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'Read The Docs'
copyright = u'2010, Eric Holscher, Charlie Leifer, Bobby Grace'
version = '1.0'
release = '1.0'
exclude_patterns = ['_build']
default_role = 'obj'
pygments_style = 'sphinx'
intersphinx_mapping = {
'python': ('http://python.readthedocs.org/en/latest/', None),
'django': ('http://django.readthedocs.org/en/latest/', None),
'sphinx': ('http://sphinx.readthedocs.org/en/latest/', None),
}
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if on_rtd:
html_theme = 'default'
else:
html_theme = 'nature'
html_static_path = ['_static']
htmlhelp_basename = 'ReadTheDocsdoc'
latex_documents = [
('index', 'ReadTheDocs.tex', u'Read The Docs Documentation',
u'Eric Holscher, Charlie Leifer, Bobby Grace', 'manual'),
]
man_pages = [
('index', 'read-the-docs', u'Read The Docs Documentation',
[u'Eric Holscher, Charlie Leifer, Bobby Grace'], 1)
]
|
{
"content_hash": "a78ee9a3a878c47a9d270efc9fd4d7fe",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 63,
"avg_line_length": 32.648648648648646,
"alnum_prop": 0.6879139072847682,
"repo_name": "alex/readthedocs.org",
"id": "950f83e60ab63c80c18b67c126765811f22d0dd6",
"size": "1234",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/conf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "46130"
},
{
"name": "JavaScript",
"bytes": "44647"
},
{
"name": "Python",
"bytes": "629442"
},
{
"name": "Ruby",
"bytes": "11820"
},
{
"name": "Shell",
"bytes": "4953"
}
],
"symlink_target": ""
}
|
from accounts.models import JournalWriter
from journal import status_codes
from utils.response_utils import JournalResponse
class LoginRequiredMixin(object):
def check_user(self, request):
user = request.user
if user.is_authenticated() and user.is_active and JournalWriter.objects.get(user=user):
return True
elif not user.is_active:
return False
def dispatch(self, request, *args, **kwargs):
if not self.check_user(request=request):
return JournalResponse(response=status_codes.INVALID_USER)
return super(LoginRequiredMixin, self).dispatch(request, *args, **kwargs)
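# Illustrative usage (hypothetical view and model names; assumes a Django
# class-based view):
#   class EntryListView(LoginRequiredMixin, ListView):
#       model = Entry
# dispatch() returns an INVALID_USER JournalResponse instead of calling the
# view whenever check_user() fails.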
|
{
"content_hash": "3d26b76aefa304e9895cbd66cf5307c3",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 95,
"avg_line_length": 36.388888888888886,
"alnum_prop": 0.6992366412213741,
"repo_name": "subramaniank/journal",
"id": "54804673f0b7385e77ceec322bdb40e49cddc383",
"size": "655",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "journal/journal/accounts/mixins.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "24532"
},
{
"name": "Shell",
"bytes": "0"
}
],
"symlink_target": ""
}
|