# Engine/Extras/Maya_AnimationRiggingTools/ArtToolsOSX/MayaTools/General/Scripts/ART_importMotion.py
import maya.cmds as cmds
import maya.mel as mel
from functools import partial
import os, cPickle
import math
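#This module builds the "Import Motion" UI for the Maya Animation & Rigging Toolset (ART).
#It supports two workflows: importing mocap from an FBX file onto a referenced character rig
#(FK, IK, or both), and importing previously saved animation files from a project's Animations
#folder onto the rig controls. Both paths are driven by the methods defined below.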
class ImportMotionUI():
def __init__(self):
#create class variables
self.widgets = {}
#find out which project we are in
references = cmds.ls(type = "reference")
for ref in references:
try:
self.project = cmds.referenceQuery(ref, filename = True).rpartition("Projects/")[2].partition("/")[0]
except:
pass
#get access to our maya tools
toolsPath = os.path.join(cmds.internalVar(usd = True), "mayaTools.txt")
if os.path.exists(toolsPath):
f = open(toolsPath, 'r')
self.mayaToolsDir = f.readline()
f.close()
#check to see if window exists, if so, delete
if cmds.window("importMotionUI", exists = True):
cmds.deleteUI("importMotionUI")
#build window
self.widgets["window"] = cmds.window("importMotionUI", w = 700, h = 400, title = "Import Motion", sizeable = False)
#create the main layout
self.widgets["topLevelLayout"] = cmds.columnLayout()
#create the rowColumnLayout (left side: the different ways one can import motion; right side: that method's settings)
self.widgets["rowColumnLayout"] = cmds.rowColumnLayout(w = 700, h = 400, nc = 2, cw = [(1, 150), (2, 550)], parent = self.widgets["topLevelLayout"])
#create the columnLayout for the left side
self.widgets["leftSideButtonColumn"] = cmds.columnLayout(w = 150, h = 400, parent = self.widgets["rowColumnLayout"], cat = ["both", 5], rs = 5)
#and create the frame layout for the right side
self.widgets["rightSideFrame"] = cmds.frameLayout(w = 550, h = 400, collapsable = False, borderStyle = "etchedIn", labelVisible = False, parent = self.widgets["rowColumnLayout"])
#create the buttons for the different methods of importing motion
self.widgets["importMotionMethods"] = cmds.iconTextRadioCollection()
self.widgets["importMotion_mocap"] = cmds.iconTextRadioButton(select = True, w = 140, h = 50, parent = self.widgets["leftSideButtonColumn"], image = os.path.join(self.mayaToolsDir, "General", "Icons", "ART", "importMocap_off.bmp"), selectionImage = os.path.join(self.mayaToolsDir, "General", "Icons", "ART", "importMocap_on.bmp"))
self.widgets["importMotion_anims"] = cmds.iconTextRadioButton(select = False, w = 140, h = 50, parent = self.widgets["leftSideButtonColumn"], image = os.path.join(self.mayaToolsDir, "General", "Icons", "ART", "importAnim_off.bmp"), selectionImage = os.path.join(self.mayaToolsDir, "General", "Icons", "ART", "importAnim_on.bmp"))
#create the elements for the right column
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#IMPORT MOCAP
self.widgets["importMocapForm"] = cmds.formLayout(w = 500, h = 400, parent = self.widgets["rightSideFrame"])
#text labels
fbxLabel = cmds.text(label = "FBX File:", font = "boldLabelFont")
importMethodLabel = cmds.text(label = "Import Method:", font = "boldLabelFont")
frameOffsetLabel = cmds.text(label = "Frame Offset:", font = "boldLabelFont")
cmds.formLayout(self.widgets["importMocapForm"], edit = True, af = [(fbxLabel, "top", 13), (fbxLabel, "left", 10)])
cmds.formLayout(self.widgets["importMocapForm"], edit = True, af = [(importMethodLabel, "top", 140), (importMethodLabel, "left", 10)])
cmds.formLayout(self.widgets["importMocapForm"], edit = True, af = [(frameOffsetLabel, "top", 210), (frameOffsetLabel, "left", 10)])
#fbxImport
self.widgets["fbxImportTextField"] = cmds.textField(w = 400, text = "", enable = True)
self.widgets["fbxImportBrowseButton"] = cmds.symbolButton(w = 30, h = 30, image = os.path.join(self.mayaToolsDir, "General", "Icons", "ART", "browse.bmp"), c = self.fbxBrowse)
cmds.formLayout(self.widgets["importMocapForm"], edit = True, af = [(self.widgets["fbxImportTextField"], "top", 10), (self.widgets["fbxImportTextField"], "left", 70)])
cmds.formLayout(self.widgets["importMocapForm"], edit = True, af = [(self.widgets["fbxImportBrowseButton"], "top", 5), (self.widgets["fbxImportBrowseButton"], "left", 475)])
#character list
self.widgets["importMocap_characterList"] = cmds.optionMenu(w = 240, h = 50)
self.widgets["importMocap_characterThumb"] = cmds.image(w = 50, h = 50)
cmds.formLayout(self.widgets["importMocapForm"], edit = True, af = [(self.widgets["importMocap_characterList"], "top", 45), (self.widgets["importMocap_characterList"], "left", 10)])
cmds.formLayout(self.widgets["importMocapForm"], edit = True, af = [(self.widgets["importMocap_characterThumb"], "top", 45), (self.widgets["importMocap_characterThumb"], "left", 255)])
#import method
self.widgets["importMethodRadioCollection"] = cmds.radioCollection()
self.widgets["importMethod_FK"] = cmds.radioButton(label = "FK", select = True)
self.widgets["importMethod_IK"] = cmds.radioButton(label = "IK")
self.widgets["importMethod_Both"] = cmds.radioButton(label = "Both")
cmds.formLayout(self.widgets["importMocapForm"], edit = True, af = [(self.widgets["importMethod_FK"], "top", 165), (self.widgets["importMethod_FK"], "left", 10)])
cmds.formLayout(self.widgets["importMocapForm"], edit = True, af = [(self.widgets["importMethod_IK"], "top", 165), (self.widgets["importMethod_IK"], "left", 80)])
cmds.formLayout(self.widgets["importMocapForm"], edit = True, af = [(self.widgets["importMethod_Both"], "top", 165), (self.widgets["importMethod_Both"], "left", 150)])
#frame offset
self.widgets["frameOffsetField"] = cmds.intField(value=0, w = 100)
cmds.formLayout(self.widgets["importMocapForm"], edit = True, af = [(self.widgets["frameOffsetField"], "top", 235), (self.widgets["frameOffsetField"], "left", 10)])
#apply motion to parts
self.widgets["importMotionTo_Frame"] = cmds.frameLayout( w= 220, h = 350, label = "Apply To Which Parts:", bs = "etchedIn", collapsable = False)
cmds.formLayout(self.widgets["importMocapForm"], edit = True, af = [(self.widgets["importMotionTo_Frame"], "top", 40), (self.widgets["importMotionTo_Frame"], "right", 10)])
self.widgets["importMotionTo_Form"] = cmds.formLayout( w= 240, h = 280, parent = self.widgets["importMotionTo_Frame"])
self.widgets["importMotionTo_HeadButton"] = cmds.iconTextCheckBox(w = 55, h = 55, image = os.path.join(self.mayaToolsDir, "General", "Icons", "ART", "importMotion_head.bmp"), selectionImage = os.path.join(self.mayaToolsDir, "General", "Icons", "ART", "importMotion_head_on.bmp"), value = True)
self.widgets["importMotionTo_SpineButton"] = cmds.iconTextCheckBox(w = 55, h = 100, image = os.path.join(self.mayaToolsDir, "General", "Icons", "ART", "importMotion_torso.bmp"), selectionImage = os.path.join(self.mayaToolsDir, "General", "Icons", "ART", "importMotion_torso_on.bmp"), value = True)
self.widgets["importMotionTo_lArmButton"] = cmds.iconTextCheckBox(w = 30, h = 100, image = os.path.join(self.mayaToolsDir, "General", "Icons", "ART", "importMotion_arm.bmp"), selectionImage = os.path.join(self.mayaToolsDir, "General", "Icons", "ART", "importMotion_arm_on.bmp"), value = True)
self.widgets["importMotionTo_rArmButton"] = cmds.iconTextCheckBox(w = 30, h = 100, image = os.path.join(self.mayaToolsDir, "General", "Icons", "ART", "importMotion_arm_r.bmp"), selectionImage = os.path.join(self.mayaToolsDir, "General", "Icons", "ART", "importMotion_arm_r_on.bmp"), value = True)
self.widgets["importMotionTo_lLegButton"] = cmds.iconTextCheckBox(w = 30, h = 110, image = os.path.join(self.mayaToolsDir, "General", "Icons", "ART", "importMotion_leg.bmp"), selectionImage = os.path.join(self.mayaToolsDir, "General", "Icons", "ART", "importMotion_leg_on.bmp"), value = True)
self.widgets["importMotionTo_rLegButton"] = cmds.iconTextCheckBox(w = 30, h = 110, image = os.path.join(self.mayaToolsDir, "General", "Icons", "ART", "importMotion_leg.bmp"), selectionImage = os.path.join(self.mayaToolsDir, "General", "Icons", "ART", "importMotion_leg_on.bmp"), value = True)
cmds.formLayout(self.widgets["importMotionTo_Form"], edit = True, af = [(self.widgets["importMotionTo_HeadButton"], "top", 25), (self.widgets["importMotionTo_HeadButton"], "right", 100)])
cmds.formLayout(self.widgets["importMotionTo_Form"], edit = True, af = [(self.widgets["importMotionTo_SpineButton"], "top", 80), (self.widgets["importMotionTo_SpineButton"], "right", 100)])
cmds.formLayout(self.widgets["importMotionTo_Form"], edit = True, af = [(self.widgets["importMotionTo_lArmButton"], "top", 80), (self.widgets["importMotionTo_lArmButton"], "right", 70)])
cmds.formLayout(self.widgets["importMotionTo_Form"], edit = True, af = [(self.widgets["importMotionTo_rArmButton"], "top", 80), (self.widgets["importMotionTo_rArmButton"], "right", 155)])
cmds.formLayout(self.widgets["importMotionTo_Form"], edit = True, af = [(self.widgets["importMotionTo_lLegButton"], "top", 180), (self.widgets["importMotionTo_lLegButton"], "right", 98)])
cmds.formLayout(self.widgets["importMotionTo_Form"], edit = True, af = [(self.widgets["importMotionTo_rLegButton"], "top", 180), (self.widgets["importMotionTo_rLegButton"], "right", 125)])
#import button
self.widgets["importMocap_importButton"] = cmds.symbolButton(c = self.importMocap, w = 300, h = 50, image = os.path.join(self.mayaToolsDir, "General", "Icons", "ART", "importMotion_importButton.bmp"), parent = self.widgets["importMocapForm"])
cmds.formLayout(self.widgets["importMocapForm"], edit = True, af = [(self.widgets["importMocap_importButton"], "bottom", 10), (self.widgets["importMocap_importButton"], "left", 10)])
#heel and knee solver checkboxes
self.widgets["heelSolverCB"] = cmds.checkBox(label = "Solve Foot Roll", v = False, parent = self.widgets["importMocapForm"])
self.widgets["kneeSolverCB"] = cmds.checkBox(label = "Solve Knee Vectors", v = True, parent = self.widgets["importMocapForm"])
cmds.formLayout(self.widgets["importMocapForm"], edit = True, af = [(self.widgets["heelSolverCB"], "bottom", 110), (self.widgets["heelSolverCB"], "left", 10)])
cmds.formLayout(self.widgets["importMocapForm"], edit = True, af = [(self.widgets["kneeSolverCB"], "bottom", 110), (self.widgets["kneeSolverCB"], "left", 140)])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#IMPORT ANIMATION
self.widgets["importAnimationFormLayout"] = cmds.formLayout(w = 500, h = 400, parent = self.widgets["rightSideFrame"], visible = False)
#character list
self.widgets["importAnim_characterList"] = cmds.optionMenu(w = 200, h = 30)
self.widgets["importAnim_characterThumb"] = cmds.image(w = 50, h = 50)
cmds.formLayout(self.widgets["importAnimationFormLayout"], edit = True, af = [(self.widgets["importAnim_characterList"], "top", 30), (self.widgets["importAnim_characterList"], "left", 275)])
cmds.formLayout(self.widgets["importAnimationFormLayout"], edit = True, af = [(self.widgets["importAnim_characterThumb"], "top", 30), (self.widgets["importAnim_characterThumb"], "right", 10)])
#create the projects drop down
label = cmds.text(label = "Projects:", align = 'right')
self.widgets["importAnimProjectsList"] = cmds.optionMenu(w = 250,h = 30, parent = self.widgets["importAnimationFormLayout"], cc = self.getProjCategories)
cmds.formLayout(self.widgets["importAnimationFormLayout"], edit = True, af = [(label, "top", 10), (label, "left", 10)])
cmds.formLayout(self.widgets["importAnimationFormLayout"], edit = True, af = [(self.widgets["importAnimProjectsList"], "top", 30), (self.widgets["importAnimProjectsList"], "left", 10)])
#create the categories layout
self.widgets["categoriesList_topLayout"] = cmds.frameLayout(w = 250, h = 300, bs = "etchedIn", cll = False, cl = False, lv = False, parent = self.widgets["importAnimationFormLayout"])
cmds.formLayout(self.widgets["importAnimationFormLayout"], edit = True, af = [(self.widgets["categoriesList_topLayout"], "bottom", 10), (self.widgets["categoriesList_topLayout"], "left", 10)])
self.widgets["categoriesList_scrollLayout"] = cmds.scrollLayout(w = 240, h = 300, hst = 0, parent = self.widgets["categoriesList_topLayout"])
self.widgets["categoriesList_columnLayout"] = cmds.columnLayout(w = 220, parent = self.widgets["categoriesList_scrollLayout"])
#create the animation list layout
self.widgets["animList_topLayout"] = cmds.frameLayout(w = 260, h = 300, bs = "etchedIn", cll = False, cl = False, lv = False, parent = self.widgets["importAnimationFormLayout"], bgc = [.2, .2, .2])
cmds.formLayout(self.widgets["importAnimationFormLayout"], edit = True, af = [(self.widgets["animList_topLayout"], "bottom", 10), (self.widgets["animList_topLayout"], "right", 10)])
self.widgets["animList_scrollLayout"] = cmds.scrollLayout(w = 260, h = 300, hst = 0, parent = self.widgets["animList_topLayout"], bgc = [.2, .2, .2])
self.widgets["animList_columnLayout"] = cmds.columnLayout(w = 220, parent = self.widgets["animList_scrollLayout"], bgc = [.2, .2, .2])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#Edit radio button commands
cmds.iconTextRadioButton(self.widgets["importMotion_mocap"], edit = True, onc = partial(self.switchMode, "fbx"))
cmds.iconTextRadioButton(self.widgets["importMotion_anims"], edit = True, onc = partial(self.switchMode, "anim"))
#show the window
cmds.showWindow(self.widgets["window"])
#populate the dropdown with the characters
self.getCharacters()
self.changeActiveCharacter()
#populate the import animations project list
self.getProjects()
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def getProjects(self, *args):
projectPath = os.path.join(self.mayaToolsDir, "General", "ART", "Projects")
projects = os.listdir(projectPath)
for proj in projects:
cmds.menuItem(label = proj, parent = self.widgets["importAnimProjectsList"])
#set to favorite if it exists
settingsLocation = os.path.join(self.mayaToolsDir, "General", "Scripts", "projectSettings.txt")
if os.path.exists(settingsLocation):
f = open(settingsLocation, 'r')
settings = cPickle.load(f)
favoriteProject = settings.get("FavoriteProject")
try:
cmds.optionMenu(self.widgets["importAnimProjectsList"], edit = True, v = favoriteProject)
except:
pass
self.getProjCategories()
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def getProjCategories(self, *args):
#clear out all children first
children = cmds.columnLayout(self.widgets["categoriesList_columnLayout"], q = True, childArray = True)
if children != None:
for child in children:
cmds.deleteUI(child)
selectedProj = cmds.optionMenu(self.widgets["importAnimProjectsList"], q = True, v = True)
categoryPath = os.path.join(self.mayaToolsDir, "General", "ART", "Projects", selectedProj, "Animations")
if not os.path.exists(categoryPath):
os.makedirs(categoryPath)
categories = os.listdir(categoryPath)
self.widgets["animationCategories"] = cmds.iconTextRadioCollection()
for item in categories:
self.createCategoryEntry(item, selectedProj)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def createCategoryEntry(self, categoryName, project, *args):
cmds.iconTextRadioButton(onc = partial(self.getAnimations, categoryName, project), parent = self.widgets["categoriesList_columnLayout"], image = "menuIconFile.png", w = 220, h = 30, style = "iconAndTextHorizontal", label = categoryName, cl = self.widgets["animationCategories"], sl =True)
#get animations for the selected category
self.getAnimations(categoryName, project)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def getAnimations(self, categoryName, project, *args):
#clear out all animation files first
children = cmds.columnLayout(self.widgets["animList_columnLayout"], q = True, childArray = True)
if children != None:
for child in children:
cmds.deleteUI(child)
#get animations and populate UI
animFiles = os.listdir(os.path.join(self.mayaToolsDir, "General", "ART", "Projects", project, "Animations", categoryName))
for file in animFiles:
niceName = file.partition(".")[0]
button = cmds.iconTextButton( parent = self.widgets["animList_columnLayout"], image = "ghostOff.png", w = 220, h = 30, bgc = [.2, .2, .2], style = "iconAndTextHorizontal", label = niceName, ann = (project + ", " + categoryName))
#create the popup menu for the button
menu = cmds.popupMenu(b = 1, parent =button)
cmds.menuItem(label = "Import Options for " + file.partition(".")[0] + " animation:", parent = menu, enable = False)
cmds.menuItem(divider = True, parent = menu)
cmds.menuItem(label = "Import All Data", parent = menu, c = partial(self.importAnimation, file, project, categoryName))
cmds.menuItem(label = "Import Onto Selected Controls", parent = menu, c = partial(self.importAnimationOnSelection, file, project, categoryName))
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def importAnimationOnSelection(self, animationName, project, categoryName, *args):
selected = cmds.ls(sl = True)
animPath = os.path.join(self.mayaToolsDir, "General", "ART", "Projects", project, "Animations", categoryName, animationName)
character = cmds.optionMenu(self.widgets["importAnim_characterList"], q = True, v = True)
f = open(animPath, 'r')
animData = cPickle.load(f)
f.close()
#create a progress window
progressWindow = cmds.progressWindow(title='Importing Animation', progress = 10, status='Importing...', isInterruptable=True )
progressIncrement = 100/len(animData)
#go through all of the animData (each entry is [control, per-layer attribute and key data])
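#expected layout of each animData entry, inferred from the parsing below (a sketch, not authoritative):
#    [controlName,
#        [ [layerNameOrNone, [ [attrName, [ [frame, value, (optional tangentInfo)], ... ]], ... ]], ... ],
#        ... ]
#data[0] is the control name; data[1:] holds the per-layer attribute/key data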
for data in animData:
#cmds.progressWindow(edit = True, progress = progressIncrement, status='Importing...')
#sort out the incoming data
control = data[0]
keyInfo = data[1:]
layers = []
for info in keyInfo:
for i in info:
layer = i[0]
if layer != None:
layers.append(layer)
#if an object did have layers, we need to sort out the information
if layers != []:
#create needed layers
for layer in layers:
if layer != "BaseAnimation":
#see if our layer exists, if not create and select
try:
cmds.select(character + ":" + control)
if cmds.animLayer(layer, q = True, exists = True) == False:
cmds.animLayer(layer, addSelectedObjects = True)
else:
cmds.animLayer(layer, edit = True, addSelectedObjects = True)
except:
pass
#first setup base animation before other layers
animationLayersAll = cmds.ls(type = "animLayer")
for l in animationLayersAll:
cmds.animLayer(l, edit = True, selected = False)
cmds.animLayer("BaseAnimation", edit = True, selected = True)
for info in keyInfo:
for i in info:
layer = i[0]
if layer == None:
layer = "BaseAnimation"
attrs = i[1]
for attr in attrs:
attribute = attr[0]
if cmds.objExists(character + ":" + control + "." + attribute):
keys = attr[1]
for key in keys:
frame = key[0]
value = key[1]
#grab tangent info if there was any
try:
tangentInfo = key[2]
except:
tangentInfo = [None, None, None, None, None, None]
pass
if cmds.objExists(character + ":" + control):
if character + ":" + control in selected:
cmds.setKeyframe(character + ":" + control, animLayer = layer, at = attribute, t = frame, value = value, noResolve = True)
if tangentInfo[1] != None:
cmds.keyTangent(character + ":" + control + "." + attribute, t = (frame, frame), itt = tangentInfo[1])
if tangentInfo[1] != None:
cmds.keyTangent(character + ":" + control + "." + attribute, t = (frame, frame), ott = tangentInfo[1])
if tangentInfo[2] != None:
cmds.keyTangent(character + ":" + control + "." + attribute, t = (frame, frame), a = True, ia = tangentInfo[2])
if tangentInfo[3] != None:
cmds.keyTangent(character + ":" + control + "." + attribute, t = (frame, frame), a = True, oa = tangentInfo[3])
if tangentInfo[4] != None:
cmds.keyTangent(character + ":" + control + "." + attribute, t = (frame, frame), iw = tangentInfo[4])
if tangentInfo[5] != None:
cmds.keyTangent(character + ":" + control + "." + attribute, t = (frame, frame), ow = tangentInfo[5])
#refresh scene
cmds.select(character + ":" + control)
cmds.setToolTo('moveSuperContext')
cmds.select(clear = True)
else:
for info in keyInfo:
for i in info:
attrs = i[1]
for attr in attrs:
attribute = attr[0]
if cmds.objExists(character + ":" + control + "." + attribute):
keys = attr[1]
for key in keys:
frame = key[0]
value = key[1]
#grab tangent info if there was any
try:
tangentInfo = key[2]
except:
tangentInfo = [None, None, None, None, None, None]
pass
if cmds.objExists(character + ":" + control):
if character + ":" + control in selected:
if cmds.animLayer("BaseAnimation", q = True, exists = True):
cmds.setKeyframe(character + ":" + control, animLayer = "BaseAnimation", at = attribute, t = frame, value = value, noResolve = True)
if tangentInfo[1] != None:
cmds.keyTangent(character + ":" + control + "." + attribute, t = (frame, frame), itt = tangentInfo[1])
if tangentInfo[1] != None:
cmds.keyTangent(character + ":" + control + "." + attribute, t = (frame, frame), ott = tangentInfo[1])
if tangentInfo[2] != None:
cmds.keyTangent(character + ":" + control + "." + attribute, t = (frame, frame), a = True, ia = tangentInfo[2])
if tangentInfo[3] != None:
cmds.keyTangent(character + ":" + control + "." + attribute, t = (frame, frame), a = True, oa = tangentInfo[3])
if tangentInfo[4] != None:
cmds.keyTangent(character + ":" + control + "." + attribute, t = (frame, frame), iw = tangentInfo[4])
if tangentInfo[5] != None:
cmds.keyTangent(character + ":" + control + "." + attribute, t = (frame, frame), ow = tangentInfo[5])
else:
cmds.setKeyframe(character + ":" + control, at = attribute, t = frame, value = value)
if tangentInfo[1] != None:
cmds.keyTangent(character + ":" + control + "." + attribute, t = (frame, frame), itt = tangentInfo[1])
if tangentInfo[1] != None:
cmds.keyTangent(character + ":" + control + "." + attribute, t = (frame, frame), ott = tangentInfo[1])
if tangentInfo[2] != None:
cmds.keyTangent(character + ":" + control + "." + attribute, t = (frame, frame), a = True, ia = tangentInfo[2])
if tangentInfo[3] != None:
cmds.keyTangent(character + ":" + control + "." + attribute, t = (frame, frame), a = True, oa = tangentInfo[3])
if tangentInfo[4] != None:
cmds.keyTangent(character + ":" + control + "." + attribute, t = (frame, frame), iw = tangentInfo[4])
if tangentInfo[5] != None:
cmds.keyTangent(character + ":" + control + "." + attribute, t = (frame, frame), ow = tangentInfo[5])
cmds.progressWindow(progressWindow, endProgress=1)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def importAnimation(self, animationName, project, categoryName, *args):
animPath = os.path.join(self.mayaToolsDir, "General", "ART", "Projects", project, "Animations" , categoryName, animationName)
character = cmds.optionMenu(self.widgets["importAnim_characterList"], q = True, v = True)
f = open(animPath, 'r')
animData = cPickle.load(f)
f.close()
#create a progress window
progressWindow = cmds.progressWindow(title='Importing Animation', progress = 10, status='Importing...', isInterruptable=True )
progressIncrement = 100/len(animData)
#go through all of the animData (each entry is [control, per-layer attribute and key data])
for data in animData:
#sort out the incoming data
control = data[0]
keyInfo = data[1:]
layers = []
for info in keyInfo:
for i in info:
layer = i[0]
if layer != None:
layers.append(layer)
#if an object did have layers, we need to sort out the information
if layers != []:
#create needed layers
for layer in layers:
if layer != "BaseAnimation":
#see if our layer exists, if not create and select
try:
cmds.select(character + ":" + control)
if cmds.animLayer(layer, q = True, exists = True) == False:
cmds.animLayer(layer, addSelectedObjects = True)
else:
cmds.animLayer(layer, edit = True, addSelectedObjects = True)
except:
pass
#first setup base animation before other layers
animationLayersAll = cmds.ls(type = "animLayer")
for l in animationLayersAll:
cmds.animLayer(l, edit = True, selected = False)
cmds.animLayer("BaseAnimation", edit = True, selected = True)
for info in keyInfo:
#info holds all of the keyframe information for this control, grouped per layer. If it contains more than one entry, the control has animation on more than one layer.
for i in info:
#i is a two-element array: the first element is the layer name (or None for the base layer), the second element is all of the keyframe data
layer = i[0]
if layer == None:
layer = "BaseAnimation"
attrs = i[1]
for attr in attrs:
#print layer, attr
attribute = attr[0]
if cmds.objExists(character + ":" + control + "." + attribute):
keys = attr[1]
for key in keys:
frame = key[0]
value = key[1]
#grab tangent info if there was any
try:
tangentInfo = key[2]
except:
tangentInfo = [None, None, None, None, None, None]
pass
if cmds.objExists(character + ":" + control):
cmds.setKeyframe(character + ":" + control, animLayer = layer, at = attribute, t = frame, value = value, noResolve = True)
if tangentInfo[1] != None:
cmds.keyTangent(character + ":" + control + "." + attribute, t = (frame, frame), itt = tangentInfo[1])
if tangentInfo[1] != None:
cmds.keyTangent(character + ":" + control + "." + attribute, t = (frame, frame), ott = tangentInfo[1])
if tangentInfo[2] != None:
cmds.keyTangent(character + ":" + control + "." + attribute, t = (frame, frame), a = True, ia = tangentInfo[2])
if tangentInfo[3] != None:
cmds.keyTangent(character + ":" + control + "." + attribute, t = (frame, frame), a = True, oa = tangentInfo[3])
if tangentInfo[4] != None:
cmds.keyTangent(character + ":" + control + "." + attribute, t = (frame, frame), iw = tangentInfo[4])
if tangentInfo[5] != None:
cmds.keyTangent(character + ":" + control + "." + attribute, t = (frame, frame), ow = tangentInfo[5])
#refresh scene
cmds.select(character + ":" + control)
cmds.setToolTo('moveSuperContext')
cmds.select(clear = True)
else:
for info in keyInfo:
for i in info:
attrs = i[1]
for attr in attrs:
attribute = attr[0]
if cmds.objExists(character + ":" + control + "." + attribute):
keys = attr[1]
for key in keys:
frame = key[0]
value = key[1]
#grab tangent info if there was any
try:
tangentInfo = key[2]
except:
tangentInfo = [None, None, None, None, None, None]
pass
if cmds.objExists(character + ":" + control):
if cmds.animLayer("BaseAnimation", q = True, exists = True):
cmds.setKeyframe(character + ":" + control, at = attribute, t = frame, value = value, animLayer = "BaseAnimation")
if tangentInfo[1] != None:
cmds.keyTangent(character + ":" + control + "." + attribute, t = (frame, frame), itt = tangentInfo[1])
if tangentInfo[1] != None:
cmds.keyTangent(character + ":" + control + "." + attribute, t = (frame, frame), ott = tangentInfo[1])
if tangentInfo[2] != None:
cmds.keyTangent(character + ":" + control + "." + attribute, t = (frame, frame), a = True, ia = tangentInfo[2])
if tangentInfo[3] != None:
cmds.keyTangent(character + ":" + control + "." + attribute, t = (frame, frame), a = True, oa = tangentInfo[3])
if tangentInfo[4] != None:
cmds.keyTangent(character + ":" + control + "." + attribute, t = (frame, frame), iw = tangentInfo[4])
if tangentInfo[5] != None:
cmds.keyTangent(character + ":" + control + "." + attribute, t = (frame, frame), ow = tangentInfo[5])
else:
cmds.setKeyframe(character + ":" + control, at = attribute, t = frame, value = value)
if tangentInfo[1] != None:
cmds.keyTangent(character + ":" + control + "." + attribute, t = (frame, frame), itt = tangentInfo[1])
if tangentInfo[1] != None:
cmds.keyTangent(character + ":" + control + "." + attribute, t = (frame, frame), ott = tangentInfo[1])
if tangentInfo[2] != None:
cmds.keyTangent(character + ":" + control + "." + attribute, t = (frame, frame), a = True, ia = tangentInfo[2])
if tangentInfo[3] != None:
cmds.keyTangent(character + ":" + control + "." + attribute, t = (frame, frame), a = True, oa = tangentInfo[3])
if tangentInfo[4] != None:
cmds.keyTangent(character + ":" + control + "." + attribute, t = (frame, frame), iw = tangentInfo[4])
if tangentInfo[5] != None:
cmds.keyTangent(character + ":" + control + "." + attribute, t = (frame, frame), ow = tangentInfo[5])
cmds.progressWindow(progressWindow, endProgress=1)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def switchMode(self, mode, *args):
if mode == "fbx":
cmds.formLayout(self.widgets["importMocapForm"], edit = True, visible = True)
cmds.formLayout(self.widgets["importAnimationFormLayout"], edit = True, visible = False)
if mode == "anim":
cmds.formLayout(self.widgets["importMocapForm"], edit = True, visible = False)
cmds.formLayout(self.widgets["importAnimationFormLayout"], edit = True, visible = True)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def fbxBrowse(self, *args):
result = cmds.fileDialog2(fm = 1, fileFilter = "*.fbx", okc = "Select")
#bail out if the user cancelled the dialog
if not result:
return
#edit the text field with the chosen path
cmds.textField(self.widgets["fbxImportTextField"], edit = True, text = result[0])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def ikHeelSolve(self, character, start, end, *args):
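#This pass reads the baked ball joint rotation on the imported skeleton each frame; where the ball rolls
#past 10 degrees it zeroes the IK foot rotation, copies the inverted roll onto the heel control, and
#translates the IK foot control to compensate for the resulting position change. The 10 degree threshold
#is hard-coded below.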
lValues = []
rValues = []
cmds.progressWindow(self.progWindow, edit=True, progress= 60, status= "Solving IK Foot Roll" )
for i in range(int(start), int(end + 1)):
cmds.currentTime(i)
if cmds.objExists("ball_l"):
lBallVal = cmds.getAttr("ball_l.rz")
lValues.append(lBallVal)
if cmds.objExists("ball_r"):
rBallVal = cmds.getAttr("ball_r.rz")
rValues.append(rBallVal)
cmds.progressWindow(self.progWindow, edit=True, progress= 80, status= "Solving IK Foot Roll" )
x = 0
for i in range(int(start), int(end + 1)):
cmds.currentTime(i)
if cmds.objExists("ball_l"):
if lValues[x] > 10:
cmds.setAttr(character + ":ik_foot_anim_l.rx", 0)
cmds.setAttr(character + ":ik_foot_anim_l.ry", 0)
cmds.setAttr(character + ":ik_foot_anim_l.rz", 0)
cmds.setKeyframe(character + ":ik_foot_anim_l")
cmds.setAttr(character + ":heel_ctrl_l.rz", lValues[x] * -1)
cmds.setKeyframe(character + ":heel_ctrl_l.rz")
footPos = cmds.xform("foot_l", q = True, ws = True, t = True)
ikFootPos = cmds.xform(character + ":ik_leg_foot_l", q = True, ws = True, t = True)
yDiff = footPos[1] - ikFootPos[1]
zDiff = footPos[2] - ikFootPos[2]
cmds.xform(character + ":ik_foot_anim_l", r = True, t = [0, yDiff, zDiff])
cmds.setKeyframe(character + ":ik_foot_anim_l")
else:
cmds.setAttr(character + ":heel_ctrl_l.rz", 0)
cmds.setKeyframe(character + ":heel_ctrl_l.rz")
if cmds.objExists("ball_r"):
if rValues[x] > 10:
cmds.setAttr(character + ":ik_foot_anim_r.rx", 0)
cmds.setAttr(character + ":ik_foot_anim_r.ry", 0)
cmds.setAttr(character + ":ik_foot_anim_r.rz", 0)
cmds.setKeyframe(character + ":ik_foot_anim_r")
cmds.setAttr(character + ":heel_ctrl_r.rz", rValues[x] * -1)
cmds.setKeyframe(character + ":heel_ctrl_r.rz")
footPos = cmds.xform("foot_r", q = True, ws = True, t = True)
ikFootPos = cmds.xform(character + ":ik_leg_foot_r", q = True, ws = True, t = True)
yDiff = footPos[1] - ikFootPos[1]
zDiff = footPos[2] - ikFootPos[2]
cmds.xform(character + ":ik_foot_anim_r", r = True, t = [0, yDiff, zDiff])
cmds.setKeyframe(character + ":ik_foot_anim_r")
else:
cmds.setAttr(character + ":heel_ctrl_r.rz", 0)
cmds.setKeyframe(character + ":heel_ctrl_r.rz")
#iterate x
x = x + 1
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def ikKneeSolve(self, character, start, end, *args):
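#This pass creates distanceDimension nodes between each IK rig knee (ik_leg_calf_*) and the corresponding
#mocap calf joint, then steps through the frame range calling checkDistance, which nudges the knee_twist
#attribute until the two knees line up. Viewport display is temporarily stripped down to speed up the
#per-frame evaluation.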
#Hide all the things
panels = cmds.getPanel(type = 'modelPanel')
for panel in panels:
editor = cmds.modelPanel(panel, q = True, modelEditor = True)
try:
cmds.modelEditor(editor, edit = True, interactive = False, displayTextures = False, textures = False, allObjects = False )
except:
pass
startPointR = cmds.xform(character + ":ik_leg_calf_r", q = True, ws = True, t = True)
endPointR = cmds.xform("calf_r", q = True, ws = True, t = True)
distR = cmds.distanceDimension( sp=(startPointR[0],startPointR[1],startPointR[2]), ep=(endPointR[0], endPointR[1], endPointR[2]) )
distRParent = cmds.listRelatives(distR, parent = True)[0]
locsR = cmds.listConnections(distR)
startLocR = locsR[0]
endLocR = locsR[1]
cmds.pointConstraint(character + ":ik_leg_calf_r", startLocR)
cmds.pointConstraint("calf_r", endLocR)
startPointL = cmds.xform(character + ":ik_leg_calf_l", q = True, ws = True, t = True)
endPointL = cmds.xform("calf_l", q = True, ws = True, t = True)
distL = cmds.distanceDimension( sp=(startPointL[0],startPointL[1],startPointL[2]), ep=(endPointL[0], endPointL[1], endPointL[2]) )
distLParent = cmds.listRelatives(distL, parent = True)[0]
locsL = cmds.listConnections(distL)
startLocL = locsL[0]
endLocL = locsL[1]
cmds.pointConstraint(character + ":ik_leg_calf_l", startLocL)
cmds.pointConstraint("calf_l", endLocL)
cmds.currentTime(int(start))
#get distance between rig knees and mocap knees
for i in range(int(start), int(end) + 1):
cmds.currentTime(i)
distanceR = cmds.getAttr(distR + ".distance")
distanceL = cmds.getAttr(distL + ".distance")
self.checkDistance(character, distL, distanceL, distanceL, "l")
self.checkDistance(character, distR, distanceR, distanceR, "r")
#clean up
cmds.delete([locsL[0], locsL[1], locsR[0], locsR[1], distL, distR, distRParent, distLParent])
#Show all the things
panels = cmds.getPanel(type = 'modelPanel')
for panel in panels:
editor = cmds.modelPanel(panel, q = True, modelEditor = True)
try:
cmds.modelEditor(editor, edit = True, interactive = True, displayTextures = True, textures = True, allObjects = True )
except:
pass
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def checkDistance(self, character, distanceNode, distanceAttr, originalValue, side):
if distanceAttr > 1:
currentAttr = cmds.getAttr(character + ":ik_foot_anim_" + side + ".knee_twist")
try:
cmds.setAttr(character + ":ik_foot_anim_" + side + ".knee_twist", currentAttr + 1)
cmds.setKeyframe(character + ":ik_foot_anim_" + side + ".knee_twist")
newDist = cmds.getAttr(distanceNode + ".distance")
if newDist < originalValue:
self.checkDistance(character, distanceNode, newDist, newDist, side)
cmds.progressWindow(self.progWindow, edit=True, progress= (cmds.progressWindow(q = True, progress = True) + 3), status= "Solving IK Pole Vectors" )
if newDist > originalValue:
cmds.setAttr(character + ":ik_foot_anim_" + side + ".knee_twist", currentAttr - 2)
cmds.setKeyframe(character + ":ik_foot_anim_" + side + ".knee_twist")
newDist = cmds.getAttr(distanceNode + ".distance")
self.checkDistance(character, distanceNode, newDist, newDist, side)
cmds.progressWindow(self.progWindow, edit=True, progress= (cmds.progressWindow(q = True, progress = True) + 3), status= "Solving IK Pole Vectors" )
except:
pass
#if adding 1 makes dist get smaller, continue adding 1 until the overall distance is less than 1
#if subtracting 1 makes dist get smaller, continue subtracting 1 until the overall distance is less than 1
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def importMocap(self, *args):
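#Overall flow: validate the FBX path, duplicate the referenced character's "root" skeleton, constrain the
#rig controls to that duplicate (FK, IK, or both), import the FBX with exmerge so its animation lands on
#the duplicate skeleton, bake the constrained controls over the imported frame range, optionally run the
#heel/knee solvers, apply the frame offset, then delete the duplicate skeleton.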
#get the fbx file
filePath = cmds.textField(self.widgets["fbxImportTextField"], q = True, text = True)
if not os.path.exists(filePath):
cmds.warning("The given file path is not a valid path.")
return
else:
#get the active character
character = cmds.optionMenu(self.widgets["importMocap_characterList"], q = True, value = True)
#duplicate that character's root
if cmds.objExists("root"):
cmds.warning("There is already a skeleton in the scene with the name \"root\". Aborting")
return
newSkeleton = cmds.duplicate(character + ":root")
#find import method
selectedRadioButton = cmds.radioCollection(self.widgets["importMethodRadioCollection"], q = True, select = True)
importMethod = cmds.radioButton(selectedRadioButton, q = True, label = True)
#find parts that motion will be applied to
head = cmds.iconTextCheckBox(self.widgets["importMotionTo_HeadButton"], q = True, value = True)
body = cmds.iconTextCheckBox(self.widgets["importMotionTo_SpineButton"], q = True, value = True)
leftArm = cmds.iconTextCheckBox(self.widgets["importMotionTo_lArmButton"], q = True, value = True)
rightArm = cmds.iconTextCheckBox(self.widgets["importMotionTo_rArmButton"], q = True, value = True)
leftLeg = cmds.iconTextCheckBox(self.widgets["importMotionTo_lLegButton"], q = True, value = True)
rightLeg = cmds.iconTextCheckBox(self.widgets["importMotionTo_rLegButton"], q = True, value = True)
if importMethod == "FK":
extraControls = self.importMocap_FK(character, head, body, leftArm, rightArm, leftLeg, rightLeg)
if importMethod == "IK":
extraControls = self.importMocap_IK(character, head, body, leftArm, rightArm, leftLeg, rightLeg)
if importMethod == "Both":
extraControls = self.importMocap_FK(character, head, body, leftArm, rightArm, leftLeg, rightLeg)
self.importMocap_IK(character, head, body, leftArm, rightArm, leftLeg, rightLeg)
print extraControls
#fingers
self.importMocap_Fingers(character, leftArm, rightArm)
#ensure that the scene is in 30fps
cmds.currentUnit(time = 'ntsc')
cmds.playbackOptions(min = 0, max = 100, animationStartTime = 0, animationEndTime = 100)
cmds.currentTime(0)
#import the FBX file
string = "FBXImportMode -v \"exmerge\";"
mel.eval(string)
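#"exmerge" (exclusive merge) makes the FBX import update animation on matching existing nodes
#(the duplicated skeleton) instead of adding new ones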
cmds.file(filePath, i = True, prompt = False, force = True)
animLayers = cmds.ls(type = "animLayer")
if animLayers != []:
for layer in animLayers:
cmds.animLayer(layer, edit = True, selected = False)
cmds.animLayer("BaseAnimation", edit = True, selected = True, preferred = True)
#snap timeline to length of imported animation
firstFrame = cmds.findKeyframe("pelvis", which = 'first')
lastFrame = cmds.findKeyframe("pelvis", which = 'last')
if lastFrame == firstFrame:
lastFrame = lastFrame + 1
cmds.playbackOptions(min = firstFrame, max = lastFrame, animationStartTime = firstFrame, animationEndTime = lastFrame)
#bake the animation down onto the controls
cmds.select(clear = True)
if importMethod == "FK":
if body == True:
cmds.select(character + ":body_anim", add = True)
cmds.select(character + ":spine_01_anim", add = True)
cmds.select(character + ":spine_02_anim", add = True)
if cmds.objExists(character + ":spine_03_anim"):
cmds.select(character + ":spine_03_anim", add = True)
if cmds.objExists(character + ":spine_04_anim"):
cmds.select(character + ":spine_04_anim", add = True)
if cmds.objExists(character + ":spine_05_anim"):
cmds.select(character + ":spine_05_anim", add = True)
if head == True:
cmds.select(character + ":head_fk_anim", add = True)
if cmds.objExists(character + ":neck_01_fk_anim"):
cmds.select(character + ":neck_01_fk_anim", add = True)
if cmds.objExists(character + ":neck_02_fk_anim"):
cmds.select(character + ":neck_02_fk_anim", add = True)
if cmds.objExists(character + ":neck_03_fk_anim"):
cmds.select(character + ":neck_03_fk_anim", add = True)
if leftArm == True:
cmds.select(character + ":clavicle_l_anim", add = True)
cmds.select(character + ":fk_arm_l_anim", add = True)
cmds.select(character + ":fk_elbow_l_anim", add = True)
cmds.select(character + ":fk_wrist_l_anim", add = True)
if rightArm == True:
cmds.select(character + ":clavicle_r_anim", add = True)
cmds.select(character + ":fk_arm_r_anim", add = True)
cmds.select(character + ":fk_elbow_r_anim", add = True)
cmds.select(character + ":fk_wrist_r_anim", add = True)
if leftLeg:
cmds.select(character + ":fk_thigh_l_anim", add = True)
cmds.select(character + ":fk_calf_l_anim", add = True)
cmds.select(character + ":fk_foot_l_anim", add = True)
if cmds.objExists("ball_l"):
cmds.select(character + ":fk_ball_l_anim", add = True)
if rightLeg:
cmds.select(character + ":fk_thigh_r_anim", add = True)
cmds.select(character + ":fk_calf_r_anim", add = True)
cmds.select(character + ":fk_foot_r_anim", add = True)
if cmds.objExists("ball_r"):
cmds.select(character + ":fk_ball_r_anim", add = True)
if importMethod == "IK":
if body == True:
cmds.select(character + ":body_anim", add = True)
cmds.select(character + ":chest_ik_anim", add = True)
cmds.select(character + ":mid_ik_anim", add = True)
if head == True:
cmds.select(character + ":head_fk_anim", add = True)
if cmds.objExists(character + ":neck_01_fk_anim"):
cmds.select(character + ":neck_01_fk_anim", add = True)
if cmds.objExists(character + ":neck_02_fk_anim"):
cmds.select(character + ":neck_02_fk_anim", add = True)
if cmds.objExists(character + ":neck_03_fk_anim"):
cmds.select(character + ":neck_03_fk_anim", add = True)
if leftArm == True:
cmds.select(character + ":ik_wrist_l_anim", add = True)
cmds.select(character + ":ik_elbow_l_anim", add = True)
if rightArm == True:
cmds.select(character + ":ik_wrist_r_anim", add = True)
cmds.select(character + ":ik_elbow_r_anim", add = True)
if leftLeg == True:
cmds.select(character + ":ik_foot_anim_l", add = True)
if rightLeg == True:
cmds.select(character + ":ik_foot_anim_r", add = True)
if importMethod == "Both":
if body == True:
cmds.select(character + ":body_anim", add = True)
cmds.select(character + ":chest_ik_anim", add = True)
cmds.select(character + ":mid_ik_anim", add = True)
cmds.select(character + ":body_anim", add = True)
cmds.select(character + ":spine_01_anim", add = True)
cmds.select(character + ":spine_02_anim", add = True)
if cmds.objExists(character + ":spine_03_anim"):
cmds.select(character + ":spine_03_anim", add = True)
if cmds.objExists(character + ":spine_04_anim"):
cmds.select(character + ":spine_04_anim", add = True)
if cmds.objExists(character + ":spine_05_anim"):
cmds.select(character + ":spine_05_anim", add = True)
if head == True:
cmds.select(character + ":head_fk_anim", add = True)
if cmds.objExists(character + ":neck_01_fk_anim"):
cmds.select(character + ":neck_01_fk_anim", add = True)
if cmds.objExists(character + ":neck_02_fk_anim"):
cmds.select(character + ":neck_02_fk_anim", add = True)
if cmds.objExists(character + ":neck_03_fk_anim"):
cmds.select(character + ":neck_03_fk_anim", add = True)
if leftArm == True:
cmds.select(character + ":ik_wrist_l_anim", add = True)
cmds.select(character + ":ik_elbow_l_anim", add = True)
cmds.select(character + ":clavicle_l_anim", add = True)
cmds.select(character + ":fk_arm_l_anim", add = True)
cmds.select(character + ":fk_elbow_l_anim", add = True)
cmds.select(character + ":fk_wrist_l_anim", add = True)
if rightArm == True:
cmds.select(character + ":ik_wrist_r_anim", add = True)
cmds.select(character + ":ik_elbow_r_anim", add = True)
cmds.select(character + ":clavicle_r_anim", add = True)
cmds.select(character + ":fk_arm_r_anim", add = True)
cmds.select(character + ":fk_elbow_r_anim", add = True)
cmds.select(character + ":fk_wrist_r_anim", add = True)
if leftLeg == True:
cmds.select(character + ":ik_foot_anim_l", add = True)
cmds.select(character + ":fk_thigh_l_anim", add = True)
cmds.select(character + ":fk_calf_l_anim", add = True)
cmds.select(character + ":fk_foot_l_anim", add = True)
if cmds.objExists("ball_l"):
cmds.select(character + ":fk_ball_l_anim", add = True)
if rightLeg == True:
cmds.select(character + ":ik_foot_anim_r", add = True)
cmds.select(character + ":fk_thigh_r_anim", add = True)
cmds.select(character + ":fk_calf_r_anim", add = True)
cmds.select(character + ":fk_foot_r_anim", add = True)
if cmds.objExists("ball_r"):
cmds.select(character + ":fk_ball_r_anim", add = True)
#select fingers:
for side in ["l", "r"]:
for finger in ["index", "middle", "ring", "pinky", "thumb"]:
if cmds.objExists(character + ":" + finger + "_finger_fk_ctrl_1_" + side):
cmds.select(character + ":" + finger + "_finger_fk_ctrl_1_" + side, add = True)
if cmds.objExists(character + ":" + finger + "_finger_fk_ctrl_2_" + side):
cmds.select(character + ":" + finger + "_finger_fk_ctrl_2_" + side, add = True)
if cmds.objExists(character + ":" + finger + "_finger_fk_ctrl_3_" + side):
cmds.select(character + ":" + finger + "_finger_fk_ctrl_3_" + side, add = True)
#select extra controls
if len(extraControls) > 0:
for ctrl in extraControls:
cmds.select(ctrl, add = True)
#bake simulation
cmds.bakeResults(simulation = True, t = (firstFrame, lastFrame))
#fix ik knee solve
if importMethod == "IK" or importMethod == "Both":
if leftLeg == True or rightLeg == True:
self.progWindow = cmds.progressWindow(title = "Import Mocap", progress = 10, status = "Solving IK Pole Vectors")
val = cmds.checkBox(self.widgets["kneeSolverCB"], q = True, v = True)
if val == True:
self.ikKneeSolve(character, firstFrame, lastFrame)
val = cmds.checkBox(self.widgets["heelSolverCB"], q = True, v = True)
if val == True:
self.ikHeelSolve(character, firstFrame, lastFrame)
cmds.progressWindow(self.progWindow, endProgress=True)
#apply frame offset
offset = cmds.intField(self.widgets["frameOffsetField"], q = True, value = True)
cmds.select(clear = True)
for control in ["head_fk_anim", "neck_01_fk_anim", "neck_02_fk_anim", "neck_03_fk_anim", "spine_01_anim", "spine_02_anim", "spine_03_anim", "spine_04_anim", "spine_05_anim", "mid_ik_anim", "chest_ik_anim",
"body_anim", "hip_anim", "clavicle_l_anim", "clavicle_r_anim", "fk_arm_l_anim", "fk_arm_r_anim", "fk_elbow_l_anim", "fk_elbow_r_anim", "fk_wrist_l_anim", "fk_wrist_r_anim",
"ik_elbow_l_anim", "ik_elbow_r_anim", "ik_wrist_l_anim", "ik_wrist_r_anim", "fk_thigh_l_anim", "fk_thigh_r_anim", "fk_calf_l_anim", "fk_calf_r_anim", "fk_foot_l_anim", "fk_foot_r_anim",
"fk_ball_l_anim", "fk_ball_r_anim","ik_knee_anim_l", "ik_knee_anim_r", "ik_foot_anim_l", "ik_foot_anim_r", "index_finger_fk_ctrl_1_r", "index_finger_fk_ctrl_2_r", "index_finger_fk_ctrl_3_r",
"index_finger_fk_ctrl_1_l", "index_finger_fk_ctrl_2_l", "index_finger_fk_ctrl_3_l", "middle_finger_fk_ctrl_1_r", "middle_finger_fk_ctrl_2_r", "middle_finger_fk_ctrl_3_r",
"middle_finger_fk_ctrl_1_l", "middle_finger_fk_ctrl_2_l", "middle_finger_fk_ctrl_3_l", "ring_finger_fk_ctrl_1_r", "ring_finger_fk_ctrl_2_r", "ring_finger_fk_ctrl_3_r",
"ring_finger_fk_ctrl_1_l", "ring_finger_fk_ctrl_2_l", "ring_finger_fk_ctrl_3_l", "pinky_finger_fk_ctrl_1_r", "pinky_finger_fk_ctrl_2_r", "pinky_finger_fk_ctrl_3_r",
"pinky_finger_fk_ctrl_1_l", "pinky_finger_fk_ctrl_2_l", "pinky_finger_fk_ctrl_3_l", "thumb_finger_fk_ctrl_1_r", "thumb_finger_fk_ctrl_2_r", "thumb_finger_fk_ctrl_3_r", "thumb_finger_fk_ctrl_1_l", "thumb_finger_fk_ctrl_2_l", "thumb_finger_fk_ctrl_3_l"]:
if cmds.objExists(character + ":" + control):
cmds.select(character + ":" + control, add = True)
cmds.selectKey()
cmds.keyframe(edit = True, r = True, tc = offset)
firstFrame = cmds.findKeyframe("pelvis", which = 'first')
lastFrame = cmds.findKeyframe("pelvis", which = 'last')
cmds.playbackOptions(min = firstFrame, max = lastFrame, animationStartTime = firstFrame, animationEndTime = lastFrame)
#clean up
cmds.select(clear = True)
#delete the old skeleton
cmds.delete("root")
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def importMocap_IK(self, character, head, body, leftArm, rightArm, leftLeg, rightLeg):
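#IK hookup: switch the selected body parts to IK mode, then constrain the IK controls (body, chest/mid
#spine, wrists/elbows, feet) to the duplicated mocap joints. Any additional joints with a matching
#"<joint>_anim" control (beyond the core skeleton list) are also constrained and returned so they get
#baked along with everything else.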
if body == True:
#switch to IK mode
cmds.setAttr(character + ":Rig_Settings.spine_ik", 1)
cmds.setAttr(character + ":Rig_Settings.spine_fk", 0)
#constraints
cmds.parentConstraint("pelvis", character + ":body_anim")
if cmds.objExists(character + ":chest_ik_anim"):
#find highest spine joint
numSpineBones = cmds.getAttr(character + ":Skeleton_Settings.numSpineBones")
if numSpineBones == 5:
endSpine = "spine_05"
midSpine = ["spine_03"]
if numSpineBones == 4:
endSpine = "spine_04"
midSpine = ["spine_02", "spine_03"]
if numSpineBones == 3:
endSpine = "spine_03"
midSpine = ["spine_02"]
cmds.parentConstraint(endSpine, character + ":chest_ik_anim")
for each in midSpine:
cmds.parentConstraint(each, character + ":mid_ik_anim")
if head == True:
cmds.orientConstraint("neck_01", character + ":neck_01_fk_anim")
if cmds.objExists(character + ":neck_02_fk_anim"):
cmds.orientConstraint("neck_02", character + ":neck_02_fk_anim")
if cmds.objExists(character + ":neck_03_fk_anim"):
cmds.orientConstraint("neck_03", character + ":neck_03_fk_anim")
cmds.orientConstraint("head", character + ":head_fk_anim")
if leftArm == True:
#switch to IK mode
cmds.setAttr(character + ":Rig_Settings.lArmMode", 1)
#constraints
cmds.parentConstraint("hand_l", character + ":ik_wrist_l_anim", mo = True)
cmds.pointConstraint("lowerarm_l", character + ":ik_elbow_l_anim")
if rightArm == True:
#switch to IK mode
cmds.setAttr(character + ":Rig_Settings.rArmMode", 1)
#constraints
cmds.parentConstraint("hand_r", character + ":ik_wrist_r_anim", mo = True)
cmds.pointConstraint("lowerarm_r", character + ":ik_elbow_r_anim")
if leftLeg == True:
print "hooking up left leg"
#switch to IK mode
cmds.setAttr(character + ":Rig_Settings.lLegMode", 1)
#constraints
cmds.pointConstraint("foot_l", character + ":ik_foot_anim_l")
constraint = cmds.orientConstraint("foot_l", character + ":ik_foot_anim_l")[0]
cmds.setAttr(constraint + ".offsetY", 90)
if rightLeg == True:
print "hooking up right leg"
#switch to IK mode
cmds.setAttr(character + ":Rig_Settings.rLegMode", 1)
#constraints
cmds.pointConstraint("foot_r", character + ":ik_foot_anim_r", mo = True)
constraint = cmds.orientConstraint("foot_r", character + ":ik_foot_anim_r")[0]
cmds.setAttr(constraint + ".offsetX", 180)
cmds.setAttr(constraint + ".offsetY", 90)
coreJoints = ["root", "pelvis", "spine_01", "spine_02", "spine_03", "spine_04", "spine_05", "neck_01", "neck_02", "neck_03", "upperarm_l", "clavicle_l", "lowerarm_l", "hand_l", "upperarm_r", "clavicle_r", "lowerarm_r", "hand_r", "thigh_l", "calf_l", "foot_l", "ball_l", "thigh_r", "calf_r", "foot_r", "ball_r"]
cmds.select("root", hi = True)
allJoints = cmds.ls(sl = True)
extraControls = []
for joint in allJoints:
if joint not in coreJoints:
if cmds.objExists(character + ":" + joint + "_anim"):
try:
constraint = cmds.parentConstraint(joint, character + ":" + joint + "_anim")
extraControls.append(character + ":" + joint + "_anim")
except:
pass
return extraControls
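# Illustrative sketch (not part of the original tool): a typical follow-up to importMocap_IK is to
# bake the constrained motion onto the rig controls and then remove the constraints. The frame
# range variables and the exact control selection below are assumptions.
#
#   controls = self.importMocap_IK(character, True, True, True, True, True, True)
#   cmds.select(controls)
#   cmds.bakeResults(simulation = True, t = (firstFrame, lastFrame))
#   cmds.delete(constraints = True)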
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def importMocap_FK(self, character, head, body, leftArm, rightArm, leftLeg, rightLeg):
#setup constraints to FK controls
if body == True:
#switch to FK mode
cmds.setAttr(character + ":Rig_Settings.spine_ik", 0)
cmds.setAttr(character + ":Rig_Settings.spine_fk", 1)
#constraints
cmds.parentConstraint("pelvis", character + ":body_anim")
cmds.orientConstraint("spine_01", character + ":spine_01_anim")
cmds.orientConstraint("spine_02", character + ":spine_02_anim")
if cmds.objExists(character + ":spine_03_anim"):
cmds.orientConstraint("spine_03", character + ":spine_03_anim")
if cmds.objExists(character + ":spine_04_anim"):
cmds.orientConstraint("spine_04", character + ":spine_04_anim")
if cmds.objExists(character + ":spine_05_anim"):
cmds.orientConstraint("spine_05", character + ":spine_05_anim")
if head == True:
cmds.orientConstraint("neck_01", character + ":neck_01_fk_anim")
if cmds.objExists(character + ":neck_02_fk_anim"):
cmds.orientConstraint("neck_02", character + ":neck_02_fk_anim")
if cmds.objExists(character + ":neck_03_fk_anim"):
cmds.orientConstraint("neck_03", character + ":neck_03_fk_anim")
cmds.orientConstraint("head", character + ":head_fk_anim")
if leftArm == True:
#switch to FK mode
cmds.setAttr(character + ":Rig_Settings.lArmMode", 0)
cmds.pointConstraint("upperarm_l", character + ":clavicle_l_anim")
cmds.orientConstraint("upperarm_l", character + ":fk_arm_l_anim")
cmds.orientConstraint("lowerarm_l", character + ":fk_elbow_l_anim")
cmds.orientConstraint("hand_l", character + ":fk_wrist_l_anim")
if rightArm == True:
#switch to FK mode
cmds.setAttr(character + ":Rig_Settings.rArmMode", 0)
cmds.pointConstraint("upperarm_r", character + ":clavicle_r_anim")
cmds.orientConstraint("upperarm_r", character + ":fk_arm_r_anim")
cmds.orientConstraint("lowerarm_r", character + ":fk_elbow_r_anim")
cmds.orientConstraint("hand_r", character + ":fk_wrist_r_anim")
if leftLeg == True:
#switch to FK mode
cmds.setAttr(character + ":Rig_Settings.lLegMode", 0)
cmds.orientConstraint("thigh_l", character + ":fk_thigh_l_anim")
cmds.orientConstraint("calf_l", character + ":fk_calf_l_anim")
cmds.orientConstraint("foot_l", character + ":fk_foot_l_anim")
if cmds.objExists("ball_l"):
cmds.orientConstraint("ball_l", character + ":fk_ball_l_anim")
if rightLeg == True:
#switch to FK mode
cmds.setAttr(character + ":Rig_Settings.rLegMode", 0)
cmds.orientConstraint("thigh_r", character + ":fk_thigh_r_anim")
cmds.orientConstraint("calf_r", character + ":fk_calf_r_anim")
cmds.orientConstraint("foot_r", character + ":fk_foot_r_anim")
if cmds.objExists("ball_r"):
cmds.orientConstraint("ball_r", character + ":fk_ball_r_anim")
coreJoints = ["root", "pelvis", "spine_01", "spine_02", "spine_03", "spine_04", "spine_05", "neck_01", "neck_02", "neck_03", "upperarm_l", "clavicle_l", "lowerarm_l", "hand_l", "upperarm_r", "clavicle_r", "lowerarm_r", "hand_r", "thigh_l", "calf_l", "foot_l", "ball_l", "thigh_r", "calf_r", "foot_r", "ball_r"]
cmds.select("root", hi = True)
allJoints = cmds.ls(sl = True)
extraControls = []
for joint in allJoints:
if joint not in coreJoints:
if cmds.objExists(character + ":" + joint + "_anim"):
try:
constraint = cmds.parentConstraint(joint, character + ":" + joint + "_anim")
extraControls.append(character + ":" + joint + "_anim")
except:
pass
return extraControls
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def importMocap_Fingers(self, character, leftArm, rightArm):
if leftArm == True:
for finger in ["index", "middle", "ring", "pinky", "thumb"]:
#switch to FK mode
try:
cmds.setAttr(character + ":" + finger + "_finger_l_mode_anim.FK_IK", 0)
except:
pass
#setup constraints
if cmds.objExists(character + ":" + finger + "_finger_fk_ctrl_1_l"):
cmds.orientConstraint(finger + "_01_l", character + ":" + finger + "_finger_fk_ctrl_1_l")
if cmds.objExists(character + ":" + finger + "_finger_fk_ctrl_2_l"):
cmds.orientConstraint(finger + "_02_l", character + ":" + finger + "_finger_fk_ctrl_2_l")
if cmds.objExists(character + ":" + finger + "_finger_fk_ctrl_3_l"):
cmds.orientConstraint(finger + "_03_l", character + ":" + finger + "_finger_fk_ctrl_3_l")
if rightArm == True:
for finger in ["index", "middle", "ring", "pinky", "thumb"]:
#switch to FK mode
try:
cmds.setAttr(character + ":" + finger + "_finger_r_mode_anim.FK_IK", 0)
except:
pass
#setup constraints
if cmds.objExists(character + ":" + finger + "_finger_fk_ctrl_1_r"):
cmds.orientConstraint(finger + "_01_r", character + ":" + finger + "_finger_fk_ctrl_1_r")
if cmds.objExists(character + ":" + finger + "_finger_fk_ctrl_2_r"):
cmds.orientConstraint(finger + "_02_r", character + ":" + finger + "_finger_fk_ctrl_2_r")
if cmds.objExists(character + ":" + finger + "_finger_fk_ctrl_3_r"):
cmds.orientConstraint(finger + "_03_r", character + ":" + finger + "_finger_fk_ctrl_3_r")
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def getCharacters(self):
referenceNodes = []
references = cmds.ls(type = "reference")
for reference in references:
niceName = reference.rpartition("RN")[0]
suffix = reference.rpartition("RN")[2]
if suffix != "":
if cmds.objExists(niceName + suffix + ":" + "Skeleton_Settings"):
referenceNodes.append(niceName + suffix)
else:
if cmds.objExists(niceName + ":" + "Skeleton_Settings"):
referenceNodes.append(niceName)
for node in referenceNodes:
cmds.menuItem(label = node, parent = self.widgets["importMocap_characterList"])
cmds.menuItem(label = node, parent = self.widgets["importAnim_characterList"])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def changeActiveCharacter(self):
characterName = cmds.optionMenu(self.widgets["importMocap_characterList"], q = True, value = True)
thumbnailPath = os.path.join(self.mayaToolsDir, "General", "Icons", "ART", "Thumbnails", self.project)
thumbs = os.listdir(thumbnailPath)
for thumb in thumbs:
if thumb.find("_small") != -1:
if thumb.find(characterName) == 0:
cmds.image(self.widgets["importMocap_characterThumb"], edit = True, image = thumbnailPath + thumb, ann = characterName)
cmds.image(self.widgets["importAnim_characterThumb"], edit = True, image = thumbnailPath + thumb, ann = characterName)
| 82,725 | 82,725 | 0.473388 |
7953463774785daa6d20c22092ea902dcf1af37d | 825 | py | Python | src/test/python/test_servo.py | SmartDogHouse/SmartDogHouse-Software | 00caf23e3fe8dae2d30c339f801d7c1a7ddca0c2 | ["MIT"] | stars: 4 (2021-05-17T10:42:48.000Z to 2021-08-09T15:51:28.000Z) | issues: 148 (2021-05-17T10:03:52.000Z to 2021-07-25T15:44:10.000Z) | forks: 1 (2021-06-27T17:25:20.000Z to 2021-06-27T17:25:20.000Z) |
import unittest
from src.main.python.servo import Servo
class TestServo(unittest.TestCase):
servo = Servo(99)
def test_create(self):
s2 = Servo(34)
self.assertEqual(self.servo.get_angle(), s2.get_angle())
def test_angle(self):
s3 = Servo(3, range_min=30, range_max=130, frequency=50)
self.assertEqual(self.servo.get_angle(), s3.get_angle())
self.assertEqual(self.servo.get_angle(), 0)
# move
self.assertTrue(s3.angle(50))
self.assertTrue(s3.angle(110))
# out of range
self.assertFalse(s3.angle(0))
self.assertFalse(s3.angle(200))
def test_printSensor(self):
s4 = Servo(3, range_min=50, range_max=7, frequency=50)
print(s4)
print(self.servo)
if __name__ == '__main__':
unittest.main()
| 25 | 64 | 0.630303 |
7953463e6242e16dea715f8815d795b86f30ac37 | 24,953 | py | Python | efficientdet/inference.py | vincent7293/automl | 34279e956ec30877beaec0fc73acd5071ad0a8fd | ["Apache-2.0"] | stars: null | issues: null | forks: null |
# Copyright 2020 Google Research. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""Inference related utilities."""
from __future__ import absolute_import
from __future__ import division
# gtype import
from __future__ import print_function
import copy
import os
import time
from absl import logging
import numpy as np
from PIL import Image
import tensorflow.compat.v1 as tf
from typing import Text, Dict, Any, List, Tuple, Union
import anchors
import dataloader
import det_model_fn
import hparams_config
import utils
from visualize import vis_utils
coco_id_mapping = {
1: 'person', 2: 'bicycle', 3: 'car', 4: 'motorcycle', 5: 'airplane',
6: 'bus', 7: 'train', 8: 'truck', 9: 'boat', 10: 'traffic light',
11: 'fire hydrant', 13: 'stop sign', 14: 'parking meter', 15: 'bench',
16: 'bird', 17: 'cat', 18: 'dog', 19: 'horse', 20: 'sheep', 21: 'cow',
22: 'elephant', 23: 'bear', 24: 'zebra', 25: 'giraffe', 27: 'backpack',
28: 'umbrella', 31: 'handbag', 32: 'tie', 33: 'suitcase', 34: 'frisbee',
35: 'skis', 36: 'snowboard', 37: 'sports ball', 38: 'kite',
39: 'baseball bat', 40: 'baseball glove', 41: 'skateboard', 42: 'surfboard',
43: 'tennis racket', 44: 'bottle', 46: 'wine glass', 47: 'cup', 48: 'fork',
49: 'knife', 50: 'spoon', 51: 'bowl', 52: 'banana', 53: 'apple',
54: 'sandwich', 55: 'orange', 56: 'broccoli', 57: 'carrot', 58: 'hot dog',
59: 'pizza', 60: 'donut', 61: 'cake', 62: 'chair', 63: 'couch',
64: 'potted plant', 65: 'bed', 67: 'dining table', 70: 'toilet', 72: 'tv',
73: 'laptop', 74: 'mouse', 75: 'remote', 76: 'keyboard', 77: 'cell phone',
78: 'microwave', 79: 'oven', 80: 'toaster', 81: 'sink', 82: 'refrigerator',
84: 'book', 85: 'clock', 86: 'vase', 87: 'scissors', 88: 'teddy bear',
89: 'hair drier', 90: 'toothbrush',
} # pyformat: disable
def image_preprocess(image, image_size: Union[int, Tuple[int, int]]):
"""Preprocess image for inference.
Args:
image: input image, can be a tensor or a numpy array.
image_size: single integer of image size for square image or tuple of two
integers, in the format of (image_height, image_width).
Returns:
(image, scale): a tuple of processed image and its scale.
"""
input_processor = dataloader.DetectionInputProcessor(image, image_size)
input_processor.normalize_image()
input_processor.set_scale_factors_to_output_size()
image = input_processor.resize_and_crop_image()
image_scale = input_processor.image_scale_to_original
return image, image_scale
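# Illustrative usage sketch (assumption, not part of this module): preprocessing a single image
# file for a model that takes a 512x512 input. The file path is hypothetical.
#
#   img = Image.open('/tmp/example.jpg')
#   image, scale = image_preprocess(img, image_size=512)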
def build_inputs(image_path_pattern: Text,
image_size: Union[int, Tuple[int, int]]):
"""Read and preprocess input images.
Args:
image_path_pattern: a path to indicate a single or multiple files.
image_size: single integer of image size for square image or tuple of two
integers, in the format of (image_height, image_width).
Returns:
(raw_images, images, scales): raw images, processed images, and scales.
Raises:
ValueError if image_path_pattern doesn't match any file.
"""
raw_images, images, scales = [], [], []
for f in tf.io.gfile.glob(image_path_pattern):
image = Image.open(f)
raw_images.append(image)
image, scale = image_preprocess(image, image_size)
images.append(image)
scales.append(scale)
if not images:
raise ValueError(
'Cannot find any images for pattern {}'.format(image_path_pattern))
return raw_images, tf.stack(images), tf.stack(scales)
def build_model(model_name: Text, inputs: tf.Tensor, **kwargs):
"""Build model for a given model name.
Args:
model_name: the name of the model.
inputs: an image tensor or a numpy array.
**kwargs: extra parameters for model builder.
Returns:
(class_outputs, box_outputs): the outputs for class and box predictions.
Each is a dictionary with key as feature level and value as predictions.
"""
model_arch = det_model_fn.get_model_arch(model_name)
class_outputs, box_outputs = model_arch(inputs, model_name, **kwargs)
return class_outputs, box_outputs
def restore_ckpt(sess, ckpt_path, enable_ema=True, export_ckpt=None):
"""Restore variables from a given checkpoint.
Args:
sess: a tf session for restoring or exporting models.
ckpt_path: the path of the checkpoint. Can be a file path or a folder path.
enable_ema: whether to reload ema values or not.
export_ckpt: if set, the path to export the restored model to.
"""
sess.run(tf.global_variables_initializer())
if tf.io.gfile.isdir(ckpt_path):
ckpt_path = tf.train.latest_checkpoint(ckpt_path)
if enable_ema:
ema = tf.train.ExponentialMovingAverage(decay=0.0)
ema_vars = utils.get_ema_vars()
var_dict = ema.variables_to_restore(ema_vars)
ema_assign_op = ema.apply(ema_vars)
else:
var_dict = utils.get_ema_vars()
ema_assign_op = None
tf.train.get_or_create_global_step()
sess.run(tf.global_variables_initializer())
saver = tf.train.Saver(var_dict, max_to_keep=1)
saver.restore(sess, ckpt_path)
if export_ckpt:
print('export model to {}'.format(export_ckpt))
if ema_assign_op is not None:
sess.run(ema_assign_op)
saver = tf.train.Saver(max_to_keep=1, save_relative_paths=True)
saver.save(sess, export_ckpt)
def det_post_process(params: Dict[Any, Any],
cls_outputs: Dict[int, tf.Tensor],
box_outputs: Dict[int, tf.Tensor],
scales: List[float],
min_score_thresh,
max_boxes_to_draw):
"""Post preprocessing the box/class predictions.
Args:
params: a parameter dictionary that includes `min_level`, `max_level`,
`batch_size`, and `num_classes`.
cls_outputs: an OrderedDict with keys representing levels and values
representing logits in [batch_size, height, width, num_anchors].
box_outputs: an OrderedDict with keys representing levels and values
representing box regression targets in [batch_size, height, width,
num_anchors * 4].
scales: a list of float values indicating image scale.
min_score_thresh: A float representing the threshold for deciding when to
remove boxes based on score.
max_boxes_to_draw: Max number of boxes to draw.
Returns:
detections_batch: a batch of detection results. Each detection is a tensor
with each row representing [image_id, x, y, width, height, score, class].
"""
# TODO(tanmingxing): refactor the code to make it more explicit.
outputs = {
'cls_outputs_all': [None],
'box_outputs_all': [None],
'indices_all': [None],
'classes_all': [None]
}
det_model_fn.add_metric_fn_inputs(
params, cls_outputs, box_outputs, outputs, -1)
# Create anchor_label for picking top-k predictions.
eval_anchors = anchors.Anchors(params['min_level'], params['max_level'],
params['num_scales'], params['aspect_ratios'],
params['anchor_scale'], params['image_size'])
anchor_labeler = anchors.AnchorLabeler(eval_anchors, params['num_classes'])
# Add all detections for each input image.
detections_batch = []
for index in range(params['batch_size']):
cls_outputs_per_sample = outputs['cls_outputs_all'][index]
box_outputs_per_sample = outputs['box_outputs_all'][index]
indices_per_sample = outputs['indices_all'][index]
classes_per_sample = outputs['classes_all'][index]
detections = anchor_labeler.generate_detections(
cls_outputs_per_sample,
box_outputs_per_sample,
indices_per_sample,
classes_per_sample,
image_id=[index],
image_scale=[scales[index]],
min_score_thresh=min_score_thresh,
max_boxes_to_draw=max_boxes_to_draw,
disable_pyfun=params.get('disable_pyfun'))
detections_batch.append(detections)
return tf.stack(detections_batch, name='detections')
def visualize_image(image,
boxes,
classes,
scores,
id_mapping,
min_score_thresh=anchors.MIN_SCORE_THRESH,
max_boxes_to_draw=anchors.MAX_DETECTIONS_PER_IMAGE,
line_thickness=2,
**kwargs):
"""Visualizes a given image.
Args:
image: an image with shape [H, W, C].
boxes: a box prediction with shape [N, 4] ordered [ymin, xmin, ymax, xmax].
classes: a class prediction with shape [N].
scores: A list of float value with shape [N].
id_mapping: a dictionary from class id to name.
min_score_thresh: minimal score for showing. If class probability is below
this threshold, then the object will not show up.
max_boxes_to_draw: maximum bounding box to draw.
line_thickness: how thick is the bounding box line.
**kwargs: extra parameters.
Returns:
output_image: an output image with annotated boxes and classes.
"""
category_index = {k: {'id': k, 'name': id_mapping[k]} for k in id_mapping}
img = np.array(image)
vis_utils.visualize_boxes_and_labels_on_image_array(
img,
boxes,
classes,
scores,
category_index,
min_score_thresh=min_score_thresh,
max_boxes_to_draw=max_boxes_to_draw,
line_thickness=line_thickness,
**kwargs)
return img
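# Illustrative usage sketch (assumption): drawing post-processed detections onto the original
# image. `raw_image`, `boxes`, `classes` and `scores` are hypothetical arrays in the formats
# described above.
#
#   annotated = visualize_image(raw_image, boxes, classes, scores, coco_id_mapping,
#                               min_score_thresh=0.4)
#   Image.fromarray(annotated).save('/tmp/annotated.jpg')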
class ServingDriver(object):
"""A driver for serving single or batch images.
This driver supports serving with image files or arrays, with configurable
batch size.
Example 1. Serving streaming image contents:
driver = inference.ServingDriver(
'efficientdet-d0', '/tmp/efficientdet-d0', batch_size=1)
driver.build()
for m in image_iterator():
predictions = driver.serve_files([m])
driver.visualize(m, predictions[0])
# m is the new image with annotated boxes.
Example 2. Serving batch image contents:
imgs = []
for f in ['/tmp/1.jpg', '/tmp/2.jpg']:
imgs.append(np.array(Image.open(f)))
driver = inference.ServingDriver(
'efficientdet-d0', '/tmp/efficientdet-d0', batch_size=len(imgs))
driver.build()
predictions = driver.serve_images(imgs)
for i in range(len(imgs)):
driver.visualize(imgs[i], predictions[i])
Example 3: another way is to use SavedModel:
# step1: export a model.
driver = inference.ServingDriver('efficientdet-d0', '/tmp/efficientdet-d0')
driver.build()
driver.export('/tmp/saved_model_path')
# step2: Serve a model.
with tf.Session() as sess:
tf.saved_model.load(sess, ['serve'], '/tmp/saved_model_path')
raw_images = []
for f in tf.io.gfile.glob('/tmp/images/*.jpg'):
raw_images.append(np.array(PIL.Image.open(f)))
detections = sess.run('detections:0', {'image_arrays:0': raw_images})
driver = inference.ServingDriver(
'efficientdet-d0', '/tmp/efficientdet-d0')
driver.visualize(raw_images[0], detections[0])
PIL.Image.fromarray(raw_images[0]).save(output_image_path)
"""
def __init__(self,
model_name: Text,
ckpt_path: Text,
image_size: Union[int, Tuple[int, int]] = None,
batch_size: int = 1,
num_classes: int = None,
enable_ema: bool = True,
label_id_mapping: Dict[int, Text] = None,
use_xla: bool = False,
data_format: Text = None,
min_score_thresh: float = None,
max_boxes_to_draw: float = None,
line_thickness: int = None):
"""Initialize the inference driver.
Args:
model_name: target model name, such as efficientdet-d0.
ckpt_path: checkpoint path, such as /tmp/efficientdet-d0/.
image_size: single integer of image size for square image or tuple of two
integers, in the format of (image_height, image_width). If None, use the
default image size defined by model_name.
batch_size: batch size for inference.
num_classes: number of classes. If None, use the default COCO classes.
enable_ema: whether to enable moving average.
label_id_mapping: a dictionary from id to name. If None, use the default
coco_id_mapping (with 90 classes).
use_xla: Whether run with xla optimization.
data_format: data format such as 'channels_last'.
min_score_thresh: minimal score threshold for filtering predictions.
max_boxes_to_draw: the maximum number of boxes per image.
line_thickness: the line thickness for drawing boxes.
"""
self.model_name = model_name
self.ckpt_path = ckpt_path
self.batch_size = batch_size
self.label_id_mapping = label_id_mapping or coco_id_mapping
self.params = hparams_config.get_detection_config(self.model_name).as_dict()
self.params.update(dict(is_training_bn=False, use_bfloat16=False))
if image_size:
self.params.update(dict(image_size=image_size))
if num_classes:
self.params.update(dict(num_classes=num_classes))
if data_format:
self.params.update(dict(data_format=data_format))
self.signitures = None
self.sess = None
self.disable_pyfun = True
self.enable_ema = enable_ema
self.use_xla = use_xla
self.min_score_thresh = min_score_thresh or anchors.MIN_SCORE_THRESH
self.max_boxes_to_draw = (
max_boxes_to_draw or anchors.MAX_DETECTIONS_PER_IMAGE)
self.line_thickness = line_thickness
def __del__(self):
if self.sess:
self.sess.close()
def _build_session(self):
sess_config = tf.ConfigProto()
if self.use_xla:
sess_config.graph_options.optimizer_options.global_jit_level = (
tf.OptimizerOptions.ON_2)
return tf.Session(config=sess_config)
def build(self, params_override=None):
"""Build model and restore checkpoints."""
params = copy.deepcopy(self.params)
if params_override:
params.update(params_override)
if not self.sess:
self.sess = self._build_session()
with self.sess.graph.as_default():
image_files = tf.placeholder(tf.string, name='image_files', shape=[None])
image_size = params['image_size']
raw_images = []
for i in range(self.batch_size):
image = tf.io.decode_image(image_files[i])
image.set_shape([None, None, None])
raw_images.append(image)
raw_images = tf.stack(raw_images, name='image_arrays')
scales, images = [], []
for i in range(self.batch_size):
image, scale = image_preprocess(raw_images[i], image_size)
scales.append(scale)
images.append(image)
scales = tf.stack(scales)
images = tf.stack(images)
if params['data_format'] == 'channels_first':
images = tf.transpose(images, [0, 3, 1, 2])
class_outputs, box_outputs = build_model(self.model_name, images,
**params)
params.update(
dict(batch_size=self.batch_size, disable_pyfun=self.disable_pyfun))
detections = det_post_process(
params,
class_outputs,
box_outputs,
scales,
self.min_score_thresh,
self.max_boxes_to_draw)
restore_ckpt(
self.sess,
self.ckpt_path,
enable_ema=self.enable_ema,
export_ckpt=None)
self.signitures = {
'image_files': image_files,
'image_arrays': raw_images,
'prediction': detections,
}
return self.signitures
def visualize(self, image, predictions, **kwargs):
"""Visualize predictions on image.
Args:
image: Image content in shape of [height, width, 3].
predictions: a list of vectors, each of which has the format of
[image_id, x, y, width, height, score, class].
**kwargs: extra parameters for visualization, such as
min_score_thresh, max_boxes_to_draw, and line_thickness.
Returns:
annotated image.
"""
boxes = predictions[:, 1:5]
classes = predictions[:, 6].astype(int)
scores = predictions[:, 5]
# This is not needed if disable_pyfun=True
# convert [x, y, width, height] to [ymin, xmin, ymax, xmax]
# TODO(tanmingxing): make this conversion more efficient.
if not self.disable_pyfun:
boxes[:, [0, 1, 2, 3]] = boxes[:, [1, 0, 3, 2]]
boxes[:, 2:4] += boxes[:, 0:2]
return visualize_image(image, boxes, classes, scores, self.label_id_mapping,
**kwargs)
def serve_files(self, image_files: List[Text]):
"""Serve a list of input image files.
Args:
image_files: a list of image files with shape [1] and type string.
Returns:
A list of detections.
"""
if not self.sess:
self.build()
predictions = self.sess.run(
self.signitures['prediction'],
feed_dict={self.signitures['image_files']: image_files})
return predictions
def benchmark(self, image_arrays, trace_filename=None):
"""Benchmark inference latency/throughput.
Args:
image_arrays: a numpy array of image content.
trace_filename: If not None, the filename for saving the trace.
"""
if not self.sess:
self.build()
# init session
self.sess.run(
self.signitures['prediction'],
feed_dict={self.signitures['image_arrays']: image_arrays})
start = time.perf_counter()
for _ in range(10):
self.sess.run(
self.signitures['prediction'],
feed_dict={self.signitures['image_arrays']: image_arrays})
end = time.perf_counter()
inference_time = (end-start) / 10
print('Inference time: ', inference_time)
print('FPS: ', 1 / inference_time)
if trace_filename:
run_options = tf.RunOptions()
run_options.trace_level = tf.RunOptions.FULL_TRACE
run_metadata = tf.RunMetadata()
self.sess.run(
self.signitures['prediction'],
feed_dict={self.signitures['image_arrays']: image_arrays},
options=run_options, run_metadata=run_metadata)
with tf.io.gfile.GFile(trace_filename, 'w') as trace_file:
from tensorflow.python.client import timeline # pylint: disable=g-direct-tensorflow-import,g-import-not-at-top
trace = timeline.Timeline(step_stats=run_metadata.step_stats)
trace_file.write(
trace.generate_chrome_trace_format(show_memory=True))
def serve_images(self, image_arrays):
"""Serve a list of image arrays.
Args:
image_arrays: A list of image contents, each with shape [height,
width, 3] and uint8 type.
Returns:
A list of detections.
"""
if not self.sess:
self.build()
predictions = self.sess.run(
self.signitures['prediction'],
feed_dict={self.signitures['image_arrays']: image_arrays})
return predictions
def load(self, saved_model_dir):
if not self.sess:
self.sess = self._build_session()
self.signitures = {
'image_files': 'image_files:0',
'image_arrays': 'image_arrays:0',
'prediction': 'detections:0',
}
return tf.saved_model.load(self.sess, ['serve'], saved_model_dir)
def export(self, output_dir):
"""Export a saved model."""
signitures = self.signitures
signature_def_map = {
'serving_default':
tf.saved_model.predict_signature_def(
{signitures['image_arrays'].name: signitures['image_arrays']},
{signitures['prediction'].name: signitures['prediction']}),
'serving_base64':
tf.saved_model.predict_signature_def(
{signitures['image_files'].name: signitures['image_files']},
{signitures['prediction'].name: signitures['prediction']}),
}
b = tf.saved_model.Builder(output_dir)
b.add_meta_graph_and_variables(
self.sess,
tags=['serve'],
signature_def_map=signature_def_map,
assets_collection=tf.get_collection(tf.GraphKeys.ASSET_FILEPATHS),
clear_devices=True)
b.save()
logging.info('Model saved at %s', output_dir)
class InferenceDriver(object):
"""A driver for doing batch inference.
Example usage:
driver = inference.InferenceDriver('efficientdet-d0', '/tmp/efficientdet-d0')
driver.inference('/tmp/*.jpg', '/tmp/outputdir')
"""
def __init__(self,
model_name: Text,
ckpt_path: Text,
image_size: Union[int, Tuple[int, int]] = None,
num_classes: int = None,
enable_ema: bool = True,
data_format: Text = None,
label_id_mapping: Dict[int, Text] = None):
"""Initialize the inference driver.
Args:
model_name: target model name, such as efficientdet-d0.
ckpt_path: checkpoint path, such as /tmp/efficientdet-d0/.
image_size: user specified image size. If None, use the default image size
defined by model_name.
num_classes: number of classes. If None, use the default COCO classes.
enable_ema: whether to enable moving average.
data_format: data format such as 'channels_last'.
label_id_mapping: a dictionary from id to name. If None, use the default
coco_id_mapping (with 90 classes).
"""
self.model_name = model_name
self.ckpt_path = ckpt_path
self.label_id_mapping = label_id_mapping or coco_id_mapping
self.params = hparams_config.get_detection_config(self.model_name).as_dict()
self.params.update(dict(is_training_bn=False, use_bfloat16=False))
if image_size:
self.params.update(dict(image_size=image_size))
if num_classes:
self.params.update(dict(num_classes=num_classes))
if data_format:
self.params.update(dict(data_format=data_format))
self.disable_pyfun = True
self.enable_ema = enable_ema
def inference(self, image_path_pattern: Text, output_dir: Text, **kwargs):
"""Read and preprocess input images.
Args:
image_path_pattern: Image file pattern such as /tmp/img*.jpg
output_dir: the directory for output images. Output images will be named
as 0.jpg, 1.jpg, ....
**kwargs: extra parameters for visualization, such as
min_score_thresh, max_boxes_to_draw, and line_thickness.
Returns:
Detection results for all images; annotated copies are written to output_dir.
"""
params = copy.deepcopy(self.params)
with tf.Session() as sess:
# Build inputs and preprocessing.
raw_images, images, scales = build_inputs(image_path_pattern,
params['image_size'])
if params['data_format'] == 'channels_first':
images = tf.transpose(images, [0, 3, 1, 2])
# Build model.
class_outputs, box_outputs = build_model(self.model_name, images,
**self.params)
restore_ckpt(
sess, self.ckpt_path, enable_ema=self.enable_ema, export_ckpt=None)
# for postprocessing.
params.update(
dict(batch_size=len(raw_images), disable_pyfun=self.disable_pyfun))
# Build postprocessing.
detections_batch = det_post_process(
params,
class_outputs,
box_outputs,
scales,
min_score_thresh=kwargs.get('min_score_thresh',
anchors.MIN_SCORE_THRESH),
max_boxes_to_draw=kwargs.get('max_boxes_to_draw',
anchors.MAX_DETECTIONS_PER_IMAGE))
outputs_np = sess.run(detections_batch)
# Visualize results.
for i, output_np in enumerate(outputs_np):
# output_np has format [image_id, y, x, height, width, score, class]
boxes = output_np[:, 1:5]
classes = output_np[:, 6].astype(int)
scores = output_np[:, 5]
# This is not needed if disable_pyfun=True
# convert [x, y, width, height] to [ymin, xmin, ymax, xmax]
# TODO(tanmingxing): make this conversion more efficient.
if not self.disable_pyfun:
boxes[:, [0, 1, 2, 3]] = boxes[:, [1, 0, 3, 2]]
boxes[:, 2:4] += boxes[:, 0:2]
img = visualize_image(raw_images[i], boxes, classes, scores,
self.label_id_mapping, **kwargs)
output_image_path = os.path.join(output_dir, str(i) + '.jpg')
Image.fromarray(img).save(output_image_path)
logging.info('writing file to %s', output_image_path)
return outputs_np
| 37.187779 | 119 | 0.658999 |
795347be61aeb59ccc4ff862b0c0d71efc5e0dfb | 2,387 | py | Python | setup.py | dmontaner/pandas_save_profiler | 2bda6174a901caaeb30046ab5ebac9d78308ce9a | ["MIT"] | stars: null | issues: null | forks: null |
import os
from setuptools import setup
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
with open(os.path.join(here, 'src', 'pandas_save_profiler', '__init__.py'), encoding='utf-8') as f:
init_lines = f.readlines()
init_version = [x for x in init_lines if x.startswith('__version__')][0]
init_version = init_version.split('=')[1].replace('"', '').replace("'", "").strip()
setup(
name='pandas_save_profiler',
license='MIT',
description='Tools to evaluate pandas performance when saving dataframes in different file formats.',
version=init_version,
long_description=long_description,
long_description_content_type='text/markdown',
author='David Montaner',
author_email='david.montaner@gmail.com',
url='https://github.com/dmontaner/pandas_save_profiler',
packages=['pandas_save_profiler'],
package_dir={'': 'src'},
project_urls={
'Source Code' : 'https://github.com/dmontaner/pandas_save_profiler',
'Documentation': 'https://github.com/dmontaner/pandas_save_profiler',
'Issue Tracker': 'https://github.com/dmontaner/pandas_save_profiler/issues',
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: Unix',
'Operating System :: POSIX',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Utilities',
'Topic :: Scientific/Engineering',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
],
keywords=[
'pandas',
'save',
'profile',
],
python_requires='>=3.6',
install_requires=[
'pandas',
'humanize',
'memory_profiler>=0.57.0',
'pyarrow',
'SQLAlchemy',
'xlwt', 'xlrd', 'openpyxl',
],
)
| 36.723077 | 105 | 0.623377 |
7953480ae0a491a9c97c11777c966a853bcb7580 | 1,501 | py | Python | appengine/findit/infra_api_clients/isolate/isolate_util.py | allaparthi/monorail | e18645fc1b952a5a6ff5f06e0c740d75f1904473 | ["BSD-3-Clause"] | stars: 2 (2021-04-13T21:22:18.000Z to 2021-09-07T02:11:57.000Z) | issues: 21 (2020-09-06T02:41:05.000Z to 2022-03-02T04:40:01.000Z) | forks: null |
# Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Client library to interact with isolate API."""
import base64
import json
import zlib
from infra_api_clients import http_client_util
def FetchFileFromIsolatedServer(digest, name_space, isolated_server,
http_client):
"""Sends retrieve request to isolated server and returns response content.
Args:
digest(str): Hash to file for retrieve request.
name_space(str): Name space info for retrieve request.
isolated_server(str): Host to isolate server.
http_client(RetryHttpClient): http client to send the request.
"""
post_data = {'digest': digest, 'namespace': {'namespace': name_space}}
url = '%s/_ah/api/isolateservice/v1/retrieve' % isolated_server
content, error = http_client_util.SendRequestToServer(
url, http_client, post_data=post_data)
if error:
return None, error
json_content = json.loads(content)
file_url = json_content.get('url')
error = None
assert file_url or json_content.get(
'content'), 'Response from isolate is missing both url and content.'
if file_url:
compressed_content, error = http_client_util.SendRequestToServer(
file_url, http_client)
else:
compressed_content = base64.b64decode(json_content['content'])
return zlib.decompress(
compressed_content) if compressed_content else None, error
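# Illustrative usage sketch (assumption, not part of this module): the digest and namespace
# would normally come from a swarming task's isolated output reference; the server URL and
# digest below are hypothetical.
#
#   content, error = FetchFileFromIsolatedServer(
#       'deadbeef...', 'default-gzip', 'https://isolateserver.appspot.com', http_client)
#   if not error:
#       data = json.loads(content)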
| 34.113636 | 76 | 0.734843 |
7953487d46fb5d2ae1081d6b9e98d1f8e9e90990 | 27,653 | py | Python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_12_01/aio/operations_async/_express_route_ports_operations_async.py | LianwMS/azure-sdk-for-python | 612d7bca9de86ee1bd1fa59291d7bf897ba9213f | ["MIT"] | stars: 2 (2019-05-17T21:24:53.000Z to 2020-02-12T11:13:42.000Z) | issues: 15 (2019-07-12T18:18:04.000Z to 2019-07-25T20:55:51.000Z) | forks: 2 (2020-05-21T22:51:22.000Z to 2020-05-26T20:53:01.000Z) |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ExpressRoutePortsOperations:
"""ExpressRoutePortsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2018_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial(
self,
resource_group_name: str,
express_route_port_name: str,
**kwargs
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-12-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'expressRoutePortName': self._serialize.url("express_route_port_name", express_route_port_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
# Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ExpressRoutePorts/{expressRoutePortName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
express_route_port_name: str,
**kwargs
) -> None:
"""Deletes the specified ExpressRoutePort resource.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param express_route_port_name: The name of the ExpressRoutePort resource.
:type express_route_port_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: None, or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
express_route_port_name=express_route_port_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ExpressRoutePorts/{expressRoutePortName}'} # type: ignore
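# Illustrative usage sketch (assumption, not generated code): callers typically await the
# long-running poller returned by begin_delete; the client variable and resource names are
# hypothetical.
#
#   poller = await network_client.express_route_ports.begin_delete('my_rg', 'my_port')
#   await poller.result()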
async def get(
self,
resource_group_name: str,
express_route_port_name: str,
**kwargs
) -> "models.ExpressRoutePort":
"""Retrieves the requested ExpressRoutePort resource.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param express_route_port_name: The name of ExpressRoutePort.
:type express_route_port_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ExpressRoutePort, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2018_12_01.models.ExpressRoutePort
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.ExpressRoutePort"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-12-01"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'expressRoutePortName': self._serialize.url("express_route_port_name", express_route_port_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = 'application/json'
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ExpressRoutePort', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ExpressRoutePorts/{expressRoutePortName}'} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
express_route_port_name: str,
parameters: "models.ExpressRoutePort",
**kwargs
) -> "models.ExpressRoutePort":
cls = kwargs.pop('cls', None) # type: ClsType["models.ExpressRoutePort"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-12-01"
content_type = kwargs.pop("content_type", "application/json")
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'expressRoutePortName': self._serialize.url("express_route_port_name", express_route_port_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = 'application/json'
# Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'ExpressRoutePort')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ExpressRoutePort', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('ExpressRoutePort', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ExpressRoutePorts/{expressRoutePortName}'} # type: ignore
async def begin_create_or_update(
self,
resource_group_name: str,
express_route_port_name: str,
parameters: "models.ExpressRoutePort",
**kwargs
) -> "models.ExpressRoutePort":
"""Creates or updates the specified ExpressRoutePort resource.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param express_route_port_name: The name of the ExpressRoutePort resource.
:type express_route_port_name: str
:param parameters: Parameters supplied to the create ExpressRoutePort operation.
:type parameters: ~azure.mgmt.network.v2018_12_01.models.ExpressRoutePort
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: ExpressRoutePort, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2018_12_01.models.ExpressRoutePort
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.ExpressRoutePort"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
express_route_port_name=express_route_port_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ExpressRoutePort', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ExpressRoutePorts/{expressRoutePortName}'} # type: ignore
async def _update_tags_initial(
self,
resource_group_name: str,
express_route_port_name: str,
parameters: "models.TagsObject",
**kwargs
) -> "models.ExpressRoutePort":
cls = kwargs.pop('cls', None) # type: ClsType["models.ExpressRoutePort"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-12-01"
content_type = kwargs.pop("content_type", "application/json")
# Construct URL
url = self._update_tags_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'expressRoutePortName': self._serialize.url("express_route_port_name", express_route_port_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = 'application/json'
# Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ExpressRoutePort', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_tags_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ExpressRoutePorts/{expressRoutePortName}'} # type: ignore
async def begin_update_tags(
self,
resource_group_name: str,
express_route_port_name: str,
parameters: "models.TagsObject",
**kwargs
) -> "models.ExpressRoutePort":
"""Update ExpressRoutePort tags.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param express_route_port_name: The name of the ExpressRoutePort resource.
:type express_route_port_name: str
:param parameters: Parameters supplied to update ExpressRoutePort resource tags.
:type parameters: ~azure.mgmt.network.v2018_12_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: ExpressRoutePort, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2018_12_01.models.ExpressRoutePort
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.ExpressRoutePort"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._update_tags_initial(
resource_group_name=resource_group_name,
express_route_port_name=express_route_port_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ExpressRoutePort', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ExpressRoutePorts/{expressRoutePortName}'} # type: ignore
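    # Usage sketch (an assumption, not from this file): given an already constructed
    # async NetworkManagementClient named `client`, the poller returned above could be
    # driven as:
    #     poller = await client.express_route_ports.begin_update_tags(
    #         "my_rg", "my_port", models.TagsObject(tags={"env": "dev"}))
    #     port = await poller.result()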
def list_by_resource_group(
self,
resource_group_name: str,
**kwargs
) -> AsyncIterable["models.ExpressRoutePortListResult"]:
"""List all the ExpressRoutePort resources in the specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ExpressRoutePortListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2018_12_01.models.ExpressRoutePortListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.ExpressRoutePortListResult"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-12-01"
def prepare_request(next_link=None):
if not next_link:
# Construct URL
url = self.list_by_resource_group.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = 'application/json'
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('ExpressRoutePortListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ExpressRoutePorts'} # type: ignore
def list(
self,
**kwargs
) -> AsyncIterable["models.ExpressRoutePortListResult"]:
"""List all the ExpressRoutePort resources in the specified subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ExpressRoutePortListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2018_12_01.models.ExpressRoutePortListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.ExpressRoutePortListResult"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-12-01"
def prepare_request(next_link=None):
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = 'application/json'
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('ExpressRoutePortListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/ExpressRoutePorts'} # type: ignore
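    # Usage sketch (names assumed): both list helpers above return an AsyncItemPaged,
    # so results are consumed with async iteration, e.g.:
    #     async for port in client.express_route_ports.list_by_resource_group("my_rg"):
    #         print(port.name)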
| 48.599297 | 203 | 0.67103 |
795348a3139d9167a31a49489ab19707a24593b5 | 705 | py | Python
cone_detector/bluetooth_communication/bluetooth_client.py | Art31/trekking-pro-cefetrj | 37ab58759b42978cbd8d950bd75c487e1292cb2b | ["Apache-1.1"] | null | null | null
cone_detector/bluetooth_communication/bluetooth_client.py | Art31/trekking-pro-cefetrj | 37ab58759b42978cbd8d950bd75c487e1292cb2b | ["Apache-1.1"] | null | null | null
cone_detector/bluetooth_communication/bluetooth_client.py | Art31/trekking-pro-cefetrj | 37ab58759b42978cbd8d950bd75c487e1292cb2b | ["Apache-1.1"] | null | null | null
# --------------------------------------------------------------------------- #
# Title: Bluetooth communication client script
# Author: Arthur Telles
# Date: 02/07/2018 (DD/MM/YYYY)
# Description: This script opens up a port for Bluetooth communication
#              and sends a message to the server.
# --------------------------------------------------------------------------- #
# Taken from http://blog.kevindoran.co/bluetooth-programming-with-python-3/
import bluetooth
serverMACAddress = '4C:34:88:32:82:68'
port = 5
s = bluetooth.BluetoothSocket(bluetooth.RFCOMM)
s.connect((serverMACAddress, port))
while 1:
text = "trekking works"
if text == "quit":
break
s.send(text)
s.close()
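# --------------------------------------------------------------------------- #
# Sketch (an assumption, not part of the original script): a minimal matching
# RFCOMM server using the same PyBluez API and port, shown only to illustrate
# the other end of the link; it prints whatever the client sends.
# --------------------------------------------------------------------------- #
# server = bluetooth.BluetoothSocket(bluetooth.RFCOMM)
# server.bind(("", port))
# server.listen(1)
# client_sock, address = server.accept()
# while True:
#     data = client_sock.recv(1024)
#     if not data:
#         break
#     print(data)
# client_sock.close()
# server.close()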
| 30.652174 | 79 | 0.560284 |
795348d900c86491b7f8c25521aad6105ade0c0e | 14,602 | py | Python
tests/components/synology_dsm/test_config_flow.py | thomasloven/home-assistant | 90fbb150e75f564045fb598c13bc23cb92e9f0da | ["Apache-2.0"] | 2 | 2022-01-24T18:59:56.000Z | 2022-02-04T22:12:48.000Z
tests/components/synology_dsm/test_config_flow.py | thomasloven/home-assistant | 90fbb150e75f564045fb598c13bc23cb92e9f0da | ["Apache-2.0"] | null | null | null
tests/components/synology_dsm/test_config_flow.py | thomasloven/home-assistant | 90fbb150e75f564045fb598c13bc23cb92e9f0da | ["Apache-2.0"] | null | null | null
"""Tests for the Synology DSM config flow."""
import logging
from unittest.mock import MagicMock, Mock, patch
import pytest
from synology_dsm.exceptions import (
SynologyDSMException,
SynologyDSMLogin2SAFailedException,
SynologyDSMLogin2SARequiredException,
SynologyDSMLoginInvalidException,
SynologyDSMRequestException,
)
from homeassistant import data_entry_flow, setup
from homeassistant.components import ssdp
from homeassistant.components.synology_dsm.config_flow import CONF_OTP_CODE
from homeassistant.components.synology_dsm.const import (
CONF_VOLUMES,
DEFAULT_PORT,
DEFAULT_PORT_SSL,
DEFAULT_SSL,
DOMAIN,
)
from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_SSDP, SOURCE_USER
from homeassistant.const import (
CONF_DISKS,
CONF_HOST,
CONF_MAC,
CONF_PASSWORD,
CONF_PORT,
CONF_SSL,
CONF_USERNAME,
)
from homeassistant.helpers.typing import HomeAssistantType
from tests.common import MockConfigEntry
_LOGGER = logging.getLogger(__name__)
HOST = "nas.meontheinternet.com"
SERIAL = "mySerial"
HOST_2 = "nas.worldwide.me"
SERIAL_2 = "mySerial2"
PORT = 1234
SSL = True
USERNAME = "Home_Assistant"
PASSWORD = "password"
DEVICE_TOKEN = "Dév!cè_T0k€ñ"
MACS = ["00-11-32-XX-XX-59", "00-11-32-XX-XX-5A"]
@pytest.fixture(name="service")
def mock_controller_service():
"""Mock a successful service."""
with patch(
"homeassistant.components.synology_dsm.config_flow.SynologyDSM"
) as service_mock:
service_mock.return_value.information.serial = SERIAL
service_mock.return_value.utilisation.cpu_user_load = 1
service_mock.return_value.storage.disks_ids = ["sda", "sdb", "sdc"]
service_mock.return_value.storage.volumes_ids = ["volume_1"]
service_mock.return_value.network.macs = MACS
yield service_mock
@pytest.fixture(name="service_2sa")
def mock_controller_service_2sa():
"""Mock a successful service with 2SA login."""
with patch(
"homeassistant.components.synology_dsm.config_flow.SynologyDSM"
) as service_mock:
service_mock.return_value.login = Mock(
side_effect=SynologyDSMLogin2SARequiredException(USERNAME)
)
service_mock.return_value.information.serial = SERIAL
service_mock.return_value.utilisation.cpu_user_load = 1
service_mock.return_value.storage.disks_ids = ["sda", "sdb", "sdc"]
service_mock.return_value.storage.volumes_ids = ["volume_1"]
service_mock.return_value.network.macs = MACS
yield service_mock
@pytest.fixture(name="service_failed")
def mock_controller_service_failed():
"""Mock a failed service."""
with patch(
"homeassistant.components.synology_dsm.config_flow.SynologyDSM"
) as service_mock:
service_mock.return_value.information.serial = None
service_mock.return_value.utilisation.cpu_user_load = None
service_mock.return_value.storage.disks_ids = []
service_mock.return_value.storage.volumes_ids = []
service_mock.return_value.network.macs = []
yield service_mock
async def test_user(hass: HomeAssistantType, service: MagicMock):
"""Test user config."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}, data=None
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
# test with all provided
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={
CONF_HOST: HOST,
CONF_PORT: PORT,
CONF_SSL: SSL,
CONF_USERNAME: USERNAME,
CONF_PASSWORD: PASSWORD,
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["result"].unique_id == SERIAL
assert result["title"] == HOST
assert result["data"][CONF_HOST] == HOST
assert result["data"][CONF_PORT] == PORT
assert result["data"][CONF_SSL] == SSL
assert result["data"][CONF_USERNAME] == USERNAME
assert result["data"][CONF_PASSWORD] == PASSWORD
assert result["data"][CONF_MAC] == MACS
assert result["data"].get("device_token") is None
assert result["data"].get(CONF_DISKS) is None
assert result["data"].get(CONF_VOLUMES) is None
service.return_value.information.serial = SERIAL_2
# test without port + False SSL
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={
CONF_HOST: HOST,
CONF_SSL: False,
CONF_USERNAME: USERNAME,
CONF_PASSWORD: PASSWORD,
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["result"].unique_id == SERIAL_2
assert result["title"] == HOST
assert result["data"][CONF_HOST] == HOST
assert result["data"][CONF_PORT] == DEFAULT_PORT
assert not result["data"][CONF_SSL]
assert result["data"][CONF_USERNAME] == USERNAME
assert result["data"][CONF_PASSWORD] == PASSWORD
assert result["data"][CONF_MAC] == MACS
assert result["data"].get("device_token") is None
assert result["data"].get(CONF_DISKS) is None
assert result["data"].get(CONF_VOLUMES) is None
async def test_user_2sa(hass: HomeAssistantType, service_2sa: MagicMock):
"""Test user with 2sa authentication config."""
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={CONF_HOST: HOST, CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "2sa"
    # Failed the first time because the user was too slow to enter the code
service_2sa.return_value.login = Mock(
side_effect=SynologyDSMLogin2SAFailedException
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_OTP_CODE: "000000"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "2sa"
assert result["errors"] == {CONF_OTP_CODE: "otp_failed"}
# Successful login with 2SA code
service_2sa.return_value.login = Mock(return_value=True)
service_2sa.return_value.device_token = DEVICE_TOKEN
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_OTP_CODE: "123456"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["result"].unique_id == SERIAL
assert result["title"] == HOST
assert result["data"][CONF_HOST] == HOST
assert result["data"][CONF_PORT] == DEFAULT_PORT_SSL
assert result["data"][CONF_SSL] == DEFAULT_SSL
assert result["data"][CONF_USERNAME] == USERNAME
assert result["data"][CONF_PASSWORD] == PASSWORD
assert result["data"][CONF_MAC] == MACS
assert result["data"].get("device_token") == DEVICE_TOKEN
assert result["data"].get(CONF_DISKS) is None
assert result["data"].get(CONF_VOLUMES) is None
async def test_import(hass: HomeAssistantType, service: MagicMock):
"""Test import step."""
# import with minimum setup
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={CONF_HOST: HOST, CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["result"].unique_id == SERIAL
assert result["title"] == HOST
assert result["data"][CONF_HOST] == HOST
assert result["data"][CONF_PORT] == DEFAULT_PORT_SSL
assert result["data"][CONF_SSL] == DEFAULT_SSL
assert result["data"][CONF_USERNAME] == USERNAME
assert result["data"][CONF_PASSWORD] == PASSWORD
assert result["data"][CONF_MAC] == MACS
assert result["data"].get("device_token") is None
assert result["data"].get(CONF_DISKS) is None
assert result["data"].get(CONF_VOLUMES) is None
service.return_value.information.serial = SERIAL_2
# import with all
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={
CONF_HOST: HOST_2,
CONF_PORT: PORT,
CONF_SSL: SSL,
CONF_USERNAME: USERNAME,
CONF_PASSWORD: PASSWORD,
CONF_DISKS: ["sda", "sdb", "sdc"],
CONF_VOLUMES: ["volume_1"],
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["result"].unique_id == SERIAL_2
assert result["title"] == HOST_2
assert result["data"][CONF_HOST] == HOST_2
assert result["data"][CONF_PORT] == PORT
assert result["data"][CONF_SSL] == SSL
assert result["data"][CONF_USERNAME] == USERNAME
assert result["data"][CONF_PASSWORD] == PASSWORD
assert result["data"][CONF_MAC] == MACS
assert result["data"].get("device_token") is None
assert result["data"][CONF_DISKS] == ["sda", "sdb", "sdc"]
assert result["data"][CONF_VOLUMES] == ["volume_1"]
async def test_abort_if_already_setup(hass: HomeAssistantType, service: MagicMock):
"""Test we abort if the account is already setup."""
MockConfigEntry(
domain=DOMAIN,
data={CONF_HOST: HOST, CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
unique_id=SERIAL,
).add_to_hass(hass)
# Should fail, same HOST:PORT (import)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={CONF_HOST: HOST, CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
# Should fail, same HOST:PORT (flow)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={CONF_HOST: HOST, CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
async def test_login_failed(hass: HomeAssistantType, service: MagicMock):
"""Test when we have errors during login."""
service.return_value.login = Mock(
side_effect=(SynologyDSMLoginInvalidException(USERNAME))
)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={CONF_HOST: HOST, CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {CONF_USERNAME: "login"}
async def test_connection_failed(hass: HomeAssistantType, service: MagicMock):
"""Test when we have errors during connection."""
service.return_value.login = Mock(
side_effect=SynologyDSMRequestException(IOError("arg"))
)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={CONF_HOST: HOST, CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {CONF_HOST: "connection"}
async def test_unknown_failed(hass: HomeAssistantType, service: MagicMock):
"""Test when we have an unknown error."""
service.return_value.login = Mock(side_effect=SynologyDSMException)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={CONF_HOST: HOST, CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {"base": "unknown"}
async def test_missing_data_after_login(
hass: HomeAssistantType, service_failed: MagicMock
):
"""Test when we have errors during connection."""
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={CONF_HOST: HOST, CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {"base": "missing_data"}
async def test_form_ssdp_already_configured(
hass: HomeAssistantType, service: MagicMock
):
"""Test ssdp abort when the serial number is already configured."""
await setup.async_setup_component(hass, "persistent_notification", {})
MockConfigEntry(
domain=DOMAIN,
data={
CONF_HOST: HOST,
CONF_USERNAME: USERNAME,
CONF_PASSWORD: PASSWORD,
CONF_MAC: MACS,
},
unique_id=SERIAL,
).add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_SSDP},
data={
ssdp.ATTR_SSDP_LOCATION: "http://192.168.1.5:5000",
ssdp.ATTR_UPNP_FRIENDLY_NAME: "mydsm",
ssdp.ATTR_UPNP_SERIAL: "001132XXXX59", # Existing in MACS[0], but SSDP does not have `-`
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
async def test_form_ssdp(hass: HomeAssistantType, service: MagicMock):
"""Test we can setup from ssdp."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_SSDP},
data={
ssdp.ATTR_SSDP_LOCATION: "http://192.168.1.5:5000",
ssdp.ATTR_UPNP_FRIENDLY_NAME: "mydsm",
ssdp.ATTR_UPNP_SERIAL: "001132XXXX99", # MAC address, but SSDP does not have `-`
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "link"
assert result["errors"] == {}
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "192.168.1.5"
assert result["data"][CONF_HOST] == "192.168.1.5"
assert result["data"][CONF_PORT] == 5001
assert result["data"][CONF_SSL] == DEFAULT_SSL
assert result["data"][CONF_USERNAME] == USERNAME
assert result["data"][CONF_PASSWORD] == PASSWORD
assert result["data"][CONF_MAC] == MACS
assert result["data"].get("device_token") is None
assert result["data"].get(CONF_DISKS) is None
assert result["data"].get(CONF_VOLUMES) is None
| 36.873737 | 101 | 0.683673 |
79534994cbe8034b70020631d35d06d2c05c928d | 16,861 | py | Python
plugins/modules/oracle_user.py | blaf-cgi/ansible-oracle-modules | 37905c6ad91808a96f0085c9c1069e166f2e17b4 | ["MIT"] | null | null | null
plugins/modules/oracle_user.py | blaf-cgi/ansible-oracle-modules | 37905c6ad91808a96f0085c9c1069e166f2e17b4 | ["MIT"] | null | null | null
plugins/modules/oracle_user.py | blaf-cgi/ansible-oracle-modules | 37905c6ad91808a96f0085c9c1069e166f2e17b4 | ["MIT"] | null | null | null
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2014 Mikael Sandström <oravirt@gmail.com>
# Copyright: (c) 2021, Ari Stark <ari.stark@netcourrier.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = '''
module: oracle_user
short_description: Manages Oracle user/schema.
description:
- This module manages Oracle user/schema.
- It can create, alter or drop users.
    - It can empty schemas (dropping all their content).
    - It can change users' passwords; lock/unlock and expire/unexpire accounts.
- It can't be used to give privileges (refer to oracle_grant).
version_added: "0.8.0"
author:
- Mikael Sandström (@oravirt)
- Ari Stark (@ari-stark)
options:
authentication_type:
description:
- Type of authentication for the user.
- If not specified for a new user and no I(schema_password) is specified, there won't be authentication.
- If not specified and I(schema_password) is specified, value will be forced to I(password).
required: false
type: str
choices: ['external', 'global', 'no_authentication', 'password']
default_tablespace:
description:
- Default tablespace for the user.
- Tablespace must exist.
- If not specified for a new user, Oracle default will be used.
required: false
type: str
expired:
description:
- Expire or unexpire account.
- If not specified for a new user, Oracle default will be used.
required: false
type: bool
hostname:
description:
- Specify the host name or IP address of the database server computer.
default: localhost
type: str
locked:
description:
- Lock or unlock account.
- If not specified for a new user, Oracle default will be used.
required: false
type: bool
mode:
description:
- This option is the database administration privileges.
default: normal
type: str
choices: ['normal', 'sysdba']
oracle_home:
description:
- Define the directory into which all Oracle software is installed.
- Define ORACLE_HOME environment variable if set.
type: str
password:
description:
- Set the password to use to connect the database server.
- Must not be set if using Oracle wallet.
type: str
port:
description:
- Specify the listening port on the database server.
default: 1521
type: int
profile:
description:
- Profile of the user.
- Profile must exist.
- If not specified for a new user, Oracle default will be used.
required: false
type: str
schema_name:
description:
- Name of the user to manage.
required: true
type: str
aliases:
- name
schema_password:
description:
- Password of the user account.
- Required if I(authentication_type) is I(password).
required: false
type: str
service_name:
description:
- Specify the service name of the database you want to access.
required: true
type: str
state:
description:
- Specify the state of the user/schema.
- If I(state=empty), the schema will be purged, but not dropped.
            - If I(state=absent), the user will be dropped, including all its objects (cascade).
default: present
type: str
choices: ['absent', 'empty', 'present']
temporary_tablespace:
description:
- Default temporary tablespace for the user.
- Tablespace must exist.
- If not specified for a new user, Oracle default will be used.
required: false
type: str
username:
description:
- Set the login to use to connect the database server.
- Must not be set if using Oracle wallet.
type: str
aliases:
- user
requirements:
- Python module cx_Oracle
- Oracle basic tools.
notes:
- Check mode and diff mode are supported.
- Changes made by @ari-stark broke previous module interface.
'''
EXAMPLES = '''
- name: Create a new schema on a remote db by running the module on the controlmachine
oracle_user:
hostname: "remote-db-server"
service_name: "orcl"
username: "system"
password: "manager"
schema_name: "myschema"
schema_password: "mypass"
default_tablespace: "test"
state: "present"
- name: Drop a user on a remote db
oracle_user:
hostname: "remote-db-server"
service_name: "orcl"
username: "system"
password: "manager"
schema_name: "myschema"
state: "absent"
- name: Empty a schema on a remote db
oracle_user:
hostname: "remote-db-server"
service_name: "orcl"
username: "system"
password: "manager"
schema_name: "myschema"
state: "empty"
'''
RETURN = '''
ddls:
description: Ordered list of DDL requests executed during module execution.
returned: always
type: list
elements: str
'''
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.ari_stark.ansible_oracle_modules.plugins.module_utils.ora_db import OraDB
def get_existing_user(schema_name):
"""Check if the user/schema exists"""
data = ora_db.execute_select('select username,'
' account_status,'
' default_tablespace,'
' temporary_tablespace,'
' profile,'
' authentication_type,'
' oracle_maintained'
' from dba_users'
' where username = upper(:schema_name)', {'schema_name': schema_name})
if data:
row = data[0]
state = 'present'
expired = 'EXPIRED' in row[1]
locked = 'LOCKED' in row[1]
default_tablespace = row[2]
temporary_tablespace = row[3]
profile = row[4]
authentication_type = {'EXTERNAL': 'external', 'GLOBAL': 'global', 'NONE': None, 'PASSWORD': 'password'}[row[5]]
oracle_maintained = row[6] == 'Y'
diff['before']['state'] = state
diff['before']['expired'] = expired
diff['before']['locked'] = locked
diff['before']['default_tablespace'] = default_tablespace
diff['before']['temporary_tablespace'] = temporary_tablespace
diff['before']['profile'] = profile
diff['before']['authentication_type'] = authentication_type
if authentication_type == 'password':
diff['before']['schema_password'] = '**'
return {'username': schema_name, 'state': state, 'expired': expired, 'locked': locked,
'default_tablespace': default_tablespace, 'temporary_tablespace': temporary_tablespace,
'profile': profile, 'authentication_type': authentication_type, 'oracle_maintained': oracle_maintained}
else:
diff['before']['state'] = 'absent'
return None
def has_password_changed(schema_name, schema_password):
"""Check if password has changed."""
expected_error = 1017 # invalid username/password; logon denied
return ora_db.try_connect(schema_name, schema_password) == expected_error
def ensure_present(schema_name, authentication_type, schema_password, default_tablespace, temporary_tablespace,
profile, locked, expired, empty):
"""Create or modify the user"""
prev_user = get_existing_user(schema_name)
if prev_user:
changed = False
emptied = False
# Values are not changed by default, so after should be same as before
diff['after']['authentication_type'] = diff['before']['authentication_type']
diff['after']['default_tablespace'] = diff['before']['default_tablespace']
diff['after']['expired'] = diff['before']['expired']
diff['after']['locked'] = diff['before']['locked']
diff['after']['profile'] = diff['before']['profile']
diff['after']['temporary_tablespace'] = diff['before']['temporary_tablespace']
sql = 'alter user %s ' % schema_name
if authentication_type and authentication_type != prev_user['authentication_type']:
if authentication_type == 'external':
sql += 'identified externally '
elif authentication_type == 'global':
sql += 'identified globally '
elif authentication_type == 'password':
sql += 'identified by "%s" ' % schema_password
diff['after']['schema_password'] = '*'
else:
sql += 'no authentication '
diff['after']['authentication_type'] = authentication_type
changed = True
if default_tablespace and default_tablespace.lower() != prev_user['default_tablespace'].lower():
sql += 'default tablespace %s quota unlimited on %s ' % (default_tablespace, default_tablespace)
diff['after']['default_tablespace'] = default_tablespace
changed = True
if temporary_tablespace and temporary_tablespace.lower() != prev_user['temporary_tablespace'].lower():
sql += 'temporary tablespace %s ' % temporary_tablespace
diff['after']['temporary_tablespace'] = temporary_tablespace
changed = True
if profile and profile.lower() != prev_user['profile'].lower():
sql += 'profile %s ' % profile
diff['after']['profile'] = profile
changed = True
if locked is not None and locked != prev_user['locked']:
sql += 'account %s ' % ('lock' if locked else 'unlock')
diff['after']['locked'] = locked
changed = True
if expired is True and expired != prev_user['expired']:
sql += 'password expire '
diff['after']['expired'] = expired
changed = True
# If a password is defined and authentication type hasn't changed, we have to check :
# - if account must be unexpire
# - if password has changed
if schema_password and authentication_type == prev_user['authentication_type']:
# Unexpire account by defining a password
if expired is False and expired != prev_user['expired']:
sql += 'identified by "%s" ' % schema_password
diff['after']['expired'] = expired
diff['after']['password'] = '*'
changed = True
elif has_password_changed(schema_name, schema_password):
sql += 'identified by "%s" ' % schema_password
diff['after']['password'] = '*'
changed = True
if empty:
rows = ora_db.execute_select(
"select ao.object_name, ao.object_type"
" from all_objects ao"
" where ao.object_type in('TABLE', 'VIEW', 'PACKAGE', 'PROCEDURE', 'FUNCTION', 'SEQUENCE',"
" 'SYNONYM', 'TYPE', 'DATABASE LINK', 'TABLE PARTITION')"
" and ao.owner = '%s' and ao.generated = 'N' and not exists("
"select 1 from all_objects sq"
" where ao.object_name = sq.object_name"
" and sq.owner = '%s'"
" and ao.object_type = 'TABLE'"
" and sq.object_type = 'MATERIALIZED VIEW')"
" UNION"
" select ao.object_name, ao.object_type"
" from all_objects ao"
" where ao.object_type in('MATERIALIZED VIEW')"
" and ao.owner = '%s' and ao.generated = 'N'" % (schema_name.upper(),schema_name.upper(),schema_name.upper()))
for row in rows:
object_name = row[0]
object_type = row[1]
ora_db.execute_ddl('drop %s %s."%s" %s' % (
object_type, schema_name, object_name, 'cascade constraints' if object_type == 'TABLE' else ''))
if len(rows) != 0:
emptied = True
if changed or emptied:
if changed:
ora_db.execute_ddl(sql)
module.exit_json(msg='User %s changed and/or schema emptied.' % schema_name, changed=True, diff=diff,
ddls=ora_db.ddls)
else:
module.exit_json(msg='User %s already exists.' % schema_name, changed=False, diff=diff, ddls=ora_db.ddls)
else:
sql = 'create user %s ' % schema_name
if authentication_type == 'external':
sql += 'identified externally '
elif authentication_type == 'global':
sql += 'identified globally '
elif authentication_type == 'password':
sql += 'identified by "%s" ' % schema_password
else:
sql += 'no authentication '
if default_tablespace:
sql += 'default tablespace %s quota unlimited on %s ' % (default_tablespace, default_tablespace)
if temporary_tablespace:
sql += 'temporary tablespace %s ' % temporary_tablespace
if profile:
sql += 'profile %s ' % profile
if locked:
sql += 'account lock '
if expired:
sql += 'password expire '
ora_db.execute_ddl(sql)
module.exit_json(msg='User %s has been created.' % schema_name, changed=True, diff=diff, ddls=ora_db.ddls)
def ensure_absent(schema_name):
"""Drop the user if it exists"""
prev_user = get_existing_user(schema_name)
if prev_user and prev_user['oracle_maintained']:
module.fail_json(msg='Cannot drop a system user.', changed=False)
elif prev_user:
ora_db.execute_ddl('drop user %s cascade' % schema_name)
module.exit_json(msg='User %s dropped.' % schema_name, changed=True, diff=diff, ddls=ora_db.ddls)
else:
module.exit_json(msg="User %s doesn't exist." % schema_name, changed=False, diff=diff, ddls=ora_db.ddls)
def main():
global module
global ora_db
global diff
module = AnsibleModule(
argument_spec=dict(
authentication_type=dict(type='str', required=False,
choices=['external', 'global', 'no_authentication', 'password']),
default_tablespace=dict(type='str', default=None),
expired=dict(type='bool', default=None),
hostname=dict(type='str', default='localhost'),
locked=dict(type='bool', default=None),
mode=dict(type='str', default='normal', choices=['normal', 'sysdba']),
oracle_home=dict(type='str', required=False),
password=dict(type='str', required=False, no_log=True),
port=dict(type='int', default=1521),
profile=dict(type='str', default=None),
schema_name=dict(type='str', required=True, aliases=['name']),
schema_password=dict(type='str', default=None, no_log=True),
service_name=dict(type='str', required=True),
state=dict(type='str', default='present', choices=['absent', 'empty', 'present']),
temporary_tablespace=dict(type='str', default=None),
username=dict(type='str', required=False, aliases=['user']),
),
required_together=[['username', 'password']],
supports_check_mode=True,
)
authentication_type = module.params['authentication_type']
default_tablespace = module.params['default_tablespace']
expired = module.params['expired']
locked = module.params['locked']
profile = module.params['profile']
schema_name = module.params['schema_name']
schema_password = module.params['schema_password']
state = module.params['state']
temporary_tablespace = module.params['temporary_tablespace']
# Transforming parameters
if schema_password:
authentication_type = 'password'
ora_db = OraDB(module)
diff = {'before': {'schema_name': schema_name},
'after': {'state': state,
'schema_name': schema_name, }}
if state in ['empty', 'present']:
ensure_present(schema_name, authentication_type, schema_password, default_tablespace, temporary_tablespace,
profile, locked, expired, state == 'empty')
elif state == 'absent':
ensure_absent(schema_name)
if __name__ == '__main__':
main()
| 39.672941 | 129 | 0.594864 |
795349bef189292795edc182cb3031ca8fa26be4 | 1,911 | py | Python
cli/check.py | sysdiglabs/syscli | f72f476f7b555ce8be4d1b6d6ce21c53400aeb08 | ["Apache-2.0"] | null | null | null
cli/check.py | sysdiglabs/syscli | f72f476f7b555ce8be4d1b6d6ce21c53400aeb08 | ["Apache-2.0"] | 3 | 2019-01-30T12:32:13.000Z | 2019-01-30T12:37:38.000Z
cli/check.py | sysdiglabs/syscli | f72f476f7b555ce8be4d1b6d6ce21c53400aeb08 | ["Apache-2.0"] | 2 | 2019-01-21T11:05:09.000Z | 2021-04-19T21:57:57.000Z
# Copyright 2018 Sysdig
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from sdc.sdc_config import load_config_env
from sdc.sdc_extend import SdMonitorClient, SdSecureClient
from sdc.sdc_method_check import check_monitor, check_secure
from . import subparser
def check(args):
if not os.path.isdir(args.path):
raise NotADirectoryError(f"{args.path} is not a correct directory")
print("Checking if there are remote changes...")
config = load_config_env(args.file, args.env)
token = config["token"]
kind = config["kind"]
url = config["url"]
if token is None or token == "":
raise Exception("Token not provided, can't perform check")
if kind == "monitor":
something_changed = check_monitor(SdMonitorClient(token, url), args.path)
exit(0 if not something_changed else 1)
if kind == "secure":
something_changed = check_secure(SdSecureClient(token, url), args.path)
exit(0 if not something_changed else 1)
print(f"unknown kind of remote environment: {kind}")
exit(2)
_check_parser = subparser.add_parser("check", description="Checks if something has changed in the remote environment "
"comparing it with the backed up version")
_check_parser.add_argument("path", help="Path of the backup.")
_check_parser.set_defaults(func=check)
| 36.75 | 118 | 0.70225 |
795349f704b71b651d80f5faaaa1f1976e164ccc | 1,089 | py | Python
convert/widgets.py | aino/aino-convert | f3bd773f02a9645c75bfbd773e747dd8dc6e08f4 | ["BSD-3-Clause"] | 1 | 2015-07-15T07:40:19.000Z | 2015-07-15T07:40:19.000Z
convert/widgets.py | aino/aino-convert | f3bd773f02a9645c75bfbd773e747dd8dc6e08f4 | ["BSD-3-Clause"] | null | null | null
convert/widgets.py | aino/aino-convert | f3bd773f02a9645c75bfbd773e747dd8dc6e08f4 | ["BSD-3-Clause"] | null | null | null
from django import forms
from django.forms.util import flatatt
from django.utils.safestring import mark_safe
from django.utils.encoding import force_unicode
from base import MediaFile
class AdminMediaFileWidget(forms.TextInput):
"""
    This is just the core of a widget; how it gets the actual input value is
    left to the user (JavaScript hint).
"""
def __init__(self, attrs=None):
final_attrs = {'class': 'mediafile'}
if attrs is not None:
final_attrs.update(attrs)
super(AdminMediaFileWidget, self).__init__(attrs=final_attrs)
def render(self, name, value, attrs=None):
if value is None: value = ''
final_attrs = self.build_attrs(attrs, type=self.input_type, name=name)
img_tag = ''
if value != '':
# Only add the 'value' attribute if a value is non-empty.
final_attrs['value'] = force_unicode(value)
img_tag = MediaFile(value).thumbnail('100x100>').tag
return mark_safe(u'%s<input%s />' % (img_tag, flatatt(final_attrs)))
| 36.3 | 79 | 0.645546 |
79534a10dc30dfdde4c81c528df02c72113a0d37 | 256 | py | Python
gamefixes/397540.py | manueliglesiasgarcia/protonfixes | d676b6bf39f6e4268b4791d3d71c6d74e2127121 | ["BSD-2-Clause"] | 54 | 2019-06-21T22:03:45.000Z | 2022-03-20T19:24:36.000Z
gamefixes/397540.py | manueliglesiasgarcia/protonfixes | d676b6bf39f6e4268b4791d3d71c6d74e2127121 | ["BSD-2-Clause"] | 21 | 2020-06-13T22:49:18.000Z | 2022-03-20T08:28:39.000Z
gamefixes/397540.py | manueliglesiasgarcia/protonfixes | d676b6bf39f6e4268b4791d3d71c6d74e2127121 | ["BSD-2-Clause"] | 53 | 2019-09-11T15:23:25.000Z | 2022-03-20T08:18:49.000Z
""" Game fix for Borderlands 3
"""
#pylint: disable=C0103
from protonfixes import util
def main():
""" Borderlands 3 vcrun2019 fix
"""
# Fixes the startup process.
util.protontricks('vcrun2019_ge')
util.protontricks('d3dcompiler_47')
| 19.692308 | 39 | 0.6875 |
79534a308f6182969b48d37463b5faab47b3623f | 4,355 | py | Python
artificial_detection/utils.py | MaratSaidov/artificial-text-detection | 74b2100294232ec361db84fdc3a24fdeba1fce49 | ["MIT"] | 12 | 2021-11-15T08:59:46.000Z | 2022-03-06T15:42:24.000Z
artificial_detection/utils.py | MaratSaidov/artificial-text-detection | 74b2100294232ec361db84fdc3a24fdeba1fce49 | ["MIT"] | 2 | 2021-11-14T15:50:00.000Z | 2021-11-20T12:17:29.000Z
artificial_detection/utils.py | MaratSaidov/artificial-text-detection | 74b2100294232ec361db84fdc3a24fdeba1fce49 | ["MIT"] | null | null | null
import os
import pickle
import random
import zlib
from os import path
from typing import List, Optional
import pandas as pd
import torch
from transformers import DistilBertTokenizerFast
import wandb
from artificial_detection.data.data import BinaryDataset, TextDetectionDataset
class MockDataset:
"""
Mock dataset for testing.
"""
dataset = [
{"ru": "добрый день", "en": "good evening",},
{"ru": "извините", "en": "i am sorry",},
]
_translations = ["good evening", "i am sorry"]
dataset_name = "mock"
@classmethod
def targets(cls) -> List[str]:
return [sample["en"] for sample in cls.dataset]
@classmethod
def translations(cls) -> List[str]:
return cls._translations
@classmethod
def list(cls) -> List[str]:
dataset_list = []
for dct in cls.dataset:
dataset_list.extend([dct["ru"], dct["en"]])
return dataset_list
def get_dvc_storage_path() -> str:
"""
Get the full path to the DVC storage.
Returns
-------
str
Path to the DVC Storage.
"""
dir_path = path.dirname(path.dirname(path.realpath(__file__)))
return path.join(dir_path, "resources/data")
def get_dataset_path(dataset_name: str, langs: Optional[List[str]] = None, ext: str = "bin") -> str:
dvc_path = get_dvc_storage_path()
if langs:
dataset_real_name = f"{dataset_name}.{langs[0]}-{langs[1]}.{ext}"
else:
dataset_real_name = f"{dataset_name}.{ext}"
return path.join(dvc_path, dataset_real_name)
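# Example (the dataset name here is hypothetical): get_dataset_path("tatoeba", langs=["ru", "en"])
# resolves to "<repo_root>/resources/data/tatoeba.ru-en.bin", per the format string above.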
def load_binary_dataset(dataset_name: str, langs: Optional[List[str]] = None, ext: str = "bin") -> BinaryDataset:
dataset_path = get_dataset_path(dataset_name, langs=langs, ext=ext)
with open(dataset_path, "rb") as file:
compressed_dataset = file.read()
dumped_dataset = zlib.decompress(compressed_dataset)
dataset = pickle.loads(dumped_dataset)
return dataset
def save_binary_dataset(
dataset: BinaryDataset, dataset_name: str, langs: Optional[List[str]] = None, ext: str = "bin"
) -> None:
dataset_path = get_dataset_path(dataset_name, langs=langs, ext=ext)
with open(dataset_path, "wb") as file:
dumped_dataset = pickle.dumps(dataset, protocol=pickle.HIGHEST_PROTOCOL)
compressed_dataset = zlib.compress(dumped_dataset)
file.write(compressed_dataset)
def translations_to_torch_dataset(
targets: List[str], translations: List[str], easy_nmt_offline: Optional[bool] = None, device: Optional[str] = None
) -> TextDetectionDataset:
corpus = TextDetectionDataset.get_corpus(targets, translations)
labels = torch.FloatTensor([0, 1] * len(targets))
tokenizer_path = "resources/data/tokenizer" if easy_nmt_offline else "distilbert-base-uncased"
tokenizer = DistilBertTokenizerFast.from_pretrained(tokenizer_path)
encodings = tokenizer(corpus, truncation=True, padding=True)
encodings, labels = TextDetectionDataset.to_device(encodings, labels, device=device)
dataset = TextDetectionDataset(encodings, labels, device=device)
return dataset
def save_translations_texts(
sources: List[str], targets: List[str], translations: List[str], dataset_name: str, src_lang: str, trg_lang: str
) -> None:
"""
Saves data to csv.
"""
print("Saving sources/translations in csv...")
df_data = list(zip(sources, targets, translations))
df = pd.DataFrame(data=df_data, columns=["sources", "targets", "translations"])
csv_path = get_dataset_path(f"{dataset_name}.{src_lang}-{trg_lang}", ext="csv")
df.to_csv(csv_path, index=False)
def ord_cyrillic(c: str) -> int:
if "а" <= c <= "я":
return ord(c) - ord("а") + ord("a") # - cyrillic + latinic
if "А" <= c <= "Я":
return ord(c) - ord("А") + ord("A")
return ord(c)
def setup_experiment_tracking(run_name: str) -> None:
os.environ["WANDB_MODE"] = "offline"
token = os.environ.get("WANDB_TOKEN", None)
wandb.login(key=token)
wandb.init(project="artificial-text-detection", name=run_name)
def stop_experiment_tracking() -> None:
wandb.finish()
def fix_random_seed(seed: int = 42) -> None:
"""
Fixing a random seed.
"""
torch.backends.cudnn.deterministic = True
torch.manual_seed(seed)
torch.cuda.manual_seed_all(seed)
random.seed(seed)
| 31.107143 | 118 | 0.676005 |
79534b01e98eb8b9e3bb04bf5095a778e1671674 | 10,862 | py | Python
genqr.py | mamewotoko/podcast_spider | 1e505413ccbb0a7b6c33048e25a42bce55624974 | ["Apache-2.0"] | null | null | null
genqr.py | mamewotoko/podcast_spider | 1e505413ccbb0a7b6c33048e25a42bce55624974 | ["Apache-2.0"] | null | null | null
genqr.py | mamewotoko/podcast_spider | 1e505413ccbb0a7b6c33048e25a42bce55624974 | ["Apache-2.0"] | null | null | null
# coding:utf-8
data = [
{ "station": "Tokyo FM", "data":
[{"url": "http://www.tfm.co.jp/podcasts/tfmvoice/podcast.xml", "link": "http://www.tfm.co.jp/podcasts/tfmvoice/", "title": "Voice of TOKYO FM"},
{"url": "http://www.tfm.co.jp/podcasts/future/podcast.xml", "link": "http://www.tfm.co.jp/podcasts/future/", "title": "\u672a\u6765\u6388\u696d"},
{"url": "http://www.tfm.co.jp/podcasts/museum/podcast.xml", "link": "http://www.tfm.co.jp/podcasts/museum/", "title": "Tokyo Midtown presents The Lifestyle MUSEUM"},
{"url": "http://www.tfm.co.jp/podcasts/dojo/podcast.xml", "link": "http://www.tfm.co.jp/podcasts/dojo/", "title": "\u30a8\u30d5\u30a8\u30e0\u82b8\u8853\u9053\u5834"},
{"url": "http://www.tfm.co.jp/podcasts/matsuda/podcast.xml", "link": "http://www.tfm.co.jp/lock/matsuda4/", "title": "JASRAC presents \u677e\u7530LOCKS!"},
{"url": "http://www.tfm.co.jp/podcasts/hr/podcast.xml", "link": "http://www.tfm.co.jp/podcasts/hr/", "title": "\u30a4\u30aa\u30f3 presents \u30cf\u30fc\u30c8\u30d5\u30eb\u30fb\u30ea\u30af\u30a8\u30b9\u30c8"},
{"url": "http://www.tfm.co.jp/podcasts/avanti/podcast.xml", "link": "http://www.avanti-web.com/", "title": "SUNTORY SATURDAY WAITING BAR AVANTI"},
{"url": "http://www.tfm.co.jp/podcasts/yuka/podcast.xml", "link": "http://www.tfm.co.jp/yuka/", "title": "\u512a\u9999\u306e\u30b3\u30b3\u30ed\u30fb\u30ab\u30e9\u30c0\u30fb\u30b2\u30f3\u30ad\u3002"},
{"url": "http://www.tfm.co.jp/podcasts/kando/podcast.xml", "link": "http://www.tfm.co.jp/podcasts/kando/", "title": "\u6bce\u65e5\u306b\u5922\u4e2d\u3000\u611f\u52d5\u30d7\u30ed\u30c7\u30e5\u30fc\u30b5\u30fc\u30fb\u5c0f\u6797\u7ae0\u4e00"},
{"url": "http://www.tfm.co.jp/podcasts/ya/podcast.xml", "link": "http://www.tfm.co.jp/podcasts/ya/", "title": "\u58f0\u512a\u587e\u3000Presents \u3042\u3079\u306a\u304c\u306e\u91ce\u671b\uff5e\u4ed6\u529b\u672c\u9858\u306e\u5909\uff5e"},
{"url": "http://www.tfm.co.jp/podcasts/job/podcast.xml", "link": "http://www.tfm.co.jp/podcasts/job/", "title": "\u30b7\u30e5\u30fc\u30ab\u30c4\u306e\u738b"},
{"url": "http://www.tfm.co.jp/podcasts/premium/podcast.xml", "link": "http://www.tfm.co.jp/podcasts/premium/", "title": "TOKYO FM Premium Podcasts"}]},
{ "station": "ニッポン放送", "data":
[{"url": "http://podcast.1242.com/tsurube/index.xml", "link": "http://podcast.1242.com", "title": "\u7b11\u798f\u4ead\u9db4\u74f6 \u65e5\u66dc\u65e5\u306e\u305d\u308c PODCAST"},
{"url": "http://podcast.1242.com/sanshiro/index.xml", "link": "http://podcast.1242.com/", "title": "\u4e09\u56db\u90ce\u306e\u30aa\u30fc\u30eb\u30ca\u30a4\u30c8\u30cb\u30c3\u30dd\u30f30"},
{"url": "http://podcast.1242.com/kikimasu/index.xml", "link": "http://podcast.1242.com/", "title": "\u5927\u8c37\u30ce\u30d6\u5f66\u30ad\u30ad\u30de\u30b9\uff01"},
{"url": "http://podcast.1242.com/aruko2/index.xml", "link": "http://podcast.1242.com/", "title": "\u30a2\u30eb\u30b3\uff06\u30d4\u30fc\u30b9\u306e\u30aa\u30fc\u30eb\u30ca\u30a4\u30c8\u30cb\u30c3\u30dd\u30f30(ZERO)"},
{"url": "http://podcast.1242.com/zoom/index.xml", "link": "http://podcast.1242.com/", "title": "\u8f9b\u574a\u6cbb\u90ce\u3000\u30ba\u30fc\u30e0\u3000\u305d\u3053\u307e\u3067\u8a00\u3046\u304b\uff01"},
{"url": "http://podcast.1242.com/keiko/index.xml", "link": "http://podcase.1242.com/", "title": "\u30df\u30c3\u30c4\u30fb\u30de\u30f3\u30b0\u30ed\u30fc\u30d6\u306e\u30aa\u30fc\u30eb\u30ca\u30a4\u30c8\u30cb\u30c3\u30dd\u30f3GOLD"},
{"url": "http://podcast.1242.com/takada/index.xml", "link": "http://podcast.1242.com", "title": "\u9ad8\u7530\u6587\u592b\u306e\u30e9\u30b8\u30aa\u30d3\u30d0\u30ea\u30fc\u663c\u30baPodcast"},
{"url": "http://podcast.1242.com/uchu/index.xml", "link": "http://podcast.1242.com/", "title": "\u300c\u306f\u3084\u3076\u30552\u300d\u5fdc\u63f4\u30ad\u30e3\u30f3\u30da\u30fc\u30f3\u3000\u5b87\u5b99\u306e\u9b45\u529b\u30ad\u30ad\u30de\u30b9\uff01"},
{"url": "http://podcast.1242.com/wage/index.xml", "link": "http://podcast.1242.com/", "title": "\u5c0f\u5cf6\u3088\u3057\u304a\u3068\u304b\u3082\u3081\u3093\u305f\u308b\u306e\u30aa\u30fc\u30eb\u30ca\u30a4\u30c8\u30cb\u30c3\u30dd\u30f3\u30e2\u30d0\u30a4\u30eb"},
{"url": "http://podcast.1242.com/voice/index.xml", "link": "http://podcast.1242.com", "title": "\u30b6\u30fb\u30dc\u30a4\u30b9\u3000\u305d\u3053\u307e\u3067\u8a00\u3046\u304b\uff01"},
{"url": "http://podcast.1242.com/sand/index.xml", "link": "http://podcast.1242.com", "title": "\u30b5\u30f3\u30c9\u30a6\u30a3\u30c3\u30c1\u30de\u30f3\u306e\u6771\u5317\u9b42"},
{"url": "http://podcast.1242.com/otona/index.xml", "link": "http://podcast.1242.com/", "title": "\u304c\u3093\u691c\u8a3a\u306bGO!GO!\u30aa\u30c8\u30ca\u306e\u5065\u5eb7\u30e9\u30b8\u30aa"},
{"url": "http://podcast.1242.com/an/index.xml", "link": "http://podcast.1242.com", "title": "\u674f\u306eanytime andante podcast"},
{"url": "http://podcast.1242.com/hoppy/index.xml", "link": "http://podcast.1242.com/", "title": "\u770b\u677f\u5a18\u30db\u30c3\u30d4\u30fc\u30fb\u30df\u30fc\u30ca\u306eHOPPY HAPPY BAR"},
{"url": "http://podcast.1242.com/fairies/index.xml", "link": "http://podcast.1242.com/", "title": "\u6771\u4eac\u30a8\u30ec\u30af\u30c8\u30ed\u30f3\u30d7\u30ec\u30bc\u30f3\u30c4\u3000\u30d5\u30a7\u30a2\u30ea\u30fc\u30ba\u306eFly to the World \u30b0\u30ed\u30fc\u30d0\u30fc\u5148\u751f\u306e\u30ef\u30f3\u30dd\u30a4\u30f3\u30c8\u30ec\u30c3\u30b9\u30f3"},
{"url": "http://podcast.1242.com/buzz/index.xml", "link": "http://podcast.1242.com/", "title": "BUZZ\u3000SHANGHAI"},
{"url": "http://podcast.1242.com/bengoshi/index.xml", "link": "http://www.allnightnippon.com/bengoshi/", "title": "\u89d2\u7530\u9f8d\u5e73\u306e\u30aa\u30fc\u30eb\u30ca\u30a4\u30c8\u30cb\u30c3\u30dd\u30f3\u30dd\u30c3\u30c9\u30ad\u30e3\u30b9\u30c8"},
{"url": "http://podcast.1242.com/scoop/index.xml", "link": "http://podcast.1242.com/", "title": "\u30c6\u30ea\u30fc\u4f0a\u85e4\u306e\u30d5\u30e9\u30a4\u30c7\u30fc\u30b9\u30af\u30fc\u30d7\u3000\u305d\u3053\u307e\u3067\u8a00\u3046\u304b\uff01"},
{"url": "http://podcast.1242.com/toda/index.xml", "link": "http://podcast.1242.com", "title": "\u6238\u7530\u6075\u5b50\u306e\u30aa\u30c8\u30ca\u30af\u30aa\u30ea\u30c6\u30a3"},
{"url": "http://podcast.1242.com/ps/index.xml", "link": "http://podcast.1242.com", "title": "\u30cb\u30c3\u30dd\u30f3\u653e\u9001 Podcasting STATION"}]},
{ "station": "FMとやま", "data":
[{"url": "http://www.fmtoyama.co.jp/contents/podcast/podcast_9.xml", "link": "http://www.fmtoyama.co.jp/contents/podcast_9.html", "title": "\u30c7\u30a4\u30ea\u30fc\u30fb\u30b3\u30e9\u30e0"},
{"url": "http://www.fmtoyama.co.jp/contents/podcast/podcast_13.xml", "link": "http://www.fmtoyama.co.jp/contents/podcast_13.html", "title": "\u300cgrace\u300d\u3000\u30ad\u30ce\u30b3\u30ec"},
{"url": "http://www.fmtoyama.co.jp/contents/podcast/podcast_38.xml", "link": "http://www.fmtoyama.co.jp/contents/podcast_38.html", "title": "\u30a2\u30af\u30b7\u30e7\u30f3 \u30ec\u30c7\u30a3\u3000\u30b4\u30fc\u301c\u6642\u4ee3\u3092\u62d3\u304f\u5973\u6027\u305f\u3061\u301c"},
{"url": "http://www.fmtoyama.co.jp/contents/podcast/podcast_18.xml", "link": "http://www.fmtoyama.co.jp/contents/podcast_18.html", "title": "\u5b50\u3069\u3082\u306b\u4f1d\u3048\u305f\u3044\u3000\u5bcc\u5c71\u306e\u6c11\u8a71"},
{"url": "http://www.fmtoyama.co.jp/contents/podcast/podcast_42.xml", "link": "http://www.fmtoyama.co.jp/contents/podcast_42.html", "title": "\u30ec\u30c7\u30a3\u30aa\u30fb\u30b8\u30e3\u30fc\u30ca\u30eb\u301c\u304c\u3093\u3070\u308b\u5bcc\u5c71\u306e\u5143\u6c17\u4f01\u696d"},
{"url": "http://www.fmtoyama.co.jp/contents/podcast/podcast_24.xml", "link": "http://www.fmtoyama.co.jp/contents/podcast_24.html", "title": "\u30bb\u30f3\u30c6\u30a3\u30a2\u3000\u30c6\u30cb\u30b9\u30b9\u30c8\u30fc\u30ea\u30fc\uff06\u30e8\u30ac\u30b9\u30bf\u30b8\u30aa\u30e6\u30cb\u30aa\u30f3"},
{"url": "http://www.fmtoyama.co.jp/contents/podcast/podcast_41.xml", "link": "http://www.fmtoyama.co.jp/contents/podcast_41.html", "title": "KUROBE THE TRY\uff01\u3000\u301c\u884c\u3063\u3066\u307f\u3088\u3046\uff01\u3084\u3063\u3066\u307f\u3088\u3046\uff01\u301c"},
{"url": "http://www.fmtoyama.co.jp/contents/podcast/podcast_2.xml", "link": "http://www.fmtoyama.co.jp/contents/podcast_2.html", "title": "BOUSAI\u3000RADIO"},
{"url": "http://www.fmtoyama.co.jp/contents/podcast/podcast_40.xml", "link": "http://www.fmtoyama.co.jp/contents/podcast_40.html", "title": "\u30ad\u30e9\u30ea\u3000\u30ad\u30c8\u30ad\u30c8\u30b3\u30e9\u30e0"},
{"url": "http://www.fmtoyama.co.jp/contents/podcast/podcast_21.xml", "link": "http://www.fmtoyama.co.jp/contents/podcast_21.html", "title": "grace\u3000\u3086\u3063\u304d\u301c\u306efirst\u3000report"},
{"url": "http://www.fmtoyama.co.jp/contents/podcast/podcast_32.xml", "link": "http://www.fmtoyama.co.jp/contents/podcast_32.html", "title": "\u30b8\u30e5\u30fc\u30d7\u30e9\u30b9\u66ae\u3089\u3057\u306b\u7b11\u9854\u30d7\u30e9\u30b9\uff01\uff01"},
{"url": "http://www.fmtoyama.co.jp/contents/podcast/podcast_30.xml", "link": "http://www.fmtoyama.co.jp/contents/podcast_30.html", "title": "\u30e9\u30b8\u30aa\u30c9\u30e9\u30de\u300e\u3075\u305f\u308a\u306f\u304d\u3063\u3068\u3064\u306a\u304c\u3063\u3066\u3044\u308b\u300f"},
{"url": "http://www.fmtoyama.co.jp/contents/podcast/podcast_37.xml", "link": "http://www.fmtoyama.co.jp/contents/podcast_37.html", "title": "\u306a\u308b\u307b\u3069\u30de\u30cd\u30fc\u30e9\u30a4\u30d5"},
{"url": "http://www.fmtoyama.co.jp/contents/podcast/podcast_4.xml", "link": "http://www.fmtoyama.co.jp/contents/podcast_4.html", "title": "\u77e5\u4e8b\u306e\u770c\u653f\u3056\u3063\u304f\u3070\u3089\u3093"},
{"url": "http://www.fmtoyama.co.jp/contents/podcast/podcast_26.xml", "link": "http://www.fmtoyama.co.jp/contents/podcast_26.html", "title": "\u3055\u304f\u3089\u3000\u8033\u304b\u3089\u306e\u4f4f\u5b85\u30fb\u30ea\u30d5\u30a9\u30fc\u30e0\u6700\u65b0\u60c5\u5831"},
{"url": "http://www.fmtoyama.co.jp/contents/podcast/podcast_8.xml", "link": "http://www.fmtoyama.co.jp/contents/podcast_8.html", "title": "\u6749\u672c\u82f1\u4e16\u306e\u30b4\u30eb\u30d5\u30fb\u30af\u30ea\u30cb\u30c3\u30af"},
{"url": "http://www.fmtoyama.co.jp/contents/podcast/podcast_33.xml", "link": "http://www.fmtoyama.co.jp/contents/podcast_33.html", "title": "KUROBE\u3000\u301cTHE\u3000FUTURE\u301c"}]}
];
import json, subprocess, os
TEXT2QR = "/Users/tak/dev/text2qr/bin/text2qr.sh"
OUTDIR = "qrcode"
if not os.path.exists(OUTDIR):
os.makedirs(OUTDIR)
station_id = 1
for station in data:
podcast_id = 1
for podcast in station['data']:
filename = "%s/qrcode%03d_%03d.png" % (OUTDIR, station_id, podcast_id)
p = subprocess.Popen([TEXT2QR, podcast["url"], filename])
p.wait()
if p.returncode != 0:
print "qrcode error: $s, %s" %(filename, podcast["url"])
else:
podcast["qrcode"] = filename
podcast_id += 1
station_id += 1
print json.dumps(data)
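# Sketch (an assumption, not part of the original script): the external text2qr.sh
# helper could be replaced by the pure-Python `qrcode` package, e.g.:
#     import qrcode
#     qrcode.make(podcast["url"]).save(filename)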
| 132.463415 | 353 | 0.715154 |
79534cb265d44260cde83715f111c446ef4a91a3 | 11,719 | py | Python
ztf_viewer/figures.py | snad-space/ztf-viewer | a0152d415beb11095134d0e407956ea088db1684 | ["MIT"] | 2 | 2020-11-14T02:24:57.000Z | 2021-06-04T06:13:42.000Z
ztf_viewer/figures.py | snad-space/ztf-viewer | a0152d415beb11095134d0e407956ea088db1684 | ["MIT"] | 74 | 2020-07-13T09:38:41.000Z | 2022-03-31T15:58:04.000Z
ztf_viewer/figures.py | snad-space/ztf-viewer | a0152d415beb11095134d0e407956ea088db1684 | ["MIT"] | null | null | null
from datetime import datetime
from io import BytesIO, StringIO
import matplotlib
import matplotlib.backends.backend_pgf
import matplotlib.figure
import numpy as np
import pandas as pd
from astropy.time import Time
from flask import Response, request, send_file
from immutabledict import immutabledict
from matplotlib.ticker import AutoMinorLocator
from ztf_viewer.app import app
from ztf_viewer.cache import cache
from ztf_viewer.catalogs import find_ztf_oid
from ztf_viewer.exceptions import NotFound
from ztf_viewer.util import FILTER_COLORS, FILTERS_ORDER, parse_json_to_immutable, ZTF_FILTERS, flip
MJD_OFFSET = 58000
@cache()
def get_plot_data(cur_oid, dr, other_oids=frozenset(), min_mjd=None, max_mjd=None, additional_data=immutabledict()):
"""Get plot data
additional_data format is:
{
'id1': [
{
'mjd': 58800.3,
'mag': 18.1,
'magerr': 0.34,
'filter': 'r',
},
...
],
...
}
"""
oids = [cur_oid]
oids.extend(sorted(other_oids, key=int))
lcs = {}
for oid in oids:
if oid == cur_oid:
size = 3
else:
size = 1
lc = find_ztf_oid.get_lc(oid, dr, min_mjd=min_mjd, max_mjd=max_mjd)
meta = find_ztf_oid.get_meta(oid, dr)
for obs in lc:
obs['oid'] = oid
obs['fieldid'] = meta['fieldid']
obs['rcid'] = meta['rcid']
obs['filter'] = meta['filter']
obs['mark_size'] = size
lcs[oid] = lc
for identifier, lc in additional_data.items():
list_lc = []
for obs in lc:
obs = dict(obs)
obs['oid'] = identifier
obs['mark_size'] = 3
list_lc.append(obs)
lcs[identifier] = list_lc
for oid, lc in lcs.items():
mjd = np.array([obs['mjd'] for obs in lc])
time = Time(mjd, format='mjd')
for t, obs in zip(time, lc):
obs[f'mjd_{MJD_OFFSET}'] = obs['mjd'] - MJD_OFFSET
obs['date'] = t.strftime('%Y-%m-%d')
obs['cur_oid'] = cur_oid
return lcs
@cache()
def get_folded_plot_data(cur_oid, dr, period, offset=None, other_oids=frozenset(), min_mjd=None, max_mjd=None,
additional_data=immutabledict()):
if offset is None:
offset = MJD_OFFSET
lcs = get_plot_data(cur_oid, dr, other_oids=other_oids, min_mjd=min_mjd, max_mjd=max_mjd,
additional_data=additional_data)
for lc in lcs.values():
for obs in lc:
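            # Worked example of the folding below: with period = 2.0 d and offset = 58000,
            # mjd = 58003.5 gives folded_time = 3.5 % 2.0 = 1.5 and phase = 0.75.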
obs['folded_time'] = (obs['mjd'] - offset) % period
obs['phase'] = obs['folded_time'] / period
return lcs
MIMES = {
'pdf': 'application/pdf',
'png': 'image/png',
}
def save_fig(fig, fmt):
bytes_io = BytesIO()
if fmt == 'pdf':
canvas = matplotlib.backends.backend_pgf.FigureCanvasPgf(fig)
canvas.print_pdf(bytes_io)
else:
fig.savefig(bytes_io, format=fmt)
return bytes_io
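# Hedged sketch of save_fig on its own (mirrors how the plotting code below uses it; the 'pdf'
# branch goes through the pgf backend and therefore needs a working LaTeX toolchain):
#     fig = matplotlib.figure.Figure()
#     fig.subplots().plot([0, 1], [0, 1])
#     png_bytes = save_fig(fig, 'png').getvalue()
#     pdf_bytes = save_fig(fig, 'pdf').getvalue()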
def plot_data(oid, dr, data, fmt='png', caption=True):
usetex = fmt == 'pdf'
lcs = {}
seen_filters = set()
for lc_oid, lc in data.items():
if len(lc) == 0:
continue
first_obs = lc[0]
fltr = first_obs['filter']
marker = 's'
if lc_oid == oid:
marker = 'o'
if fltr not in ZTF_FILTERS:
marker = 'd'
marker_size = 12
if lc_oid == oid:
marker_size = 24
if fltr not in ZTF_FILTERS:
marker_size = 36
zorder = 1
if lc_oid == oid:
zorder = 2
if fltr not in ZTF_FILTERS:
zorder = 3
lcs[lc_oid] = {
'filter': fltr,
't': [obs['mjd'] for obs in lc],
'm': [obs['mag'] for obs in lc],
'err': [obs['magerr'] for obs in lc],
'color': FILTER_COLORS[fltr],
'marker_size': marker_size,
'label_errorbar': '' if fltr in seen_filters or fltr not in ZTF_FILTERS else fltr,
'label_scatter': '' if fltr in seen_filters or fltr in ZTF_FILTERS else fltr,
'marker': marker,
'zorder': zorder,
}
seen_filters.add(fltr)
fig = matplotlib.figure.Figure(dpi=300, figsize=(6.4, 4.8), constrained_layout=True)
if caption:
fig.text(
0.50,
0.005,
f'Generated with the SNAD ZTF viewer on {datetime.now().date()}',
ha='center',
fontdict=dict(size=8, color='grey', usetex=usetex),
)
ax = fig.subplots()
ax.invert_yaxis()
ax.set_title(str(oid), usetex=usetex)
ax.set_xlabel('MJD', usetex=usetex)
ax.set_ylabel('magnitude', usetex=usetex)
ax.xaxis.set_minor_locator(AutoMinorLocator(2))
ax.yaxis.set_minor_locator(AutoMinorLocator(2))
ax.tick_params(which='major', direction='in', length=6, width=1.5)
ax.tick_params(which='minor', direction='in', length=4, width=1)
for lc in lcs.values():
ax.errorbar(
lc['t'],
lc['m'],
lc['err'],
c=lc['color'],
label=lc['label_errorbar'],
marker='',
zorder=lc['zorder'],
ls='',
alpha=0.7,
)
ax.scatter(
lc['t'],
lc['m'],
c=lc['color'],
label=lc['label_scatter'],
marker=lc['marker'],
s=lc['marker_size'],
linewidths=0.5,
edgecolors='black',
zorder=lc['zorder'],
alpha=0.7,
)
legend_anchor_y = -0.026 if usetex else -0.032
handles, labels = zip(*sorted(zip(*ax.get_legend_handles_labels()), key=lambda hl: FILTERS_ORDER[hl[1]]))
ax.legend(
flip(handles, 3), flip(labels, 3),
bbox_to_anchor=(1, legend_anchor_y),
ncol=min(3, len(seen_filters)),
columnspacing=0.5,
frameon=False,
handletextpad=0.0,
)
bytes_io = save_fig(fig, fmt)
return bytes_io.getvalue()
def plot_folded_data(oid, dr, data, period, offset=None, repeat=None, fmt='png', caption=True):
if repeat is None:
repeat = 2
usetex = fmt == 'pdf'
lcs = {}
seen_filters = set()
for lc_oid, lc in data.items():
if len(lc) == 0:
continue
first_obs = lc[0]
fltr = first_obs['filter']
lcs[lc_oid] = {
'filter': fltr,
'folded_time': np.array([obs['folded_time'] for obs in lc]),
'phase': np.array([obs['phase'] for obs in lc]),
'm': np.array([obs['mag'] for obs in lc]),
'err': np.array([obs['magerr'] for obs in lc]),
'color': FILTER_COLORS[fltr],
'marker_size': 24 if lc_oid == oid else 12,
'label': '' if fltr in seen_filters else fltr,
'marker': 'o' if lc_oid == oid else 's',
'zorder': 2 if lc_oid == oid else 1,
}
seen_filters.add(fltr)
fig = matplotlib.figure.Figure(dpi=300, figsize=(6.4, 4.8), constrained_layout=True)
if caption:
fig.text(
0.50,
0.005,
f'Generated with the SNAD ZTF viewer on {datetime.now().date()}',
ha='center',
fontdict=dict(size=8, color='grey', usetex=usetex),
)
ax = fig.subplots()
ax.invert_yaxis()
ax.set_title(f'{oid}, P = {period:.4g} days', usetex=usetex)
ax.set_xlabel('phase', usetex=usetex)
ax.set_ylabel('magnitude', usetex=usetex)
ax.xaxis.set_minor_locator(AutoMinorLocator(2))
ax.yaxis.set_minor_locator(AutoMinorLocator(2))
ax.tick_params(which='major', direction='in', length=6, width=1.5)
ax.tick_params(which='minor', direction='in', length=4, width=1)
for lc_oid, lc in sorted(lcs.items(), key=lambda item: FILTERS_ORDER[item[1]['filter']]):
for i in range(-1, repeat + 1):
label = ''
if i == 0:
label = lc['label']
ax.errorbar(
lc['phase'] + i,
lc['m'],
lc['err'],
c=lc['color'],
label=label,
marker='',
zorder=lc['zorder'],
ls='',
alpha=0.7,
)
ax.scatter(
lc['phase'] + i,
lc['m'],
c=lc['color'],
label='',
marker=lc['marker'],
s=lc['marker_size'],
linewidths=0.5,
edgecolors='black',
zorder=lc['zorder'],
alpha=0.7,
)
ax.set_xlim([-0.1, repeat + 0.1])
secax = ax.secondary_xaxis('top', functions=(lambda x: x * period, lambda x: x / period))
secax.set_xlabel('Folded time, days')
secax.minorticks_on()
secax.tick_params(direction='in', which='both')
legend_anchor_y = -0.026 if usetex else -0.032
ax.legend(
bbox_to_anchor=(1, legend_anchor_y),
ncol=min(3, len(seen_filters)),
columnspacing=0.5,
frameon=False,
handletextpad=0.0,
)
bytes_io = save_fig(fig, fmt)
return bytes_io.getvalue()
def parse_figure_args_helper(args, data=None):
fmt = args.get('format', 'png')
other_oids = frozenset(args.getlist('other_oid'))
min_mjd = args.get('min_mjd', None)
if min_mjd is not None:
min_mjd = float(min_mjd)
max_mjd = args.get('max_mjd', None)
if max_mjd is not None:
max_mjd = float(max_mjd)
caption = args.get('copyright', 'yes') != 'no'
if fmt not in MIMES:
return '', 404
if data:
data = parse_json_to_immutable(data)
else:
data = immutabledict()
return dict(fmt=fmt, other_oids=other_oids, min_mjd=min_mjd, max_mjd=max_mjd, caption=caption, additional_data=data)
@app.server.route('/<dr>/figure/<int:oid>', methods=['GET', 'POST'])
def response_figure(dr, oid):
kwargs = parse_figure_args_helper(request.args, request.get_data(cache=False))
fmt = kwargs.pop('fmt')
caption = kwargs.pop('caption')
data = get_plot_data(oid, dr, **kwargs)
img = plot_data(oid, dr, data, fmt=fmt, caption=caption)
return Response(
img,
mimetype=MIMES[fmt],
headers={'Content-disposition': f'attachment; filename={oid}.{fmt}'},
)
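# Hedged request sketch for the route above (hypothetical DR segment and OIDs):
#     GET  /dr8/figure/695211400000066?format=png
#     GET  /dr8/figure/695211400000066?format=pdf&other_oid=695211400000067&min_mjd=58800&max_mjd=59000&copyright=no
#     POST with a JSON body adds extra light curves through the additional_data path parsed above.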
@app.server.route('/<dr>/figure/<int:oid>/folded/<float:period>')
def response_figure_folded(dr, oid, period):
kwargs = parse_figure_args_helper(request.args)
fmt = kwargs.pop('fmt')
caption = kwargs.pop('caption')
repeat = request.args.get('repeat', None)
if repeat is not None:
repeat = int(repeat)
data = get_folded_plot_data(oid, dr, period=period, **kwargs)
img = plot_folded_data(oid, dr, data, period=period, repeat=repeat, fmt=fmt, caption=caption)
return Response(
img,
mimetype=MIMES[fmt],
headers={'Content-disposition': f'attachment; filename={oid}.{fmt}'},
)
def get_csv(dr, oid):
lc = find_ztf_oid.get_lc(oid, dr)
if lc is None:
raise NotFound
df = pd.DataFrame.from_records(lc)
string_io = StringIO()
df.to_csv(string_io, index=False)
return string_io.getvalue()
@app.server.route('/<dr>/csv/<int:oid>')
def response_csv(dr, oid):
try:
csv = get_csv(dr, oid)
except NotFound:
return '', 404
return Response(
csv,
mimetype='text/csv',
headers={'Content-disposition': f'attachment; filename={oid}.csv'},
)
@app.server.route('/favicon.ico')
def favicon():
return send_file('static/img/logo.svg', mimetype='image/svg+xml')
| 30.920844
| 120
| 0.55986
|
79534d99ab7f86f334c6dd2fdffca3e1a007ae1b
| 8,521
|
py
|
Python
|
docs/source/conf.py
|
mtoothman/botocore
|
dc09517fac9f6c68d0a8dd1f9c1c08ae3ae64b08
|
[
"Apache-2.0"
] | null | null | null |
docs/source/conf.py
|
mtoothman/botocore
|
dc09517fac9f6c68d0a8dd1f9c1c08ae3ae64b08
|
[
"Apache-2.0"
] | null | null | null |
docs/source/conf.py
|
mtoothman/botocore
|
dc09517fac9f6c68d0a8dd1f9c1c08ae3ae64b08
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# botocore documentation build configuration file, created by
# sphinx-quickstart on Sun Dec 2 07:26:23 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
from botocore.session import get_session
from botocore.docs import generate_docs
generate_docs(os.path.dirname(os.path.abspath(__file__)), get_session())
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'botocore'
copyright = u'2013, Mitch Garnaat'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.16.'
# The full version, including alpha/beta/rc tags.
release = '1.16.22'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
html_show_sourcelink = False
html_sidebars = {
'**': ['logo-text.html',
'globaltoc.html',
'localtoc.html',
'searchbox.html']
}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'botocoredoc'
import guzzle_sphinx_theme
extensions.append("guzzle_sphinx_theme")
html_translator_class = 'guzzle_sphinx_theme.HTMLTranslator'
html_theme_path = guzzle_sphinx_theme.html_theme_path()
html_theme = 'guzzle_sphinx_theme'
# Guzzle theme options (see theme.conf for more information)
html_theme_options = {
# hack to add tracking
"google_analytics_account": os.getenv('TRACKING', False),
"base_url": "http://docs.aws.amazon.com/aws-sdk-php/guide/latest/"
}
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'botocore.tex', u'botocore Documentation',
u'Mitch Garnaat', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'botocore', u'botocore Documentation',
[u'Mitch Garnaat'], 3)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'botocore', u'botocore Documentation',
u'Mitch Garnaat', 'botocore', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
| 32.033835
| 80
| 0.717169
|
79534e3802dfc7e7ea11d15d061631af0aa355ad
| 2,229
|
py
|
Python
|
posthog/api/cohort.py
|
Kacppian/posthog
|
faa0696e3219f9162a3fd59501e126a7cd79ec8c
|
[
"MIT"
] | null | null | null |
posthog/api/cohort.py
|
Kacppian/posthog
|
faa0696e3219f9162a3fd59501e126a7cd79ec8c
|
[
"MIT"
] | null | null | null |
posthog/api/cohort.py
|
Kacppian/posthog
|
faa0696e3219f9162a3fd59501e126a7cd79ec8c
|
[
"MIT"
] | null | null | null |
from rest_framework import request, response, serializers, viewsets
from posthog.models import Cohort
from typing import Dict, Any, Optional
from posthog.api.user import UserSerializer
from posthog.tasks.calculate_cohort import calculate_cohort
from django.db.models import QuerySet, Count
class CohortSerializer(serializers.ModelSerializer):
created_by = UserSerializer(required=False, read_only=True)
count = serializers.SerializerMethodField()
class Meta:
model = Cohort
fields = [
"id",
"name",
"groups",
"deleted",
"is_calculating",
"created_by",
"created_at",
"last_calculation",
"count",
]
def create(self, validated_data: Dict, *args: Any, **kwargs: Any) -> Cohort:
request = self.context["request"]
validated_data["created_by"] = request.user
validated_data["is_calculating"] = True
cohort = Cohort.objects.create(team=request.user.team_set.get(), **validated_data)
calculate_cohort.delay(cohort_id=cohort.pk)
return cohort
def update(self, cohort: Cohort, validated_data: Dict, *args: Any, **kwargs: Any) -> Cohort: # type: ignore
cohort.name = validated_data.get("name", cohort.name)
cohort.groups = validated_data.get("groups", cohort.groups)
cohort.deleted = validated_data.get("deleted", cohort.deleted)
cohort.is_calculating = True
cohort.save()
calculate_cohort.delay(cohort_id=cohort.pk)
return cohort
def get_count(self, action: Cohort) -> Optional[int]:
if hasattr(action, "count"):
return action.count # type: ignore
return None
class CohortViewSet(viewsets.ModelViewSet):
queryset = Cohort.objects.all()
serializer_class = CohortSerializer
def get_queryset(self) -> QuerySet:
queryset = super().get_queryset()
if self.action == "list": # type: ignore
queryset = queryset.filter(deleted=False)
queryset = queryset.annotate(count=Count("people"))
return queryset.filter(team=self.request.user.team_set.get()).select_related("created_by").order_by("id")
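# Hedged wiring sketch (not part of this module; the URL prefix is an assumption): the viewset is
# normally exposed through a DRF router in the project's URL configuration, e.g.
#     from rest_framework import routers
#     router = routers.DefaultRouter()
#     router.register(r'cohort', CohortViewSet)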
| 36.540984
| 113
| 0.656797
|
79534e4eed99d7cfd0a40a3bbba16462f6f1d956
| 374
|
py
|
Python
|
lbworkflow/tests/permissions.py
|
wearypossum4770/django-lb-workflow
|
8db36c7a8c5cf3aa2492048cad9fbf26d895c8c7
|
[
"MIT"
] | 194
|
2017-04-24T15:28:16.000Z
|
2021-12-29T03:35:28.000Z
|
lbworkflow/tests/permissions.py
|
wearypossum4770/django-lb-workflow
|
8db36c7a8c5cf3aa2492048cad9fbf26d895c8c7
|
[
"MIT"
] | 17
|
2018-05-31T07:45:42.000Z
|
2021-12-16T08:55:44.000Z
|
lbworkflow/tests/permissions.py
|
wearypossum4770/django-lb-workflow
|
8db36c7a8c5cf3aa2492048cad9fbf26d895c8c7
|
[
"MIT"
] | 67
|
2017-05-18T02:28:28.000Z
|
2022-01-20T02:05:10.000Z
|
from lbworkflow.views.permissions import BasePermission
class TestPermission(BasePermission):
def has_permission(self, request, view):
if request.user.username == "hr":
return False
return True
def has_object_permission(self, request, view, obj):
if request.user.username == "tom":
return False
return True
| 26.714286
| 56
| 0.657754
|
79534ef1e683c584299ffe5f2256bd25202e55ec
| 3,708
|
py
|
Python
|
contrib/macdeploy/custom_dsstore.py
|
catracoin/catracoin
|
0ecd8ec316e2bdb180325e26ff6bcb857ceea6c9
|
[
"MIT"
] | 1
|
2021-01-23T19:25:39.000Z
|
2021-01-23T19:25:39.000Z
|
contrib/macdeploy/custom_dsstore.py
|
catracoin/catracoin
|
0ecd8ec316e2bdb180325e26ff6bcb857ceea6c9
|
[
"MIT"
] | 1
|
2021-01-23T19:29:09.000Z
|
2021-01-25T03:58:45.000Z
|
contrib/macdeploy/custom_dsstore.py
|
catracoin/catracoin
|
0ecd8ec316e2bdb180325e26ff6bcb857ceea6c9
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) 2013-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
import biplist
from ds_store import DSStore
from mac_alias import Alias
import sys
output_file = sys.argv[1]
package_name_ns = sys.argv[2]
ds = DSStore.open(output_file, 'w+')
ds['.']['bwsp'] = {
'ShowStatusBar': False,
'WindowBounds': '{{300, 280}, {500, 343}}',
'ContainerShowSidebar': False,
'SidebarWidth': 0,
'ShowTabView': False,
'PreviewPaneVisibility': False,
'ShowToolbar': False,
'ShowSidebar': False,
'ShowPathbar': True
}
icvp = {
'gridOffsetX': 0.0,
'textSize': 12.0,
'viewOptionsVersion': 1,
'backgroundImageAlias': b'\x00\x00\x00\x00\x02\x1e\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd1\x94\\\xb0H+\x00\x05\x00\x00\x00\x98\x0fbackground.tiff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x99\xd19\xb0\xf8\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\r\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0b.background\x00\x00\x10\x00\x08\x00\x00\xd1\x94\\\xb0\x00\x00\x00\x11\x00\x08\x00\x00\xd19\xb0\xf8\x00\x00\x00\x01\x00\x04\x00\x00\x00\x98\x00\x0e\x00 \x00\x0f\x00b\x00a\x00c\x00k\x00g\x00r\x00o\x00u\x00n\x00d\x00.\x00t\x00i\x00f\x00f\x00\x0f\x00\x02\x00\x00\x00\x12\x00\x1c/.background/background.tiff\x00\x14\x01\x06\x00\x00\x00\x00\x01\x06\x00\x02\x00\x00\x0cMacintosh HD\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xce\x97\xab\xc3H+\x00\x00\x01\x88[\x88\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02u\xab\x8d\xd1\x94\\\xb0devrddsk\xff\xff\xff\xff\x00\x00\t \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07bitcoin\x00\x00\x10\x00\x08\x00\x00\xce\x97\xab\xc3\x00\x00\x00\x11\x00\x08\x00\x00\xd1\x94\\\xb0\x00\x00\x00\x01\x00\x14\x01\x88[\x88\x00\x16\xa9\t\x00\x08\xfaR\x00\x08\xfaQ\x00\x02d\x8e\x00\x0e\x00\x02\x00\x00\x00\x0f\x00\x1a\x00\x0c\x00M\x00a\x00c\x00i\x00n\x00t\x00o\x00s\x00h\x00 \x00H\x00D\x00\x13\x00\x01/\x00\x00\x15\x00\x02\x00\x14\xff\xff\x00\x00\xff\xff\x00\x00',
'backgroundColorBlue': 1.0,
'iconSize': 96.0,
'backgroundColorGreen': 1.0,
'arrangeBy': 'none',
'showIconPreview': True,
'gridSpacing': 100.0,
'gridOffsetY': 0.0,
'showItemInfo': False,
'labelOnBottom': True,
'backgroundType': 2,
'backgroundColorRed': 1.0
}
alias = Alias.from_bytes(icvp['backgroundImageAlias'])
alias.volume.name = package_name_ns
alias.volume.posix_path = '/Volumes/' + package_name_ns
alias.volume.disk_image_alias.target.filename = package_name_ns + '.temp.dmg'
alias.volume.disk_image_alias.target.carbon_path = 'Macintosh HD:Users:\x00bitcoinuser:\x00Documents:\x00bitcoin:\x00bitcoin:\x00' + package_name_ns + '.temp.dmg'
alias.volume.disk_image_alias.target.posix_path = 'Users/bitcoinuser/Documents/bitcoin/bitcoin/' + package_name_ns + '.temp.dmg'
alias.target.carbon_path = package_name_ns + ':.background:\x00background.tiff'
icvp['backgroundImageAlias'] = biplist.Data(alias.to_bytes())
ds['.']['icvp'] = icvp
ds['.']['vSrn'] = ('long', 1)
ds['Applications']['Iloc'] = (370, 156)
ds['Catra-Qt.app']['Iloc'] = (128, 156)
ds.flush()
ds.close()
| 61.8
| 1,817
| 0.72411
|
79534f755a8caf7e1bb1b2866cb130753d1c4cd5
| 1,344
|
py
|
Python
|
similar_images_AE/src/clustering/KNN.py
|
somaliz/artificio
|
2279d1fd8d5e8f931e23cafb0b9cfbb802a9d36e
|
[
"Apache-2.0"
] | 11
|
2019-12-19T08:55:52.000Z
|
2021-10-01T13:07:13.000Z
|
clustering/artificio/similar_images_AE/src/clustering/KNN.py
|
amitbcp/Scikit_Learn_TensorFlow_Examples
|
37dda063e316503d53ac45f3b104a5cf1aaa4d78
|
[
"MIT"
] | 5
|
2019-10-09T01:41:19.000Z
|
2022-02-10T00:19:01.000Z
|
clustering/artificio/similar_images_AE/src/clustering/KNN.py
|
amitbcp/Scikit_Learn_TensorFlow_Examples
|
37dda063e316503d53ac45f3b104a5cf1aaa4d78
|
[
"MIT"
] | 7
|
2019-10-08T06:10:14.000Z
|
2020-12-01T07:49:21.000Z
|
'''
KNN.py (author: Anson Wong / github: ankonzoid)
General kNN model class object using sklearn library.
'''
from sklearn.neighbors import NearestNeighbors
class KNearestNeighbours(object):
def __init__(self):
# Parameters from training/test data set
self.n_train = None # number of training examples
self.n_test = None # number of test examples
self.d = None # number of features
# Parameters for kNN model
        self.n_neighbors = None
self.algorithm = None
self.metric = None
self.model = None
super().__init__()
def compile(self, n_neighbors, algorithm, metric):
self.n_neighbors = n_neighbors
self.algorithm = algorithm
self.metric = metric
self.model = NearestNeighbors(n_neighbors=n_neighbors, algorithm=algorithm, metric=metric)
def fit(self, x_train):
self.n_train = x_train.shape[0]
self.d = x_train.shape[1]
self.model.fit(x_train) # fit kNN
def predict(self, x_test):
self.n_test = x_test.shape[0]
if x_test.shape[1] != self.d:
raise Exception("Inconsistent feature dimensions between training and test data!")
distances, indices = self.model.kneighbors(x_test, return_distance=True) # predict kNN
return distances, indices
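# Hedged usage sketch (added for illustration, not part of the original module): fit on random
# feature vectors and query the 3 nearest training neighbours of each test point.
if __name__ == "__main__":
    import numpy as np
    x_train = np.random.rand(100, 8)
    x_test = np.random.rand(5, 8)
    knn = KNearestNeighbours()
    knn.compile(n_neighbors=3, algorithm="ball_tree", metric="euclidean")
    knn.fit(x_train)
    distances, indices = knn.predict(x_test)
    print(distances.shape, indices.shape)  # both are (5, 3)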
| 33.6
| 98
| 0.65625
|
795350f77e289effdecbdc0147896e4a644f5e84
| 3,219
|
py
|
Python
|
python/Command.py
|
Sbte/hymls
|
75cb1e70eb0b3d71085e481cc9d418bdfada1a35
|
[
"Apache-2.0"
] | null | null | null |
python/Command.py
|
Sbte/hymls
|
75cb1e70eb0b3d71085e481cc9d418bdfada1a35
|
[
"Apache-2.0"
] | 17
|
2019-03-12T15:26:53.000Z
|
2021-02-02T20:07:02.000Z
|
python/Command.py
|
Sbte/hymls
|
75cb1e70eb0b3d71085e481cc9d418bdfada1a35
|
[
"Apache-2.0"
] | 2
|
2019-07-03T14:29:05.000Z
|
2022-02-21T12:44:40.000Z
|
"""This file contains classes that implement a thread that runs a command.
The process can then be killed after a certain time.
"""
import subprocess
import threading
import datetime
import os
from collections import OrderedDict
def git_command(command, suppress_errors=False):
p = None
if suppress_errors:
p = subprocess.Popen('git ' + command, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, shell=True)
else:
p = subprocess.Popen('git ' + command, stdout=subprocess.PIPE, shell=True)
(out, err) = p.communicate()
errno = p.poll()
if errno != 0 and not suppress_errors:
raise Exception('Command git ' + command + ' failed')
return out
class Command(object):
def __init__(self, cmd, env=None):
self.cmd = cmd
self.process = None
self.out = ''
self.err = ''
self.env = env
if self.env is not None:
self.env.update(os.environ)
def run(self, timeout=600):
def target():
self.out += 'Thread started\n'
self.out += 'Running ' + self.cmd + '\n'
self.process = subprocess.Popen(self.cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True, executable="/bin/bash", env=self.env)
(out, err) = self.process.communicate()
if out:
self.out += out
if err:
self.err += err
self.out += 'Thread finished at ' + str(datetime.datetime.now()) + '\n'
thread = threading.Thread(target=target)
thread.start()
thread.join(timeout)
killed = False
if thread.is_alive():
self.kill()
thread.join()
killed = True
if self.process is None:
return (-1, killed)
self.out += 'Returncode is ' + str(self.process.returncode) + '\n'
return (self.process.returncode, killed)
def kill(self):
self.out += 'Terminating process\n'
subprocess.call('killall -9 '+self.cmd.partition(' ')[0], shell=True)
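# Hedged usage sketch (hypothetical command string): run a shell command with a 5 second budget;
# `killed` is True when the timeout fired and the process had to be terminated.
#     cmd = Command('sleep 60')
#     returncode, killed = cmd.run(timeout=5)   # killed == True after ~5 s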
class ParallelCommand(Command):
def __init__(self, cmd, env=None, procs=1, nodes=1):
Command.__init__(self, cmd, env)
self.procs = procs
self.nodes = nodes
np = procs // nodes
self.mpis = OrderedDict([('mpiexec', 'mpiexec -n %d -npernode %d' % (self.procs, np)), ('mpirun', 'mpirun -n %d -npernode %d' % (self.procs, np)), ('srun', 'srun --nodes=%d --ntasks=%d --ntasks-per-node=%d' % (self.nodes, self.procs, np))])
self.orig_cmd = cmd
self.mpi = None
for mpi in self.mpis.iterkeys():
p = subprocess.Popen(mpi+' --help', stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True, executable="/bin/bash", env=self.env)
p.communicate()
if p.returncode == 0:
self.cmd = self.mpis[mpi] + ' ' + cmd
self.mpi = mpi
break
def kill(self):
super(ParallelCommand, self).kill()
if self.mpi:
subprocess.call('killall -9 '+self.mpi, shell=True)
if self.orig_cmd:
subprocess.call('killall -9 '+self.orig_cmd.partition(' ')[0], shell=True)
| 34.98913
| 248
| 0.574713
|
7953516139c48c15aa2a542890a5affe123f3e87
| 3,707
|
py
|
Python
|
Non-React Stuff/alexa/lambda/skill_env/ask_sdk_model/interfaces/amazonpay/model/v1/provider_credit.py
|
ReciPull/reciprogram
|
b8c7e4610f95c5beafad3c9880fc5beceec523e7
|
[
"MIT"
] | 1
|
2019-09-16T19:13:13.000Z
|
2019-09-16T19:13:13.000Z
|
Non-React Stuff/alexa/lambda/skill_env/ask_sdk_model/interfaces/amazonpay/model/v1/provider_credit.py
|
ReciPull/reciprogram
|
b8c7e4610f95c5beafad3c9880fc5beceec523e7
|
[
"MIT"
] | 5
|
2021-03-09T03:30:14.000Z
|
2022-02-26T10:42:17.000Z
|
alexa/reciPullLambda/ask_sdk_model/interfaces/amazonpay/model/v1/provider_credit.py
|
ReciPull/recipull.github.io
|
e6b800af02658bb7948297c4ddc1b7af6d978839
|
[
"MIT"
] | null | null | null |
# coding: utf-8
#
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file
# except in compliance with the License. A copy of the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
# the specific language governing permissions and limitations under the License.
#
import pprint
import re # noqa: F401
import six
import typing
from enum import Enum
if typing.TYPE_CHECKING:
from typing import Dict, List, Optional
from datetime import datetime
from ask_sdk_model.interfaces.amazonpay.model.v1.price import Price
class ProviderCredit(object):
"""
:param provider_id: This is required only for Ecommerce provider (Solution provider) use cases.
:type provider_id: (optional) str
:param credit:
:type credit: (optional) ask_sdk_model.interfaces.amazonpay.model.v1.price.Price
"""
deserialized_types = {
'provider_id': 'str',
'credit': 'ask_sdk_model.interfaces.amazonpay.model.v1.price.Price'
} # type: Dict
attribute_map = {
'provider_id': 'providerId',
'credit': 'credit'
} # type: Dict
def __init__(self, provider_id=None, credit=None):
# type: (Optional[str], Optional[Price]) -> None
"""
:param provider_id: This is required only for Ecommerce provider (Solution provider) use cases.
:type provider_id: (optional) str
:param credit:
:type credit: (optional) ask_sdk_model.interfaces.amazonpay.model.v1.price.Price
"""
self.__discriminator_value = None # type: str
self.provider_id = provider_id
self.credit = credit
def to_dict(self):
# type: () -> Dict[str, object]
"""Returns the model properties as a dict"""
result = {} # type: Dict
for attr, _ in six.iteritems(self.deserialized_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else
x.value if isinstance(x, Enum) else x,
value
))
elif isinstance(value, Enum):
result[attr] = value.value
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else
(item[0], item[1].value)
if isinstance(item[1], Enum) else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
# type: () -> str
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
# type: () -> str
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
# type: (object) -> bool
"""Returns true if both objects are equal"""
if not isinstance(other, ProviderCredit):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
# type: (object) -> bool
"""Returns true if both objects are not equal"""
return not self == other
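# Hedged usage sketch (identifier made up): keys of to_dict() follow deserialized_types, so a
# credit-less instance serializes with credit=None.
#     pc = ProviderCredit(provider_id='A2EXAMPLEPROVIDER')
#     pc.to_dict()   # {'provider_id': 'A2EXAMPLEPROVIDER', 'credit': None}
#     pc == ProviderCredit(provider_id='A2EXAMPLEPROVIDER')   # True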
| 32.517544
| 103
| 0.596439
|
795352afe007d411067caa41e58b4c96efc917ea
| 392
|
py
|
Python
|
GPy/mappings/__init__.py
|
ekalosak/GPy
|
ff82f12c3d321bfc3ce6615447fad25aea9de6bd
|
[
"BSD-3-Clause"
] | 1,685
|
2015-01-03T14:46:25.000Z
|
2022-03-30T02:41:35.000Z
|
GPy/mappings/__init__.py
|
ekalosak/GPy
|
ff82f12c3d321bfc3ce6615447fad25aea9de6bd
|
[
"BSD-3-Clause"
] | 778
|
2015-01-15T18:21:25.000Z
|
2022-03-30T14:52:32.000Z
|
GPy/mappings/__init__.py
|
ekalosak/GPy
|
ff82f12c3d321bfc3ce6615447fad25aea9de6bd
|
[
"BSD-3-Clause"
] | 584
|
2015-01-06T06:30:43.000Z
|
2022-03-29T13:05:33.000Z
|
# Copyright (c) 2013, 2014 GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
from .kernel import Kernel
from .linear import Linear
from .mlp import MLP
from .mlpext import MLPext
from .additive import Additive
from .compound import Compound
from .constant import Constant
from .identity import Identity
from .piecewise_linear import PiecewiseLinear
| 28
| 59
| 0.80102
|
795355213abf7605ca18cb432dd3cf95f191ca63
| 884
|
py
|
Python
|
api/admin.py
|
codydeny/PlaceMe-Backend
|
6d89e5384bcefbebf6bd1cc37415f405f9502988
|
[
"MIT"
] | 2
|
2020-09-29T02:51:33.000Z
|
2020-10-06T09:38:11.000Z
|
api/admin.py
|
codydeny/PlaceMe-Backend
|
6d89e5384bcefbebf6bd1cc37415f405f9502988
|
[
"MIT"
] | null | null | null |
api/admin.py
|
codydeny/PlaceMe-Backend
|
6d89e5384bcefbebf6bd1cc37415f405f9502988
|
[
"MIT"
] | 1
|
2020-09-29T09:40:32.000Z
|
2020-09-29T09:40:32.000Z
|
from django.contrib import admin
from .models import UserProfile, Department, Course, Branch, Faculty, Student, HighSchool, Intermediate, Diploma, Graduation, PostGraduation, Project, PrevSemesterData, HR, Company, JobNotification, JNeligibleCourse, AppliedJobNotification
# Register your models here.
admin.site.register(UserProfile)
admin.site.register(Department)
admin.site.register(Course)
admin.site.register(Branch)
admin.site.register(Faculty)
admin.site.register(Student)
admin.site.register(HighSchool)
admin.site.register(Intermediate)
admin.site.register(Diploma)
admin.site.register(Graduation)
admin.site.register(PostGraduation)
admin.site.register(Project)
admin.site.register(PrevSemesterData)
admin.site.register(HR)
admin.site.register(Company)
admin.site.register(JNeligibleCourse)
admin.site.register(JobNotification)
admin.site.register(AppliedJobNotification)
| 38.434783
| 239
| 0.839367
|
7953562e78bc5e1654617becc2a5d66ae6ccdef6
| 4,372
|
py
|
Python
|
scripts/payload2generator.py
|
TheOddZer0/MiShell32
|
3ea9187ba73cf1efe5396df48aae95a34be370bb
|
[
"Apache-2.0"
] | 5
|
2021-07-01T19:18:11.000Z
|
2022-02-23T00:36:08.000Z
|
scripts/payload2generator.py
|
TheOddZer0/MiShell32
|
3ea9187ba73cf1efe5396df48aae95a34be370bb
|
[
"Apache-2.0"
] | 1
|
2021-09-26T16:23:57.000Z
|
2021-09-28T15:04:50.000Z
|
scripts/payload2generator.py
|
TheOddZer0/MiShell32
|
3ea9187ba73cf1efe5396df48aae95a34be370bb
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
"""
Generate a generator.py from currently built payload
"""
import os
import sys
import os.path as path
import shutil
import subprocess
import shlex
# Configs
# Used to change some usual settings
# What will the file name of the backup of generator.py will
BACKUP_NAME = "generator.py.old"
# Wait, What will the name of the generator be
GENERATOR_NAME = "generator.py"
# Where of bin/ should we expect the payload
PAYLOAD_NAME = "payload.out"
# Where should objcopy put the result
OBJCOPY_RESULT = "payload"
ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
TEMPLATE = r'''#!/usr/bin/env python3
"""
generator.py, Generate a payload with any need to compile anything.
See the original repo for more info: https://github.com/TheOddZer0/mishell32
"""
import sys
import struct
def ready_port(num: str):
chunks = [num[i:i+2] for i in range(0, len(num), 2)]
return "\\x".join(chunks)
def ensure_length(num: str):
if len(num) % 2 != 0:
num = "0" + num
return num
shellcode = "{first_line}"
{more_lines}
try:
ip = sys.argv[1]
port = int(sys.argv[2])
except IndexError:
print(f"Usage: {sys.argv[0]} IP PORT")
raise SystemExit(1)
except ValueError:
print(f"port must be an integer")
raise SystemExit(1)
if port not in range(65536):
print(f"Invalid port '{port}'")
raise SystemExit(1)
parts = ip.split(".")
ip_parts = []
offset = []
for part in parts:
try:
part = int(part)
except ValueError:
print(f"Invalid IP '{ip}'")
raise SystemExit(1)
if part not in range(256):
print(f"Invalid IP '{ip}'")
raise SystemExit(1)
if part == 255:
print("This method will not work on your IP")
raise SystemExit(1)
_hexed = ensure_length(hex(part + 1)[2:])
if not ("00" in _hexed):
ip_parts.append(str(_hexed))
offset.append("01")
else:
ip_parts.append(str(ensure_length(hex(part + 2)[2:])))
offset.append("02")
# Replace the IP with the new ip
shellcode = shellcode.replace("\\x80\\x01\\x01\\x02", "\\x".join(["", *ip_parts]))
# The offset may need to change, Taken care now
shellcode = shellcode.replace("\\x01\\x01\\x01\\x01", "\\x".join(["", *offset]))
# Now replacing the port, then we are done
shellcode = shellcode.replace("\\x11\\x5c", "\\x" + ready_port(ensure_length(hex(port)[2:])))
print(shellcode)
'''
LINE_TEMPLATE = 'shellcode += "{}"\n'
def ensure_length(num: str):
if len(num) % 2 != 0:
num = "0" + num
return num
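# For example, ensure_length("f") returns "0f" and ensure_length("1f4") returns "01f4", so every
# byte emitted into the "\x.." escape sequences keeps a two-digit hex form.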
# Well, a programmer is an app generator, so,
# I'm a generator of a generator which generates a generator which
# that generator generates a payload...
print("Generating the generator itself...")
try:
subprocess.call("objcopy --dump-section .text={} {}".format(
shlex.quote(path.join(ROOT, "build", OBJCOPY_RESULT)),
shlex.quote(path.join(ROOT, "bin", PAYLOAD_NAME))).split(),
shell=False)
except Exception as e:
sys.stderr.write(f"objcopy call failed: {e}\n")
raise SystemExit(1)
with open(os.path.join(ROOT, "build", OBJCOPY_RESULT), "rb") as p:
data = p.read()
try:
os.remove(os.path.join(ROOT, "build", OBJCOPY_RESULT))
except Exception as e:
sys.stderr.write(f"Failed to remove objcopy output: {e} Ignoring...\n")
chunks = [data[i:i+12] for i in range(0, len(data), 12)]
generator = TEMPLATE.replace("{first_line}", "".join(
["\\\\x" + ensure_length(hex(i)[2:]) for i in chunks[0]]))
generator = generator.replace("{more_lines}", "".join(
[LINE_TEMPLATE.format(
"".join(["\\\\x" + ensure_length(hex(i)[2:]) for i in chunk])) for chunk in chunks[1:]]))
sys.stderr.write("Taking backup of current generator...\n")
try:
if path.exists(path.join(ROOT, "scripts", BACKUP_NAME)):
os.remove(path.join(ROOT, "scripts", BACKUP_NAME))
shutil.copy(path.join(ROOT, "scripts", GENERATOR_NAME),
path.join(ROOT, "scripts", BACKUP_NAME))
except FileNotFoundError:
sys.stderr.write("No generator to take backup from, Ignoring...\n")
except PermissionError as e:
sys.stderr.write(f"Cannot take backup ({e.strerror})\n")
    if input("Should we continue [y/n]").lower().strip() != "y":
        raise SystemExit(1)
with open(path.join(ROOT, "scripts", GENERATOR_NAME), "w") as gen:
gen.write(generator)
| 28.575163
| 97
| 0.653477
|
795356a9717bfbc87d40b3f4c8ab1c4c21917c2b
| 8,377
|
py
|
Python
|
tools/train.py
|
dyabel/wsod-mmdet
|
60fc1993ea298f992b160b5599a6134702ac0d4f
|
[
"Apache-2.0"
] | 6
|
2021-10-09T05:34:04.000Z
|
2022-03-31T00:36:55.000Z
|
tools/train.py
|
dyabel/wsod-mmdet
|
60fc1993ea298f992b160b5599a6134702ac0d4f
|
[
"Apache-2.0"
] | null | null | null |
tools/train.py
|
dyabel/wsod-mmdet
|
60fc1993ea298f992b160b5599a6134702ac0d4f
|
[
"Apache-2.0"
] | null | null | null |
import argparse
import copy
import os
import os.path as osp
import time
import warnings
import mmcv
import torch
from mmcv import Config, DictAction
from mmcv.runner import get_dist_info, init_dist
from mmcv.utils import get_git_hash
from mmdet import __version__
from mmdet.apis import set_random_seed, train_detector
from mmdet.datasets import build_dataset
from mmdet.models import build_detector
from mmdet.utils import collect_env, get_root_logger
import wandb.sdk.internal.datastore
def parse_args():
parser = argparse.ArgumentParser(description='Train a detector')
parser.add_argument('config', help='train config file path')
parser.add_argument('--work-dir', help='the dir to save logs and models')
parser.add_argument('--name',default='wsod-mmdet')
parser.add_argument(
'--resume-from', help='the checkpoint file to resume from')
parser.add_argument(
'--no-validate',
action='store_true',
help='whether not to evaluate the checkpoint during training')
group_gpus = parser.add_mutually_exclusive_group()
group_gpus.add_argument(
'--gpus',
type=int,
help='number of gpus to use '
'(only applicable to non-distributed training)')
group_gpus.add_argument(
'--gpu-ids',
type=int,
nargs='+',
help='ids of gpus to use '
'(only applicable to non-distributed training)')
parser.add_argument('--seed', type=int, default=None, help='random seed')
parser.add_argument(
'--deterministic',
action='store_true',
help='whether to set deterministic options for CUDNN backend.')
parser.add_argument(
'--options',
nargs='+',
action=DictAction,
help='override some settings in the used config, the key-value pair '
'in xxx=yyy format will be merged into config file (deprecate), '
'change to --cfg-options instead.')
parser.add_argument(
'--cfg-options',
nargs='+',
action=DictAction,
help='override some settings in the used config, the key-value pair '
'in xxx=yyy format will be merged into config file. If the value to '
'be overwritten is a list, it should be like key="[a,b]" or key=a,b '
'It also allows nested list/tuple values, e.g. key="[(a,b),(c,d)]" '
'Note that the quotation marks are necessary and that no white space '
'is allowed.')
parser.add_argument(
'--launcher',
choices=['none', 'pytorch', 'slurm', 'mpi'],
default='none',
help='job launcher')
parser.add_argument('--local_rank', type=int, default=0)
args = parser.parse_args()
if 'LOCAL_RANK' not in os.environ:
os.environ['LOCAL_RANK'] = str(args.local_rank)
if args.options and args.cfg_options:
raise ValueError(
'--options and --cfg-options cannot be both '
'specified, --options is deprecated in favor of --cfg-options')
if args.options:
warnings.warn('--options is deprecated in favor of --cfg-options')
args.cfg_options = args.options
return args
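# Hedged example invocation (hypothetical config path and work dir):
#     python tools/train.py configs/wsod/faster_rcnn_voc.py --work-dir work_dirs/wsod_voc --gpus 1 --seed 0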
def main():
timestamp = time.strftime('%Y%m%d_%H%M%S', time.localtime())
os.environ["WANDB_RUN_GROUP"] = "experiment-" + wandb.util.generate_id()
args = parse_args()
wandb.init(project=args.name)
# hyperparameter_defaults = dict(
# oam_max_num=10,
# score_thr1=0.3,
# score_thr2=0.7,
# empty_cf=30,
# lr=0.001,
# )
wandb.config.oam_max_num = 20
wandb.config.score_thr2 = 0.05
wandb.config.empty_cf = 30
wandb.config.ss_cf_thr = 50
# wandb.config.lr = 0.008
wandb.config.warm_iter = -1
wandb.config.strong_shot = 26
# wandb.init(config=hyperparameter_defaults)
wandb.config.config_file = args.config
wandb.config.work_dir = args.work_dir
wandb.config.max_map = 0
wandb.config.map = 0
wandb.config.loss_weak_scale = 1.0
cfg = Config.fromfile(args.config)
if args.cfg_options is not None:
cfg.merge_from_dict(args.cfg_options)
# import modules from string list.
if cfg.get('custom_imports', None):
from mmcv.utils import import_modules_from_strings
import_modules_from_strings(**cfg['custom_imports'])
# set cudnn_benchmark
if cfg.get('cudnn_benchmark', False):
torch.backends.cudnn.benchmark = True
# work_dir is determined in this priority: CLI > segment in file > filename
if args.work_dir is not None:
# update configs according to CLI args if args.work_dir is not None
cfg.work_dir = args.work_dir
elif cfg.get('work_dir', None) is None:
# use config filename as default work_dir if cfg.work_dir is None
cfg.work_dir = osp.join('./work_dirs',
osp.splitext(osp.basename(args.config))[0])
if args.resume_from is not None:
cfg.resume_from = args.resume_from
if args.gpu_ids is not None:
cfg.gpu_ids = args.gpu_ids
else:
cfg.gpu_ids = range(1) if args.gpus is None else range(args.gpus)
# init distributed env first, since logger depends on the dist info.
if args.launcher == 'none':
distributed = False
else:
distributed = True
init_dist(args.launcher, **cfg.dist_params)
# re-set gpu_ids with distributed training mode
_, world_size = get_dist_info()
cfg.gpu_ids = range(world_size)
# create work_dir
mmcv.mkdir_or_exist(osp.abspath(cfg.work_dir))
# dump config
cfg.dump(osp.join(cfg.work_dir, osp.basename(args.config)))
# init the logger before other steps
wandb.config.time = timestamp
log_file = osp.join(cfg.work_dir, f'{timestamp}.log')
wandb.config.logfile = log_file
logger = get_root_logger(log_file=log_file, log_level=cfg.log_level)
# init the meta dict to record some important information such as
# environment info and seed, which will be logged
meta = dict()
# log env info
env_info_dict = collect_env()
env_info = '\n'.join([(f'{k}: {v}') for k, v in env_info_dict.items()])
dash_line = '-' * 60 + '\n'
logger.info('Environment info:\n' + dash_line + env_info + '\n' +
dash_line)
meta['env_info'] = env_info
meta['config'] = cfg.pretty_text
# log some basic info
logger.info(f'Distributed training: {distributed}')
logger.info(f'Config:\n{cfg.pretty_text}')
# set random seeds
if args.seed is not None:
logger.info(f'Set random seed to {args.seed}, '
f'deterministic: {args.deterministic}')
set_random_seed(args.seed, deterministic=args.deterministic)
cfg.seed = args.seed
meta['seed'] = args.seed
meta['exp_name'] = osp.basename(args.config)
model = build_detector(
cfg.model,
train_cfg=cfg.get('train_cfg'),
test_cfg=cfg.get('test_cfg'))
wandb.config.val_data = cfg.data.val['ann_file']
if cfg.data.train['type'] == 'RepeatDataset':
wandb.config.train_data_type = cfg.data.train['dataset']['type']
wandb.config.repeat_times = cfg.data.train['times']
wandb.config.ann_file = cfg.data.train['dataset']['ann_file']
else:
wandb.config.train_data_type = cfg.data.train['type']
wandb.config.repeat_times = 1
wandb.config.ann_file = cfg.data.train['ann_file']
datasets = [build_dataset(cfg.data.train)]
if len(cfg.workflow) == 2:
val_dataset = copy.deepcopy(cfg.data.val)
val_dataset.pipeline = cfg.data.train.pipeline
datasets.append(build_dataset(val_dataset))
if cfg.checkpoint_config is not None:
# save mmdet version, config file content and class names in
# checkpoints as meta data
cfg.checkpoint_config.meta = dict(
mmdet_version=__version__ + get_git_hash()[:7],
CLASSES=datasets[0].CLASSES)
# add an attribute for visualization convenience
model.CLASSES = datasets[0].CLASSES
train_detector(
model,
datasets,
cfg,
distributed=distributed,
validate=(not args.no_validate),
timestamp=timestamp,
meta=meta)
# wandb.save(os.path.join(wandb.config.work_dir,'mymodel.h5'))
# fitlog.finish()
wandb.save("mymodel.h5")
if __name__ == '__main__':
main()
| 36.903084
| 79
| 0.653337
|
7953574f7e114520f8da618835e7bcfc4bb03f5b
| 222
|
py
|
Python
|
dadmatools/datasets/datasets/Wikipedia/info.py
|
njzr/DadmaTools
|
64ff407d5d818d5a9216340cccf0d1cc909d3b1b
|
[
"Apache-2.0"
] | 25
|
2021-12-01T15:19:36.000Z
|
2022-03-12T12:50:28.000Z
|
dadmatools/datasets/datasets/Wikipedia/info.py
|
ebad84/DadmaTools
|
b26ad8aa834f642d49bd120bd7cf1fdf40741be1
|
[
"Apache-2.0"
] | 3
|
2021-12-14T06:34:52.000Z
|
2022-02-17T08:23:20.000Z
|
dadmatools/datasets/datasets/Wikipedia/info.py
|
ebad84/DadmaTools
|
b26ad8aa834f642d49bd120bd7cf1fdf40741be1
|
[
"Apache-2.0"
] | 6
|
2021-10-12T13:44:17.000Z
|
2022-03-07T13:54:17.000Z
|
{
"name": "WikipediaCorpus",
"version": "20211201",
"task": "Corpus",
"description": "fawiki dump progress on 20211201 / All pages, current versions only.",
"size": 2184117,
"filenames": ["cleaned_wiki.txt"]
}
| 24.666667
| 88
| 0.653153
|
79535888e88f0de0b0d2c00d787ad624cfc5d8cc
| 1,749
|
py
|
Python
|
install_texlive/__init__.py
|
maxnoe/texlive-batch-installation
|
13bfdadd9fa951c5edd5830e6da07708bdce43e6
|
[
"MIT"
] | null | null | null |
install_texlive/__init__.py
|
maxnoe/texlive-batch-installation
|
13bfdadd9fa951c5edd5830e6da07708bdce43e6
|
[
"MIT"
] | null | null | null |
install_texlive/__init__.py
|
maxnoe/texlive-batch-installation
|
13bfdadd9fa951c5edd5830e6da07708bdce43e6
|
[
"MIT"
] | null | null | null |
import logging
import subprocess as sp
import os
import pexpect
import re
import requests
from io import BytesIO
import tarfile
__version__ = '0.3.0'
log = logging.getLogger(__name__)
has_curl = sp.call(['which', 'curl'], stdout=sp.PIPE) == 0
has_wget = sp.call(['which', 'wget'], stdout=sp.PIPE) == 0
URL = 'http://mirror.ctan.org/systems/texlive/tlnet/'
OLDURL = 'http://ftp.math.utah.edu/pub/tex/historic/systems/texlive/{v}/tlnet-final/'
def is_current(version):
r = requests.get('https://tug.org/texlive/')
r.raise_for_status()
m = re.search(r'Current release: TeX Live ([0-9]{4})', r.text)
if not m:
raise ValueError('Could not determine current TeX Live version')
current_version = int(m.groups()[0])
log.debug('Current version of TeX Live is {}'.format(current_version))
return current_version == version
def download(version=None, outdir='.'):
os.makedirs(outdir, exist_ok=True)
if version is None or is_current(version):
url = URL + 'install-tl-unx.tar.gz'
else:
url = OLDURL.format(v=version) + 'install-tl-unx.tar.gz'
log.debug('Downloading from {}'.format(url))
ret = requests.get(url)
ret.raise_for_status()
tar = tarfile.open(fileobj=BytesIO(ret.content), mode='r:gz')
tar.extractall(outdir)
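# Hedged usage sketch: fetch the installer for the current release, or pin an older year to pull
# from the historic mirror instead.
#     download(outdir='installer')                       # current TeX Live
#     download(version=2019, outdir='installer-2019')    # historic tlnet-final tree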
def command(process, pattern, send, **kwargs):
process.expect(pattern, **kwargs)
process.sendline(send)
def get_size(process):
timeout = process.timeout
process.timeout = 1
lines = ''
try:
while True:
lines += process.readline().decode()
except pexpect.TIMEOUT:
pass
size = re.findall(r'disk space required: ([0-9]+ MB)', lines)[-1]
process.timeout = timeout
return size
| 25.347826
| 85
| 0.661521
|
795358e0f166153b511c0ff13074ec2c6b5dac38
| 217
|
py
|
Python
|
play/cosmocalc.py
|
tamarastro/tastro
|
76f9175531652a86dc938f4fdaf296a9d6f422bc
|
[
"MIT"
] | null | null | null |
play/cosmocalc.py
|
tamarastro/tastro
|
76f9175531652a86dc938f4fdaf296a9d6f422bc
|
[
"MIT"
] | null | null | null |
play/cosmocalc.py
|
tamarastro/tastro
|
76f9175531652a86dc938f4fdaf296a9d6f422bc
|
[
"MIT"
] | null | null | null |
import numpy as np
from astropy.cosmology import FlatLambdaCDM as cosmo
cosmo1 = cosmo(H0=70, Om0=0.3)
print(cosmo1)
cosmo2 = cosmo(H0=72, Om0=0.28)
print(cosmo2)
print('But this might be better.... editing master.')
| 21.7
| 53
| 0.746544
|
79535aa210d61d3739686ab4f0a268719390e451
| 2,841
|
py
|
Python
|
setup/setup.py
|
Ordiel/brython
|
58d9372dd338a6a258df8e867caeb8097002d876
|
[
"BSD-3-Clause"
] | 1
|
2019-07-28T09:04:44.000Z
|
2019-07-28T09:04:44.000Z
|
setup/setup.py
|
Ordiel/brython
|
58d9372dd338a6a258df8e867caeb8097002d876
|
[
"BSD-3-Clause"
] | null | null | null |
setup/setup.py
|
Ordiel/brython
|
58d9372dd338a6a258df8e867caeb8097002d876
|
[
"BSD-3-Clause"
] | null | null | null |
# Always prefer setuptools over distutils
from setuptools import setup, find_packages
import os
import shutil
import sys
with open('README.rst', encoding='utf-8') as fobj:
LONG_DESCRIPTION = fobj.read()
command = sys.argv[1]
if command == "sdist":
# before creating the distribution, copy files from other locations in
# the repository
print("copying files...")
this_dir = os.getcwd()
root_dir = os.path.dirname(this_dir)
src_dir = os.path.join(root_dir, "www", "src")
data_dir = os.path.join(this_dir, "data")
# copy files from /www/src
for fname in ["brython.js", "brython_stdlib.js", "unicode.txt"]:
shutil.copyfile(os.path.join(src_dir, fname),
os.path.join(data_dir, fname))
# copy demo.html
with open(os.path.join(root_dir, 'www', 'demo.html'), encoding="utf-8") as f:
demo = f.read()
start_tag = "<!-- start copy -->"
end_tag = "<!-- end copy -->"
start = demo.find(start_tag)
if start == -1:
raise Exception("No tag <!-- start copy --> in demo.html")
end = demo.find(end_tag)
if end == -1:
raise Exception("No tag <!-- end copy --> in demo.html")
body = demo[start + len(start_tag) : end].strip()
with open(os.path.join(data_dir, "demo.tmpl"), encoding="utf-8") as f:
template = f.read()
demo = template.replace("{{body}}", body)
with open(os.path.join(data_dir, "demo.html"), "w", encoding="utf-8") as out:
out.write(demo)
setup(
name='brython',
version='3.7.3',
description='Brython is an implementation of Python 3 running in the browser',
long_description=LONG_DESCRIPTION,
# The project's main homepage.
url='http://brython.info',
# Author details
author='Pierre Quentel',
author_email='quentel.pierre@orange.fr',
packages = ['data', 'data.tools'],
# Choose your license
license='BSD',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# Indicate who your project is intended for
'Intended Audience :: Developers',
'Topic :: Software Development :: Interpreters',
'Operating System :: OS Independent',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: BSD License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 3',
],
# What does your project relate to?
keywords='Python browser',
py_modules=["brython", "list_modules"],
package_data={
'data': [
'README.txt',
'demo.html',
'brython.js',
'brython_stdlib.js',
'unicode.txt'
]
}
)
| 28.128713
| 82
| 0.612812
|
79535af131ff427b006c3e04558d8214ccb87881
| 355
|
py
|
Python
|
tests/parser/syntax/test_string.py
|
leonprou/vyper
|
8228395a9f4d4b55dbb8dcded3f4eb1ccfbbbe4b
|
[
"MIT"
] | null | null | null |
tests/parser/syntax/test_string.py
|
leonprou/vyper
|
8228395a9f4d4b55dbb8dcded3f4eb1ccfbbbe4b
|
[
"MIT"
] | null | null | null |
tests/parser/syntax/test_string.py
|
leonprou/vyper
|
8228395a9f4d4b55dbb8dcded3f4eb1ccfbbbe4b
|
[
"MIT"
] | null | null | null |
import pytest
from vyper import (
compiler,
)
valid_list = [
"""
@public
def foo() -> string[10]:
return "badminton"
""",
"""
@public
def foo():
x: string[11] = "¡très bien!"
"""
]
@pytest.mark.parametrize('good_code', valid_list)
def test_string_success(good_code):
assert compiler.compile_code(good_code) is not None
| 14.791667
| 55
| 0.630986
|
79535bbf2b96668d1adec23c505225ae38a07f08
| 757
|
py
|
Python
|
tests/test_sqlalchemy.py
|
tackle-io/woodchipper
|
d2d92f7c79c940b5263795c0b18b3b46386fbc53
|
[
"MIT"
] | 3
|
2022-02-04T16:21:00.000Z
|
2022-02-28T22:33:05.000Z
|
tests/test_sqlalchemy.py
|
tackle-io/woodchipper
|
d2d92f7c79c940b5263795c0b18b3b46386fbc53
|
[
"MIT"
] | 3
|
2022-01-10T16:53:40.000Z
|
2022-03-03T13:12:58.000Z
|
tests/test_sqlalchemy.py
|
tackle-io/woodchipper
|
d2d92f7c79c940b5263795c0b18b3b46386fbc53
|
[
"MIT"
] | null | null | null |
import sqlalchemy
import woodchipper
from woodchipper.configs import DevLogToStdout
from woodchipper.context import LoggingContext
from woodchipper.monitors.sqlalchemy import SQLAlchemyMonitor
engine = sqlalchemy.create_engine("sqlite:///:memory:")
def connect(sa_monitor):
sa_monitor.engine = engine
SQLAlchemyMonitor.instance_setup_cb = connect
woodchipper.configure(config=DevLogToStdout, facilities={"": "INFO"}, monitors=[SQLAlchemyMonitor])
def test_sqlalchemy():
logger = woodchipper.get_logger(__name__)
with LoggingContext(test="sqlalchemy"):
with engine.connect() as conn:
rows = conn.execute("SELECT 1")
logger.info("SQL result.", row=rows.fetchone())
# FIXME: Actually test something here
| 29.115385
| 99
| 0.752972
|
79535be8f2bf5eb6cc1c2a724758f30cc8995338
| 5,382
|
py
|
Python
|
website/drawquest/apps/brushes/migrations/0016_paintbucket_copy.py
|
bopopescu/drawquest-web
|
8d8f9149b6efeb65202809a5f8916386f58a1b3b
|
[
"BSD-3-Clause"
] | 19
|
2015-11-10T17:36:20.000Z
|
2021-04-12T07:36:00.000Z
|
website/drawquest/apps/brushes/migrations/0016_paintbucket_copy.py
|
bopopescu/drawquest-web
|
8d8f9149b6efeb65202809a5f8916386f58a1b3b
|
[
"BSD-3-Clause"
] | 1
|
2021-06-09T03:45:34.000Z
|
2021-06-09T03:45:34.000Z
|
website/drawquest/apps/brushes/migrations/0016_paintbucket_copy.py
|
bopopescu/drawquest-web
|
8d8f9149b6efeb65202809a5f8916386f58a1b3b
|
[
"BSD-3-Clause"
] | 6
|
2015-11-11T00:38:38.000Z
|
2020-07-25T20:10:08.000Z
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
brush = orm.Brush.objects.get(canonical_name='paintbucket')
brush.description = "Need to color the whole canvas? Patty is always happy to help out. She'll fill the entire screen with a single color."
brush.save()
def backwards(self, orm):
"Write your backwards methods here."
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '254', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'brushes.brush': {
'Meta': {'ordering': "['ordinal']", 'object_name': 'Brush'},
'blue': ('django.db.models.fields.IntegerField', [], {}),
'canonical_name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'cost': ('django.db.models.fields.IntegerField', [], {}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
'green': ('django.db.models.fields.IntegerField', [], {}),
'iap_product_id': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'iphone_label': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'is_for_sale': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_new': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'ordinal': ('django.db.models.fields.IntegerField', [], {}),
'owned_by_default': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'owners': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['canvas_auth.User']", 'symmetrical': 'False'}),
'red': ('django.db.models.fields.IntegerField', [], {})
},
u'canvas_auth.user': {
'Meta': {'object_name': 'User', 'db_table': "u'auth_user'", '_ormbases': [u'auth.User'], 'proxy': 'True'}
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['brushes']
symmetrical = True
| 68.126582
| 195
| 0.570977
|
79535bfa6ae14371d5b3330bd32c252ae803f203
| 758
|
py
|
Python
|
pddlrl/validate.py
|
IBM/pddlrl
|
e057cc67426c91c9180286a67acad5b9a1ba7fc6
|
[
"MIT"
] | 5
|
2022-03-24T16:47:21.000Z
|
2022-03-25T16:04:04.000Z
|
pddlrl/validate.py
|
IBM/pddlrl
|
e057cc67426c91c9180286a67acad5b9a1ba7fc6
|
[
"MIT"
] | null | null | null |
pddlrl/validate.py
|
IBM/pddlrl
|
e057cc67426c91c9180286a67acad5b9a1ba7fc6
|
[
"MIT"
] | null | null | null |
# This file is a part of PDDLRL project.
# Copyright (c) 2020 Clement Gehring (clement@gehring.io)
# Copyright (c) 2021 Masataro Asai (guicho2.71828@gmail.com, masataro.asai@ibm.com), IBM Corporation
import os.path
import subprocess
directory, basename = os.path.split(__file__)
VAL = os.path.join(directory, "../VAL/build/linux64/Release/bin/Validate")
def validate(domainfile=None, problemfile=None, planfile=None):
args = [VAL, domainfile, problemfile, planfile]
args = [ arg for arg in args if arg ]
return subprocess.run(args)
def arrival(domainfile=None, problemfile=None, planfile=None):
args = ["arrival", domainfile, problemfile, planfile, "/dev/null"]
args = [ arg for arg in args if arg ]
return subprocess.run(args)
| 34.454545
| 100
| 0.726913
|
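The two wrappers above only shell out to an external binary; a minimal usage sketch, assuming the VAL binary has been built at the relative path computed above and using placeholder .pddl/.plan paths:
from pddlrl.validate import validate   # import path assumed from the file location above
# Placeholder inputs -- substitute a real PDDL domain, problem and plan file.
result = validate(domainfile="domain.pddl", problemfile="problem.pddl", planfile="plan.txt")
# subprocess.run() returns a CompletedProcess; VAL exits with 0 when the plan is valid.
print(result.returncode == 0)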
79535c65adca6a84d9e8254147d8830acc7e0335
| 3,772
|
py
|
Python
|
models/position_encoding.py
|
miranmanesh/ConditionalDETR
|
b3ab2ea01208a1c7f4a1668a9c7682d13e59bc45
|
[
"Apache-2.0"
] | 186
|
2021-08-16T02:53:25.000Z
|
2022-03-31T14:27:30.000Z
|
models/position_encoding.py
|
encounter1997/DE-CondDETR
|
c7d24c221125daa6322adc9915af77701240f063
|
[
"Apache-2.0"
] | 18
|
2021-08-24T07:01:18.000Z
|
2022-03-29T12:53:10.000Z
|
models/position_encoding.py
|
encounter1997/DE-CondDETR
|
c7d24c221125daa6322adc9915af77701240f063
|
[
"Apache-2.0"
] | 19
|
2021-08-16T06:45:55.000Z
|
2022-03-20T12:53:57.000Z
|
# ------------------------------------------------------------------------
# Conditional DETR
# Copyright (c) 2021 Microsoft. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 [see LICENSE for details]
# ------------------------------------------------------------------------
# Copied from DETR (https://github.com/facebookresearch/detr)
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
# ------------------------------------------------------------------------
"""
Various positional encodings for the transformer.
"""
import math
import torch
from torch import nn
from util.misc import NestedTensor
class PositionEmbeddingSine(nn.Module):
"""
This is a more standard version of the position embedding, very similar to the one
used by the Attention is all you need paper, generalized to work on images.
"""
def __init__(self, num_pos_feats=64, temperature=10000, normalize=False, scale=None):
super().__init__()
self.num_pos_feats = num_pos_feats
self.temperature = temperature
self.normalize = normalize
if scale is not None and normalize is False:
raise ValueError("normalize should be True if scale is passed")
if scale is None:
scale = 2 * math.pi
self.scale = scale
def forward(self, tensor_list: NestedTensor):
x = tensor_list.tensors
mask = tensor_list.mask
assert mask is not None
not_mask = ~mask
y_embed = not_mask.cumsum(1, dtype=torch.float32)
x_embed = not_mask.cumsum(2, dtype=torch.float32)
if self.normalize:
eps = 1e-6
y_embed = y_embed / (y_embed[:, -1:, :] + eps) * self.scale
x_embed = x_embed / (x_embed[:, :, -1:] + eps) * self.scale
dim_t = torch.arange(self.num_pos_feats, dtype=torch.float32, device=x.device)
dim_t = self.temperature ** (2 * (dim_t // 2) / self.num_pos_feats)
pos_x = x_embed[:, :, :, None] / dim_t
pos_y = y_embed[:, :, :, None] / dim_t
pos_x = torch.stack((pos_x[:, :, :, 0::2].sin(), pos_x[:, :, :, 1::2].cos()), dim=4).flatten(3)
pos_y = torch.stack((pos_y[:, :, :, 0::2].sin(), pos_y[:, :, :, 1::2].cos()), dim=4).flatten(3)
pos = torch.cat((pos_y, pos_x), dim=3).permute(0, 3, 1, 2)
return pos
class PositionEmbeddingLearned(nn.Module):
"""
Absolute pos embedding, learned.
"""
def __init__(self, num_pos_feats=256):
super().__init__()
self.row_embed = nn.Embedding(50, num_pos_feats)
self.col_embed = nn.Embedding(50, num_pos_feats)
self.reset_parameters()
def reset_parameters(self):
nn.init.uniform_(self.row_embed.weight)
nn.init.uniform_(self.col_embed.weight)
def forward(self, tensor_list: NestedTensor):
x = tensor_list.tensors
h, w = x.shape[-2:]
i = torch.arange(w, device=x.device)
j = torch.arange(h, device=x.device)
x_emb = self.col_embed(i)
y_emb = self.row_embed(j)
pos = torch.cat([
x_emb.unsqueeze(0).repeat(h, 1, 1),
y_emb.unsqueeze(1).repeat(1, w, 1),
], dim=-1).permute(2, 0, 1).unsqueeze(0).repeat(x.shape[0], 1, 1, 1)
return pos
def build_position_encoding(args):
N_steps = args.hidden_dim // 2
if args.position_embedding in ('v2', 'sine'):
# TODO find a better way of exposing other arguments
position_embedding = PositionEmbeddingSine(N_steps, normalize=True)
elif args.position_embedding in ('v3', 'learned'):
position_embedding = PositionEmbeddingLearned(N_steps)
else:
raise ValueError(f"not supported {args.position_embedding}")
return position_embedding
| 38.489796
| 103
| 0.593849
|
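As a rough illustration of what PositionEmbeddingSine computes, the sketch below replays the same normalize-then-sine/cosine interleaving on a tiny, fully valid mask using plain PyTorch only; the SimpleNamespace object is a stand-in for util.misc.NestedTensor, not the real class.
import math
from types import SimpleNamespace
import torch
# Stand-in for util.misc.NestedTensor: just .tensors and .mask attributes.
batch, channels, height, width = 1, 3, 4, 5
dummy = SimpleNamespace(
    tensors=torch.zeros(batch, channels, height, width),
    mask=torch.zeros(batch, height, width, dtype=torch.bool),  # no padded pixels
)
num_pos_feats, temperature, scale = 64, 10000, 2 * math.pi
not_mask = ~dummy.mask
y_embed = not_mask.cumsum(1, dtype=torch.float32)   # row index per pixel
x_embed = not_mask.cumsum(2, dtype=torch.float32)   # column index per pixel
y_embed = y_embed / (y_embed[:, -1:, :] + 1e-6) * scale   # normalize to [0, 2*pi]
x_embed = x_embed / (x_embed[:, :, -1:] + 1e-6) * scale
dim_t = torch.arange(num_pos_feats, dtype=torch.float32)
dim_t = temperature ** (2 * (dim_t // 2) / num_pos_feats)
pos_x = x_embed[:, :, :, None] / dim_t
pos_y = y_embed[:, :, :, None] / dim_t
pos_x = torch.stack((pos_x[..., 0::2].sin(), pos_x[..., 1::2].cos()), dim=4).flatten(3)
pos_y = torch.stack((pos_y[..., 0::2].sin(), pos_y[..., 1::2].cos()), dim=4).flatten(3)
pos = torch.cat((pos_y, pos_x), dim=3).permute(0, 3, 1, 2)
print(pos.shape)   # torch.Size([1, 128, 4, 5]) -- 2 * num_pos_feats channels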
79535e6f9ec8b54d1c30e03a35fae9edea3be7d0
| 8,583
|
py
|
Python
|
env/lib/python3.8/site-packages/plotly/graph_objs/layout/ternary/aaxis/title/_font.py
|
acrucetta/Chicago_COVI_WebApp
|
a37c9f492a20dcd625f8647067394617988de913
|
[
"MIT",
"Unlicense"
] | 11,750
|
2015-10-12T07:03:39.000Z
|
2022-03-31T20:43:15.000Z
|
env/lib/python3.8/site-packages/plotly/graph_objs/layout/ternary/aaxis/title/_font.py
|
acrucetta/Chicago_COVI_WebApp
|
a37c9f492a20dcd625f8647067394617988de913
|
[
"MIT",
"Unlicense"
] | 2,951
|
2015-10-12T00:41:25.000Z
|
2022-03-31T22:19:26.000Z
|
env/lib/python3.8/site-packages/plotly/graph_objs/layout/ternary/aaxis/title/_font.py
|
acrucetta/Chicago_COVI_WebApp
|
a37c9f492a20dcd625f8647067394617988de913
|
[
"MIT",
"Unlicense"
] | 2,623
|
2015-10-15T14:40:27.000Z
|
2022-03-28T16:05:50.000Z
|
from plotly.basedatatypes import BaseLayoutHierarchyType as _BaseLayoutHierarchyType
import copy as _copy
class Font(_BaseLayoutHierarchyType):
# class properties
# --------------------
_parent_path_str = "layout.ternary.aaxis.title"
_path_str = "layout.ternary.aaxis.title.font"
_valid_props = {"color", "family", "size"}
# color
# -----
@property
def color(self):
"""
The 'color' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
Returns
-------
str
"""
return self["color"]
@color.setter
def color(self, val):
self["color"] = val
# family
# ------
@property
def family(self):
"""
HTML font family - the typeface that will be applied by the web
browser. The web browser will only be able to apply a font if
it is available on the system which it operates. Provide
multiple font families, separated by commas, to indicate the
preference in which to apply fonts if they aren't available on
the system. The Chart Studio Cloud (at https://chart-
studio.plotly.com or on-premise) generates images on a server,
where only a select number of fonts are installed and
supported. These include "Arial", "Balto", "Courier New",
"Droid Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas
One", "Old Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
The 'family' property is a string and must be specified as:
- A non-empty string
Returns
-------
str
"""
return self["family"]
@family.setter
def family(self, val):
self["family"] = val
# size
# ----
@property
def size(self):
"""
The 'size' property is a number and may be specified as:
- An int or float in the interval [1, inf]
Returns
-------
int|float
"""
return self["size"]
@size.setter
def size(self, val):
self["size"] = val
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
color
family
HTML font family - the typeface that will be applied by
the web browser. The web browser will only be able to
apply a font if it is available on the system which it
operates. Provide multiple font families, separated by
commas, to indicate the preference in which to apply
fonts if they aren't available on the system. The Chart
Studio Cloud (at https://chart-studio.plotly.com or on-
premise) generates images on a server, where only a
select number of fonts are installed and supported.
These include "Arial", "Balto", "Courier New", "Droid
Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas
One", "Old Standard TT", "Open Sans", "Overpass", "PT
Sans Narrow", "Raleway", "Times New Roman".
size
"""
def __init__(self, arg=None, color=None, family=None, size=None, **kwargs):
"""
Construct a new Font object
Sets this axis' title font. Note that the title's font used to
be customized by the now deprecated `titlefont` attribute.
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of :class:`plotly.graph_objs.layout.ternary
.aaxis.title.Font`
color
family
HTML font family - the typeface that will be applied by
the web browser. The web browser will only be able to
apply a font if it is available on the system which it
operates. Provide multiple font families, separated by
commas, to indicate the preference in which to apply
fonts if they aren't available on the system. The Chart
Studio Cloud (at https://chart-studio.plotly.com or on-
premise) generates images on a server, where only a
select number of fonts are installed and supported.
These include "Arial", "Balto", "Courier New", "Droid
Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas
One", "Old Standard TT", "Open Sans", "Overpass", "PT
Sans Narrow", "Raleway", "Times New Roman".
size
Returns
-------
Font
"""
super(Font, self).__init__("font")
if "_parent" in kwargs:
self._parent = kwargs["_parent"]
return
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.layout.ternary.aaxis.title.Font
constructor must be a dict or
an instance of :class:`plotly.graph_objs.layout.ternary.aaxis.title.Font`"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
self._validate = kwargs.pop("_validate", True)
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("color", None)
_v = color if color is not None else _v
if _v is not None:
self["color"] = _v
_v = arg.pop("family", None)
_v = family if family is not None else _v
if _v is not None:
self["family"] = _v
_v = arg.pop("size", None)
_v = size if size is not None else _v
if _v is not None:
self["size"] = _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
| 37.480349
| 84
| 0.567401
|
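This generated class is rarely instantiated by hand; a short sketch of setting the same three font properties through the public plotly.graph_objects API (the axis title text and colour values are placeholders):
import plotly.graph_objects as go
fig = go.Figure()
fig.update_layout(
    ternary=dict(
        aaxis=dict(
            title=dict(
                text="Component A",
                # maps onto layout.ternary.aaxis.title.font described above
                font=dict(family="Arial", size=14, color="darkblue"),
            )
        )
    )
)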
79535ed1ccd50e1bdb203e7b1c2e87432da63a65
| 5,485
|
py
|
Python
|
tests/datasets/test_iteration_strategies.py
|
dk25021999/mmf
|
218057265a3fc175f656b5ebe8fb44ef5ccca2e9
|
[
"BSD-3-Clause"
] | 1,928
|
2020-05-07T19:00:53.000Z
|
2022-03-31T17:02:59.000Z
|
tests/datasets/test_iteration_strategies.py
|
dk25021999/mmf
|
218057265a3fc175f656b5ebe8fb44ef5ccca2e9
|
[
"BSD-3-Clause"
] | 914
|
2020-05-07T18:36:26.000Z
|
2022-03-31T05:45:26.000Z
|
tests/datasets/test_iteration_strategies.py
|
dk25021999/mmf
|
218057265a3fc175f656b5ebe8fb44ef5ccca2e9
|
[
"BSD-3-Clause"
] | 490
|
2020-05-07T20:05:10.000Z
|
2022-03-31T14:17:23.000Z
|
# Copyright (c) Facebook, Inc. and its affiliates.
import unittest
from collections import Counter
import numpy as np
import torch
from mmf.datasets import iteration_strategies
from tests.test_utils import NumbersDataset
class TestIterationStrategies(unittest.TestCase):
NUM_DATALOADERS = 5
def setUp(self):
np.random.seed(1234)
def _build_dataloaders(self):
dataloaders = {}
for idx in range(self.NUM_DATALOADERS):
dataloaders[f"numbers_{idx}"] = torch.utils.data.DataLoader(
dataset=NumbersDataset((idx + 1) * (10 ** idx)), num_workers=0
)
return dataloaders
def test_constant_iteration_strategy(self):
dataloaders = self._build_dataloaders()
strategy = iteration_strategies.ConstantIterationStrategy.from_params(
dataloaders=dataloaders
)
counter = Counter()
count = 100
for _ in range(count):
counter[strategy()] += 1
self.assertEqual(counter[0], count)
for idx in range(1, self.NUM_DATALOADERS):
self.assertEqual(counter[idx], 0)
strategy = iteration_strategies.ConstantIterationStrategy.from_params(
dataloaders=dataloaders, idx=1
)
counter = Counter()
count = 100
for _ in range(count):
counter[strategy()] += 1
self.assertEqual(counter[1], count)
for idx in range(0, self.NUM_DATALOADERS):
if idx != 1:
self.assertEqual(counter[idx], 0)
def test_round_robin_strategy(self):
dataloaders = self._build_dataloaders()
strategy = iteration_strategies.RoundRobinIterationStrategy.from_params(
dataloaders=dataloaders
)
counter = Counter()
count = 100
for _ in range(count):
counter[strategy()] += 1
for idx in range(0, self.NUM_DATALOADERS):
self.assertEqual(counter[idx], count // self.NUM_DATALOADERS)
strategy = iteration_strategies.RoundRobinIterationStrategy.from_params(
dataloaders=dataloaders, start_idx=2
)
counter = Counter()
count = 100
for _ in range(count):
counter[strategy()] += 1
for idx in range(0, self.NUM_DATALOADERS):
self.assertEqual(counter[idx], count // self.NUM_DATALOADERS)
def test_random_strategy(self):
dataloaders = self._build_dataloaders()
strategy = iteration_strategies.RandomIterationStrategy.from_params(
dataloaders=dataloaders
)
counter = Counter()
count = 10000
for _ in range(count):
counter[strategy()] += 1
for idx in range(0, self.NUM_DATALOADERS):
self.assertTrue(counter[idx] <= 2100)
self.assertTrue(counter[idx] >= 1900)
def test_size_proportional_strategy(self):
dataloaders = self._build_dataloaders()
strategy = iteration_strategies.SizeProportionalIterationStrategy.from_params(
dataloaders=dataloaders
)
counter = Counter()
count = 10000
for _ in range(count):
counter[strategy()] += 1
for idx in range(0, self.NUM_DATALOADERS):
self.assertTrue(counter[idx] <= 10 ** idx)
lower_limit = 10 ** (idx - 1)
if idx == 0:
lower_limit = 0
self.assertTrue(counter[idx] >= lower_limit)
def test_ratios_strategy(self):
dataloaders = self._build_dataloaders()
sampling_ratios = {}
# Constant
for idx in range(self.NUM_DATALOADERS):
sampling_ratios[f"numbers_{idx}"] = 0
sampling_ratios["numbers_0"] = 1
strategy = iteration_strategies.RatiosIterationStrategy.from_params(
dataloaders, sampling_ratios=sampling_ratios
)
counter = Counter()
count = 10000
for _ in range(count):
counter[strategy()] += 1
self.assertEqual(counter[0], count)
for idx in range(1, self.NUM_DATALOADERS):
self.assertEqual(counter[idx], 0)
for idx in range(self.NUM_DATALOADERS):
sampling_ratios[f"numbers_{idx}"] = 1.0 / self.NUM_DATALOADERS
strategy = iteration_strategies.RatiosIterationStrategy.from_params(
dataloaders, sampling_ratios=sampling_ratios
)
count = 10000
counter = Counter()
for _ in range(count):
counter[strategy()] += 1
for idx in range(0, self.NUM_DATALOADERS):
self.assertTrue(counter[idx] <= 2100)
self.assertTrue(counter[idx] >= 1900)
lens = sum(len(loader.dataset) for loader in dataloaders.values())
for idx in range(self.NUM_DATALOADERS):
sampling_ratios[f"numbers_{idx}"] = (
len(dataloaders[f"numbers_{idx}"].dataset) / lens
)
strategy = iteration_strategies.RatiosIterationStrategy.from_params(
dataloaders, sampling_ratios=sampling_ratios
)
count = 10000
counter = Counter()
for _ in range(count):
counter[strategy()] += 1
for idx in range(0, self.NUM_DATALOADERS):
self.assertTrue(counter[idx] <= 10 ** idx)
lower_limit = 10 ** (idx - 1)
if idx == 0:
lower_limit = 0
self.assertTrue(counter[idx] >= lower_limit)
| 32.076023
| 86
| 0.606746
|
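Beyond the assertions above, an iteration strategy is meant to choose which dataloader supplies the next batch; a hedged sketch of that loop, reusing only constructors exercised in the tests (NumbersDataset is the same test helper, so this is illustrative rather than a training recipe):
import torch
from mmf.datasets import iteration_strategies
from tests.test_utils import NumbersDataset
dataloaders = {
    "small": torch.utils.data.DataLoader(dataset=NumbersDataset(10), num_workers=0),
    "large": torch.utils.data.DataLoader(dataset=NumbersDataset(1000), num_workers=0),
}
iterators = {name: iter(loader) for name, loader in dataloaders.items()}
names = list(dataloaders)   # strategy() returns an index into this insertion order
strategy = iteration_strategies.SizeProportionalIterationStrategy.from_params(
    dataloaders=dataloaders
)
for _ in range(5):
    idx = strategy()
    batch = next(iterators[names[idx]])
    print(names[idx], batch)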
79535f80d40456dd7fea9454aae2e12317442684
| 1,745
|
py
|
Python
|
hermione/module_templates/__IMPLEMENTED_BASE__/src/ml/analysis/vif.py
|
RodrigoATorres/hermione
|
6cbed73e309f8025a48f33165d8f29561c6a3cc7
|
[
"Apache-2.0"
] | 183
|
2020-06-03T22:43:14.000Z
|
2022-03-17T22:39:07.000Z
|
hermione/module_templates/__IMPLEMENTED_BASE__/src/ml/analysis/vif.py
|
RodrigoATorres/hermione
|
6cbed73e309f8025a48f33165d8f29561c6a3cc7
|
[
"Apache-2.0"
] | 31
|
2020-06-03T22:55:18.000Z
|
2022-03-27T20:06:17.000Z
|
hermione/module_templates/__IMPLEMENTED_BASE__/src/ml/analysis/vif.py
|
RodrigoATorres/hermione
|
6cbed73e309f8025a48f33165d8f29561c6a3cc7
|
[
"Apache-2.0"
] | 43
|
2020-06-03T22:45:03.000Z
|
2021-12-29T19:43:54.000Z
|
import pandas as pd
from statsmodels.stats.outliers_influence import variance_inflation_factor
class VIF:
@classmethod
def analyze(cls, df: pd.DataFrame, thresh=5.0, verbose=True):
"""
Multicollinearity analysis
Parameters
----------
df : pd.DataFrame
Dataframe containing the numeric features to be checked for multicollinearity
thresh : float
VIF threshold above which a variable is considered for removal
verbose : bool
if True, prints the variables that could be removed
Return
-------
pd.DataFrame
"""
variables = list(range(df.shape[1]))
dropped = True
while dropped:
dropped = False
vif = [variance_inflation_factor(df.iloc[:, variables].values, ix)
for ix in range(df.iloc[:, variables].shape[1])]
maxloc = vif.index(max(vif))
if max(vif) > thresh:
m = max(vif)
index_max = [i for i, j in enumerate(vif) if j == m]
if verbose:
cols_possibles_remove = [str(df.iloc[:, variables].columns[i]) for i in index_max]
print("Columns that can be removed -> " + ", ".join(cols_possibles_remove))
print("------")
print('dropping \'' + str(df.iloc[:, variables].columns[maxloc]) +
'\' at index: ' + str(maxloc))
print("_____________________________________________________________")
del variables[maxloc]
dropped = True
print('Remaining variables:')
print(df.columns[variables])
return df.iloc[:, variables]
| 36.354167
| 102
| 0.515186
|
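A short usage sketch for VIF.analyze on a deliberately collinear frame; the import path assumes the template's src/ directory is on the Python path, and the column names and data are made up for illustration:
import numpy as np
import pandas as pd
from ml.analysis.vif import VIF   # assumed import path inside the generated project
rng = np.random.default_rng(0)
a = rng.normal(size=200)
b = rng.normal(size=200)
df = pd.DataFrame({
    "a": a,
    "b": b,
    "almost_a": a + rng.normal(scale=0.01, size=200),   # nearly collinear with "a"
})
reduced = VIF.analyze(df, thresh=5.0, verbose=True)
print(reduced.columns.tolist())   # one of "a"/"almost_a" is expected to be dropped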
795360402ea609c7975d15f20573afd238cdcea1
| 1,661
|
py
|
Python
|
src/harrastuspassi/harrastuspassi/management/commands/import_categories_from_owl.py
|
seitztimo/harrastuspassi-backend
|
6933471de644344e1a845e44859112c5b92b0ced
|
[
"MIT"
] | null | null | null |
src/harrastuspassi/harrastuspassi/management/commands/import_categories_from_owl.py
|
seitztimo/harrastuspassi-backend
|
6933471de644344e1a845e44859112c5b92b0ced
|
[
"MIT"
] | null | null | null |
src/harrastuspassi/harrastuspassi/management/commands/import_categories_from_owl.py
|
seitztimo/harrastuspassi-backend
|
6933471de644344e1a845e44859112c5b92b0ced
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from django.core.management.base import BaseCommand
from owlready2 import get_ontology
from harrastuspassi.models import HobbyCategory
ROOT_CLASS_LABEL = 'Harrastusmuoto'
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('path_to_owl_file')
def handle(self, *args, **options):
ontology = get_ontology(options['path_to_owl_file']).load()
root_class = ontology.search_one(label=ROOT_CLASS_LABEL)
self.depth = 0
self.add_subclasses_as_categories(root_class)
def add_subclasses_as_categories(self, parent_class, parent_hobby_category=None):
for subclass in parent_class.subclasses():
[origin_id] = subclass.identifier or ['']
if not origin_id:
data_source = ''
else:
data_source = 'yso'
labels = subclass.label
name_fi, name_sv, name_en = '', '', ''
for label in labels:
label_lang = getattr(label, 'lang', 'fi')
if label_lang == 'fi':
name_fi = label
elif label_lang == 'sv':
name_sv = label
elif label_lang == 'en':
name_en = label
hobby_category, created = HobbyCategory.objects.update_or_create(
name_fi=name_fi,
parent=parent_hobby_category,
defaults={
'name_sv': name_sv,
'name_en': name_en,
'data_source': data_source,
'origin_id': origin_id
}
)
indent = '--' * self.depth
self.stdout.write(f'{indent}fi_{name_fi}, sv_{name_sv}, en_{name_en}')
self.depth += 1
self.add_subclasses_as_categories(subclass, hobby_category)
self.depth -= 1
| 30.2
| 83
| 0.646598
|
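The same owlready2 traversal can be previewed outside Django; a minimal sketch that only prints the category tree instead of writing HobbyCategory rows (the .owl path is a placeholder):
from owlready2 import get_ontology
ROOT_CLASS_LABEL = 'Harrastusmuoto'
def print_tree(parent_class, depth=0):
    # Walk direct subclasses recursively, mirroring add_subclasses_as_categories.
    for subclass in parent_class.subclasses():
        labels = {getattr(label, 'lang', 'fi'): str(label) for label in subclass.label}
        print('--' * depth + labels.get('fi', ''))
        print_tree(subclass, depth + 1)
ontology = get_ontology('path/to/categories.owl').load()   # placeholder path
root_class = ontology.search_one(label=ROOT_CLASS_LABEL)
print_tree(root_class)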
795361279438202e862092ff8650d6a835678b33
| 1,610
|
py
|
Python
|
experimenting/temp_contour.py
|
Shom770/data-science-projects
|
a85ef8c73fbee66ac060414716e2b0440772f07f
|
[
"MIT"
] | null | null | null |
experimenting/temp_contour.py
|
Shom770/data-science-projects
|
a85ef8c73fbee66ac060414716e2b0440772f07f
|
[
"MIT"
] | null | null | null |
experimenting/temp_contour.py
|
Shom770/data-science-projects
|
a85ef8c73fbee66ac060414716e2b0440772f07f
|
[
"MIT"
] | null | null | null |
import logging
import cartopy.crs as ccrs
import cartopy.feature as cfeature
import numpy as np
import matplotlib.pyplot as plt
from netCDF4 import Dataset
logging.basicConfig()
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
data_nam = Dataset(
(
f"http://nomads.ncep.noaa.gov/dods/gfs_0p25/"
f"gfs20220423/gfs_0p25_12z"
)
)
logger.info("Loaded NAM dataset")
lons = data_nam.variables["lon"][:]
print(lons)
lats = data_nam.variables["lat"][:]
extent = (-80, -74, 37, 40)
slo, elo = extent[:2]
sla, ela = extent[2:]
fig: plt.Figure = plt.figure()
ax: plt.Axes = fig.add_subplot(1, 1, 1, projection=ccrs.PlateCarree())
ax.set_extent(extent)
ax.add_feature(cfeature.LAND.with_scale("10m"))
ax.add_feature(cfeature.OCEAN.with_scale("10m"))
ax.add_feature(cfeature.STATES.with_scale("10m"))
home_lat = np.where(
np.logical_and(np.greater_equal(lats, sla), np.less_equal(lats, ela))
)[0]
all_lats = np.array([lats[lat] for lat in home_lat])
home_lon = np.where(
np.logical_and(np.greater_equal(lons, slo), np.less_equal(lons, elo))
)[0]
all_lons = np.array([lons[lon] for lon in home_lon])
temp_nam = data_nam.variables["crainsfc"][84 // 3]  # integer index (forecast step), not a float
temps = np.array([[temp_nam[lat, lon] for lon in home_lon] for lat in home_lat])
lons_, lats_ = np.meshgrid(all_lons, all_lats)
levels = [0.01, 0.1, 0.2, 0.3, 0.4, 0.5, 1, 2, 3, 4, 5]
ticks = levels
CS = ax.contourf(lons_, lats_, temps, transform=ccrs.PlateCarree(), levels=levels, cmap="coolwarm")
ax.set_title("Temperature on April 15th at 21z")
fig.colorbar(CS, ticks=ticks, location="bottom")
plt.show()
| 25.967742
| 99
| 0.71118
|
7953613457162000ddcb7706afba1589d8f2a194
| 12,710
|
py
|
Python
|
tests/test_build.py
|
jpmckinney/pydata-sphinx-theme
|
f54466a646cbc084bd1bb4dacb3f3b196598b495
|
[
"BSD-3-Clause"
] | 1
|
2021-06-25T03:15:26.000Z
|
2021-06-25T03:15:26.000Z
|
tests/test_build.py
|
jpmckinney/pydata-sphinx-theme
|
f54466a646cbc084bd1bb4dacb3f3b196598b495
|
[
"BSD-3-Clause"
] | 10
|
2021-04-16T17:45:06.000Z
|
2021-06-26T20:59:17.000Z
|
tests/test_build.py
|
jpmckinney/pydata-sphinx-theme
|
f54466a646cbc084bd1bb4dacb3f3b196598b495
|
[
"BSD-3-Clause"
] | 1
|
2021-04-16T19:48:45.000Z
|
2021-04-16T19:48:45.000Z
|
import os
from pathlib import Path
from shutil import copytree
from bs4 import BeautifulSoup
from sphinx.testing.util import SphinxTestApp
from sphinx.testing.path import path as sphinx_path
import sphinx.errors
import pytest
path_tests = Path(__file__).parent
class SphinxBuild:
def __init__(self, app: SphinxTestApp, src: Path):
self.app = app
self.src = src
def build(self):
self.app.build()
assert self.warnings == "", self.status
return self
@property
def status(self):
return self.app._status.getvalue()
@property
def warnings(self):
return self.app._warning.getvalue()
@property
def outdir(self):
return Path(self.app.outdir)
def html_tree(self, *path):
path_page = self.outdir.joinpath(*path)
if not path_page.exists():
raise ValueError(f"{path_page} does not exist")
return BeautifulSoup(path_page.read_text("utf8"), "html.parser")
@pytest.fixture()
def sphinx_build_factory(make_app, tmp_path):
def _func(src_folder, **kwargs):
copytree(path_tests / "sites" / src_folder, tmp_path / src_folder)
app = make_app(
srcdir=sphinx_path(os.path.abspath((tmp_path / src_folder))), **kwargs
)
return SphinxBuild(app, tmp_path / src_folder)
yield _func
def test_build_html(sphinx_build_factory, file_regression):
"""Test building the base html template and config."""
sphinx_build = sphinx_build_factory("base") # type: SphinxBuild
# Basic build with defaults
sphinx_build.build()
assert (sphinx_build.outdir / "index.html").exists(), sphinx_build.outdir.glob("*")
index_html = sphinx_build.html_tree("index.html")
subpage_html = sphinx_build.html_tree("section1/index.html")
# Navbar structure
navbar = index_html.select("div#navbar-center")[0]
file_regression.check(navbar.prettify(), basename="navbar_ix", extension=".html")
# Sidebar structure
sidebar = index_html.select(".bd-sidebar")[0]
file_regression.check(sidebar.prettify(), basename="sidebar_ix", extension=".html")
# Sidebar subpage
sidebar = subpage_html.select(".bd-sidebar")[0]
file_regression.check(
sidebar.prettify(), basename="sidebar_subpage", extension=".html"
)
def test_toc_visibility(sphinx_build_factory):
# Test that setting TOC level visibility works as expected
confoverrides = {
"html_theme_options.show_toc_level": 2,
}
sphinx_build = sphinx_build_factory("base", confoverrides=confoverrides).build()
index_html = sphinx_build.html_tree("index.html")
# The 3rd level headers should be visible, but not the fourth-level
assert "visible" in index_html.select(".toc-h2 ul")[0].attrs["class"]
assert "visible" not in index_html.select(".toc-h3 ul")[0].attrs["class"]
def test_logo(sphinx_build_factory):
"""Test that the logo is shown by default, project title if no logo."""
sphinx_build = sphinx_build_factory("base").build()
# By default logo is shown
index_html = sphinx_build.html_tree("index.html")
assert index_html.select(".navbar-brand img")
assert not index_html.select(".navbar-brand")[0].text.strip()
def test_logo_name(sphinx_build_factory):
"""Test that the logo is shown by default, project title if no logo."""
confoverrides = {"html_logo": ""}
sphinx_build = sphinx_build_factory("base", confoverrides=confoverrides).build()
# if no logo is specified, use project title instead
index_html = sphinx_build.html_tree("index.html")
assert "PyData Tests" in index_html.select(".navbar-brand")[0].text.strip()
def test_favicons(sphinx_build_factory):
"""Test that arbitrary favicons are included."""
html_theme_options_favicons = {
"favicons": [
{
"rel": "icon",
"sizes": "16x16",
"href": "https://secure.example.com/favicon/favicon-16x16.png",
},
{
"rel": "icon",
"sizes": "32x32",
"href": "favicon-32x32.png",
},
{
"rel": "apple-touch-icon",
"sizes": "180x180",
"href": "apple-touch-icon-180x180.png",
},
]
}
confoverrides = {"html_theme_options": html_theme_options_favicons}
sphinx_build = sphinx_build_factory("base", confoverrides=confoverrides).build()
index_html = sphinx_build.html_tree("index.html")
icon_16 = (
'<link href="https://secure.example.com/favicon/favicon-16x16.png" '
'rel="icon" sizes="16x16"/>'
)
icon_32 = '<link href="_static/favicon-32x32.png" rel="icon" sizes="32x32"/>'
icon_180 = (
'<link href="_static/apple-touch-icon-180x180.png" '
'rel="apple-touch-icon" sizes="180x180"/>'
)
assert icon_16 in str(index_html.select("head")[0])
assert icon_32 in str(index_html.select("head")[0])
assert icon_180 in str(index_html.select("head")[0])
def test_sidebar_default(sphinx_build_factory):
"""The sidebar is shrunk when no sidebars specified in html_sidebars."""
sphinx_build = sphinx_build_factory("base").build()
index_html = sphinx_build.html_tree("page1.html")
assert "col-md-3" in index_html.select(".bd-sidebar")[0].attrs["class"]
def test_sidebar_disabled(sphinx_build_factory):
"""The sidebar is shrunk when no sidebars specified in html_sidebars."""
confoverrides = {"html_sidebars.page1": ""}
sphinx_build = sphinx_build_factory("base", confoverrides=confoverrides).build()
index_html = sphinx_build.html_tree("page1.html")
assert "col-md-1" in index_html.select(".bd-sidebar")[0].attrs["class"]
def test_navbar_align_default(sphinx_build_factory):
"""The navbar items align with the proper part of the page."""
sphinx_build = sphinx_build_factory("base").build()
index_html = sphinx_build.html_tree("index.html")
assert "col-lg-9" in index_html.select("div#navbar-collapsible")[0].attrs["class"]
def test_navbar_align_right(sphinx_build_factory):
"""The navbar items align with the proper part of the page."""
confoverrides = {"html_theme_options.navbar_align": "right"}
sphinx_build = sphinx_build_factory("base", confoverrides=confoverrides).build()
# Both the column alignment and the margin should be changed
index_html = sphinx_build.html_tree("index.html")
assert "col-lg-9" not in index_html.select("div#navbar-center")[0].attrs["class"]
assert "ml-auto" in index_html.select("div#navbar-center")[0].attrs["class"]
def test_navbar_no_in_page_headers(sphinx_build_factory, file_regression):
# https://github.com/pydata/pydata-sphinx-theme/issues/302
sphinx_build = sphinx_build_factory("test_navbar_no_in_page_headers").build()
index_html = sphinx_build.html_tree("index.html")
navbar = index_html.select("ul#navbar-main-elements")[0]
file_regression.check(navbar.prettify(), extension=".html")
def test_sidebars_captions(sphinx_build_factory, file_regression):
sphinx_build = sphinx_build_factory("sidebars").build()
subindex_html = sphinx_build.html_tree("section1/index.html")
# Sidebar structure with caption
sidebar = subindex_html.select("nav#bd-docs-nav")[0]
file_regression.check(sidebar.prettify(), extension=".html")
def test_sidebars_nested_page(sphinx_build_factory, file_regression):
sphinx_build = sphinx_build_factory("sidebars").build()
subindex_html = sphinx_build.html_tree("section1/subsection1/page1.html")
# For nested (uncollapsed) page, the label included `checked=""`
sidebar = subindex_html.select("nav#bd-docs-nav")[0]
file_regression.check(sidebar.prettify(), extension=".html")
def test_sidebars_single(sphinx_build_factory, file_regression):
confoverrides = {"templates_path": ["_templates_single_sidebar"]}
sphinx_build = sphinx_build_factory("sidebars", confoverrides=confoverrides).build()
index_html = sphinx_build.html_tree("index.html")
# No navbar included
assert not index_html.select("nav#navbar-main")
assert not index_html.select(".navbar-nav")
# Sidebar structure
sidebar = index_html.select("nav#bd-docs-nav")[0]
file_regression.check(sidebar.prettify(), extension=".html")
def test_sidebars_level2(sphinx_build_factory, file_regression):
confoverrides = {"templates_path": ["_templates_sidebar_level2"]}
sphinx_build = sphinx_build_factory("sidebars", confoverrides=confoverrides).build()
subindex_html = sphinx_build.html_tree("section1/subsection1/index.html")
# Sidebar structure
sidebar = subindex_html.select("nav#bd-docs-nav")[0]
file_regression.check(sidebar.prettify(), extension=".html")
def test_included_toc(sphinx_build_factory):
"""Test that Sphinx project containing TOC (.. toctree::) included
via .. include:: can be successfully built.
"""
# Regression test for bug resolved in #347.
# Tests mainly makes sure that the sphinx_build.build() does not raise exception.
# https://github.com/pydata/pydata-sphinx-theme/pull/347
sphinx_build = sphinx_build_factory("test_included_toc").build()
included_page_html = sphinx_build.html_tree("included-page.html")
assert included_page_html is not None
# html contexts for `use_edit_page_button`
# these are "good" context fragments that should yield a working link
good_edits = [
[
{
"github_user": "foo",
"github_repo": "bar",
"github_version": "HEAD",
"doc_path": "docs",
},
"https://github.com/foo/bar/edit/HEAD/docs/index.rst",
],
[
{
"gitlab_user": "foo",
"gitlab_repo": "bar",
"gitlab_version": "HEAD",
"doc_path": "docs",
},
"https://gitlab.com/foo/bar/-/edit/HEAD/docs/index.rst",
],
[
{
"bitbucket_user": "foo",
"bitbucket_repo": "bar",
"bitbucket_version": "HEAD",
"doc_path": "docs",
},
"https://bitbucket.org/foo/bar/src/HEAD/docs/index.rst?mode=edit",
],
]
# copy the "good" ones, ensure `doc_path` is agnostic to trailing slashes
slash_edits = [
[
{
# add slashes to doc_path:
key: f"{value}/" if key == "doc_path" else value
for key, value in html_context.items()
},
# the URL does not change
url,
]
for html_context, url in good_edits
]
# copy the "good" ones, provide a `<whatever>_url` based off the default
providers = [
[
dict(
# copy all the values
**html_context,
# add a provider url
**{f"{provider}_url": f"https://{provider}.example.com"},
),
f"""https://{provider}.example.com/foo/{url.split("/foo/")[1]}""",
]
for html_context, url in good_edits
for provider in ["gitlab", "bitbucket", "github"]
if provider in url
]
# missing any of the values should fail
bad_edits = [
[
{
# copy all the values
key: value
for key, value in html_context.items()
# but not `<provider>_version`
if "_version" not in key
},
None,
]
for html_context, url in good_edits
]
# a good custom URL template
good_custom = [
[
{
"edit_page_url_template": (
"https://dvcs.example.com/foo/bar/edit/HEAD/{{ file_name }}"
)
},
"https://dvcs.example.com/foo/bar/edit/HEAD/index.rst",
]
]
# a bad custom URL template
bad_custom = [
[
# it's missing a reference to {{ file_name }}
{"edit_page_url_template": "http://has-no-file-name"},
None,
]
]
all_edits = [
*good_edits,
*slash_edits,
*bad_edits,
*good_custom,
*bad_custom,
*providers,
]
@pytest.mark.parametrize("html_context,edit_url", all_edits)
def test_edit_page_url(sphinx_build_factory, html_context, edit_url):
confoverrides = {
"html_theme_options.use_edit_page_button": True,
"html_context": html_context,
}
sphinx_build = sphinx_build_factory("base", confoverrides=confoverrides)
if edit_url is None:
with pytest.raises(sphinx.errors.ThemeError):
sphinx_build.build()
return
sphinx_build.build()
index_html = sphinx_build.html_tree("index.html")
edit_link = index_html.select(".editthispage a")
assert edit_link, "no edit link found"
assert edit_link[0].attrs["href"] == edit_url, f"edit link didn't match {edit_link}"
| 32.757732
| 88
| 0.660346
|
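For reference, the `html_context` / `use_edit_page_button` combinations exercised by the edit-link tests correspond to conf.py settings like the following sketch (user/repo names are the same placeholders used in good_edits):
# conf.py sketch for the "good" GitHub case tested above; values are placeholders.
html_theme = "pydata_sphinx_theme"
html_theme_options = {
    "use_edit_page_button": True,
}
html_context = {
    "github_user": "foo",
    "github_repo": "bar",
    "github_version": "HEAD",
    "doc_path": "docs",
}
# With these values the theme renders an edit link of the form
# https://github.com/foo/bar/edit/HEAD/docs/index.rst on index.rst.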
79536246380a091f74e2df341bf9b605acc8dc5b
| 108,439
|
py
|
Python
|
cvpysdk/job.py
|
Jayesh-Jain/SDK
|
5fe5130b1eeacad9944ba34714e583c6c743482f
|
[
"Apache-2.0"
] | null | null | null |
cvpysdk/job.py
|
Jayesh-Jain/SDK
|
5fe5130b1eeacad9944ba34714e583c6c743482f
|
[
"Apache-2.0"
] | null | null | null |
cvpysdk/job.py
|
Jayesh-Jain/SDK
|
5fe5130b1eeacad9944ba34714e583c6c743482f
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# pylint: disable=W0104, R0205, R1710
# --------------------------------------------------------------------------
# Copyright Commvault Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# --------------------------------------------------------------------------
"""Main file for performing operations on a job.
JobController: Class for managing jobs on this commcell
JobManagement: Class for performing Job Management operations
Job: Class for keeping track of a job and performing various operations on it.
JobController
=============
__init__(commcell_object) -- initializes the instance of JobController class associated
with the specified commcell
__str__() -- returns the string representation of the active jobs
on this commcell
__repr__() -- returns the string representation of the object of this class,
with the commcell it is associated with
_get_jobs_list() -- executes the request, and parses and returns the jobs response
_get_jobs_request_json(**options)
-- Returns the request json for the jobs request
_modify_all_jobs(operation_type=None)
-- executes a request on the server to suspend/resume/kill all
the jobs on the commserver.
all_jobs() -- returns all the jobs on this commcell
active_jobs() -- returns the dict of active jobs and their details
finished_jobs() -- returns the dict of finished jobs and their details
get() -- returns the Job class instance for the given job id
kill_all_jobs() -- Kills all jobs on the commcell
resume_all_jobs() -- Resumes all jobs on the commcell
suspend_all_jobs() -- Suspends all jobs on the commcell
JobManagement
==============
__init__(commcell_object) -- initialise object of the JobManagement class
_set_jobmanagement_settings() -- sets the jobmanagement settings
_refresh() -- refresh the job management settings
set_general_settings(settings) -- sets the general settings of job management
set_priority_settings(settings) -- sets the priority settings of job management
set_restart_settings(settings) -- sets the restart settings of job management
set_update_settings(settings) -- sets the update settings of job management
job_priority_precedence -- gets the job priority precedence
job_priority_precedence(priority_type) -- sets the job priority precedence property
start_phase_retry_interval -- gets the start phase retry interval in
(minutes)
start_phase_retry_interval(minutes) -- sets the start phase retry interval property
state_update_interval_for_continuous_data_replicator -- gets the state update interval for continuous
data replicator in (minutes)
state_update_interval_for_continuous_data_replicator(minutes) -- sets the state update interval for continuous
data replicator
allow_running_jobs_to_complete_past_operation_window -- gets the allow running jobs to complete past
operation window(True/False)
allow_running_jobs_to_complete_past_operation_window(flag) -- sets the allow running jobs to complete past
operation window
job_alive_check_interval_in_minutes -- gets the job alive check interval in (minutes)
job_alive_check_interval_in_minutes(minutes) -- sets the job alive check interval in minutes
queue_scheduled_jobs -- gets the queue scheduled jobs(True/False)
queue_scheduled_jobs(flags) -- sets the queue scheduled jobs
enable_job_throttle_at_client_level -- gets the enable job throttle at client level
(True/False)
enable_job_throttle_at_client_level(flag) -- sets the enable job throttle at client level
enable_multiplexing_for_db_agents -- gets the enable multiplexing for db agents
(True/False)
enable_multiplexing_for_db_agents(flag) -- sets the enable multiplexing for db agents
queue_jobs_if_conflicting_jobs_active -- gets the queue jobs if conflicting jobs active
(True/False)
queue_jobs_if_conflicting_jobs_active(flag) -- sets the queue jobs if conflicting jobs active
queue_jobs_if_activity_disabled -- gets the queue jobs if activity disabled
(True/False)
queue_jobs_if_activity_disabled(flag) -- sets the queue jobs if activity disabled
backups_preempts_auxilary_copy -- gets the backups preempts auxilary copy
(True/False)
backups_preempts_auxilary_copy(flag) -- sets the backups preempts auxilary copy
restore_preempts_other_jobs -- gets the restore preempts other jobs
(True/False)
restore_preempts_other_jobs(flag) -- sets the restore preempts other jobs
enable_multiplexing_for_oracle -- gets the enable multiplexing for oracle
(True/False)
enable_multiplexing_for_oracle(flag) -- sets the enable multiplexing for oracle
job_stream_high_water_mark_level -- gets the job stream high water mark level
job_stream_high_water_mark_level(level) -- sets the job stream high water mark level
backups_preempts_other_backups -- gets the backups preempts other backups
(True/False)
backups_preempts_other_backups(flag) -- sets the backups preempts other backups
do_not_start_backups_on_disabled_client -- gets the do not start backups on
disabled client(True/False)
do_not_start_backups_on_disabled_client(flag) -- sets the do not start backups
on disabled client
get_restart_setting(jobtype) -- gets the restart settings of a specific
jobtype
get_priority_setting(jobtype) -- gets the priority setting of a specific
jobtype
get_update_setting(jobtype) -- gets the update settings of a specific
jobtype
get_restart_settings -- gets the restart settings of job management
get_priority_settings -- gets the priority settings of job management
get_update_settings -- gets the update settings of job management
Job
===
__init__() -- initializes the instance of Job class associated with the
specified commcell of job with id: 'job_id'
__repr__() -- returns the string representation of the object of this class,
with the job id it is associated with
_is_valid_job() -- checks if the job with the given id is a valid job or not
_get_job_summary() -- gets the summary of the job with the given job id
_get_job_details() -- gets the details of the job with the given job id
_initialize_job_properties()-- initializes the properties of the job
_wait_for_status() -- waits for 2 minutes or till the job status is changed
to given status, whichever is earlier
wait_for_completion() -- waits for the job to finish, (job.is_finished == True)
is_finished() -- checks for the status of the job.
Returns True if finished, else False
pause() -- suspend the job
resume() -- resumes the job
resubmit() -- to resubmit the job
kill() -- kills the job
refresh() -- refresh the properties of the Job
advanced_job_details() -- Returns advanced properties for the job
Job instance Attributes
-----------------------
**job.is_finished** -- specifies whether the job is finished or not (True / False)
**job.client_name** -- returns the name of the client, job is running for
**job.agent_name** -- returns the name of the agent, job is running for
**job.instance_name** -- returns the name of the instance, job is running for
**job.backupset_name** -- returns the name of the backupset, job is running for
**job.subclient_name** -- returns the name of the subclient, job is running for
**job.status** -- returns the current status of the job
(Completed / Suspended / Waiting / ... / etc.)
**job.job_id** -- returns the id of the job
**job.job_type** -- returns the type of the job
**job.backup_level** -- returns the backup level (if applicable), otherwise None
**job.start_time** -- returns the start time of the job
**job.end_time** -- returns the end time of the job
**job.delay_reason** -- reason why the job was delayed
**job.pending_reason** -- reason if job went into pending state
**job.phase** -- returns the current phase of the job
**job.summary** -- returns the dictionary consisting of the full summary of the job
**job.username** -- returns the username with which the job started
**job.userid** -- returns the userid with which the job started
**job.details** -- returns the dictionary consisting of the full details of the job
**job.num_of_files_transferred** -- returns the current number of files transferred for the job.
**job.state** -- returns the current state of the job.
ErrorRule
=========
_get_xml_for_rule() -- Returns the XML for a given rule's dictionary of key value pairs.
add_error_rule() -- Add new error rules as well as update existing rules.
_modify_job_status_on_errors() -- Internally used to enable or disable job status on errors.
enable() -- Enable an error rule for a specific iDA using _modify_job_status_on_errors.
disable() -- Disable an error rule for a specific iDA using _modify_job_status_on_errors.
"""
from __future__ import absolute_import
from __future__ import unicode_literals
import time
import copy
from .exception import SDKException
from .constants import AdvancedJobDetailType, ApplicationGroup
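# Hedged usage sketch (not part of the SDK): how the classes documented above are
# typically driven from an authenticated Commcell object.  The helper below is
# illustrative only and is never called by this module; it assumes the Commcell
# class exposes its JobController instance as ``commcell.job_controller``.
def _example_print_active_jobs(commcell):
    """Print id, operation and status for every active job on the given Commcell."""
    job_controller = commcell.job_controller
    for job_id, details in job_controller.active_jobs().items():
        # active_jobs() returns {job_id: {'operation': ..., 'status': ..., ...}}
        print(job_id, details['operation'], details['status'])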
class JobController(object):
"""Class for controlling all the jobs associated with the commcell."""
def __init__(self, commcell_object):
"""Initialize instance of the JobController class to get the details of Commcell Jobs.
Args:
commcell_object (object) -- instance of Commcell class to get the jobs of
Returns:
None
"""
self._commcell_object = commcell_object
self._cvpysdk_object = commcell_object._cvpysdk_object
self._services = commcell_object._services
self._update_response_ = commcell_object._update_response_
def __str__(self):
"""Representation string consisting of all active jobs on this commcell.
Returns:
str - string of all the active jobs on this commcell
"""
jobs_dict = self.active_jobs()
representation_string = '{:^5}\t{:^25}\t{:^20}\t{:^20}\t{:^20}\t{:^20}\t{:^20}\n\n'.format(
'Job ID', 'Operation', 'Status', 'Agent type', 'Job type', 'Progress', 'Pending Reason'
)
for job in jobs_dict:
sub_str = '{:^5}\t{:25}\t{:20}\t{:20}\t{:20}\t{:20}%\t{:^20}\n'.format(
job,
jobs_dict[job]['operation'],
jobs_dict[job]['status'],
jobs_dict[job]['app_type'],
jobs_dict[job]['job_type'],
jobs_dict[job]['percent_complete'],
jobs_dict[job]['pending_reason']
)
representation_string += sub_str
return representation_string.strip()
def __repr__(self):
"""Representation string for the instance of the JobController class."""
return "JobController class instance for Commcell: '{0}'".format(
self._commcell_object.commserv_name
)
def _get_jobs_request_json(self, **options):
"""Returns the request json for the jobs request
Args:
options (dict) -- dict of key-word arguments
Available Options:
category (str) -- category name for which the list of jobs
are to be retrieved
Valid Values:
- ALL
- ACTIVE
- FINISHED
default: ALL
limit (int) -- total number of jobs list that are to be returned
default: 20
lookup_time (int) -- list of jobs to be retrieved which are specified
hours older
default: 5 hours
show_aged_job (bool) -- boolean specifying whether to include aged jobs in
the result or not
default: False
clients_list (list) -- list of clients to return the jobs for
default: []
job_type_list (list) -- list of job operation types
default: []
Returns:
dict - request json that is to be sent to server
"""
job_list_category = {
'ALL': 0,
'ACTIVE': 1,
'FINISHED': 2
}
for client in options.get('clients_list', []):
if not self._commcell_object.clients.has_client(client):
raise SDKException('Job', '102', 'No client with name {0} exists.'.format(client))
request_json = {
"scope": 1,
"category": job_list_category[options.get('category', 'ALL')],
"pagingConfig": {
"sortDirection": 1,
"offset": 0,
"sortField": "jobId",
"limit": options.get('limit', 20)
},
"jobFilter": {
"completedJobLookupTime": int(options.get('lookup_time', 5) * 60 * 60),
"showAgedJobs": options.get('show_aged_jobs', False),
"clientList": [
{
"clientId": int(self._commcell_object.clients.all_clients[client.lower()]['id'])
} for client in options.get('clients_list', [])
],
"jobTypeList": [
job_type for job_type in options.get('job_type_list', [])
]
}
}
return request_json
def _get_jobs_list(self, **options):
"""Executes a request on the server to get the list of jobs.
Args:
request_json (dict) -- request that is to be sent to server
Returns:
dict - dict containing details about all the retrieved jobs
Raises:
SDKException:
if response is empty
if response is not success
"""
request_json = self._get_jobs_request_json(**options)
flag, response = self._cvpysdk_object.make_request(
'POST', self._services['ALL_JOBS'], request_json
)
jobs_dict = {}
if flag:
try:
if response.json():
if 'jobs' in response.json():
all_jobs = response.json()['jobs']
for job in all_jobs:
if 'jobSummary' in job and job['jobSummary']['isVisible'] is True:
job_summary = job['jobSummary']
job_id = job_summary['jobId']
if options.get('job_summary', '').lower() == 'full':
jobs_dict[job_id] = job_summary
else:
status = job_summary['status']
operation = job_summary['localizedOperationName']
percent_complete = job_summary['percentComplete']
backup_level = job_summary.get('backupLevelName')
app_type = ''
job_type = ''
pending_reason = ''
subclient_id = ''
if 'appTypeName' in job_summary:
app_type = job_summary['appTypeName']
if 'jobType' in job_summary:
job_type = job_summary['jobType']
if 'pendingReason' in job_summary:
pending_reason = job_summary['pendingReason']
if 'subclient' in job_summary:
job_subclient = job_summary['subclient']
if 'subclientId' in job_subclient:
subclient_id = job_subclient['subclientId']
jobs_dict[job_id] = {
'operation': operation,
'status': status,
'app_type': app_type,
'job_type': job_type,
'percent_complete': percent_complete,
'pending_reason': pending_reason,
'subclient_id': subclient_id,
'backup_level': backup_level
}
return jobs_dict
else:
raise SDKException('Response', '102')
except ValueError:
raise SDKException('Response', '102', 'Please check the inputs.')
else:
response_string = self._update_response_(response.text)
raise SDKException('Response', '101', response_string)
def _modify_all_jobs(self, operation_type=None):
""" Executes a request on the server to suspend/resume/kill all the jobs on the commserver
Args:
operation_type (str) -- All jobs on commcell will be changed to this
state.
Options:
suspend/resume/kill
Returns:
None
Raises:
SDKException:
- Invalid input is passed to the module
- Failed to execute the api to modify jobs
- Response is incorrect
"""
job_map = {
'suspend': 'JOB_SUSPEND',
'resume': 'JOB_RESUME',
'kill': 'JOB_KILL'
}
if operation_type not in job_map:
raise SDKException('Job', '102', 'Invalid input')
request_json = {
"JobManager_PerformMultiCellJobOpReq": {
"jobOpReq": {
"operationType": job_map[operation_type]
},
"message": "ALL_JOBS",
"operationDescription": "All jobs"
}
}
response = self._commcell_object._qoperation_execute(request_json)
if 'error' in response:
error_code = response['error'].get('errorCode')
if error_code != 0:
if 'errLogMessage' in response['error']:
error_message = "Failed to {0} all jobs with error: [{1}]".format(
operation_type, response['error']['errLogMessage']
)
raise SDKException(
'Job',
'102',
'Error Code:"{0}"\nError Message: "{1}"'.format(error_code, error_message)
)
else:
raise SDKException('Job',
'102',
"Failed to {0} all jobs".format(operation_type))
else:
raise SDKException('Response', '102')
def all_jobs(self, client_name=None, lookup_time=5, job_filter=None, **options):
"""Returns the dict consisting of all the jobs executed on the Commcell within the number
of hours specified in lookup time value.
Args:
client_name (str) -- name of the client to filter out the jobs for
default: None, get all the jobs
lookup_time (int) -- get all the jobs executed within the number of hours
default: 5 Hours
job_filter (str) -- type of jobs to filter
for multiple filters, give the values **comma(,)** separated
List of Possible Values:
Backup
Restore
AUXCOPY
WORKFLOW
etc..
http://documentation.commvault.com/commvault/v11/article?p=features/rest_api/operations/get_job.htm
to get the complete list of filters available
default: None
options (dict) -- dict of key-word arguments
Available Options:
limit (int) -- total number of jobs list that are to be returned
default: 20
show_aged_job (bool) -- boolean specifying whether to include aged jobs in
the result or not
default: False
clients_list (list) -- list of clients to return the jobs for
default: []
job_type_list (list) -- list of job operation types
default: []
job_summary (str) -- To return the basic job summary or full job summary
default: basic
accepted values: ['basic', 'full']
Returns:
dict - dictionary consisting of the job IDs matching the given criteria
as the key, and their details as its value
Raises:
SDKException:
if client name is given, and no client exists with the given name
"""
options['category'] = 'ALL'
options['lookup_time'] = lookup_time
if job_filter:
options['job_type_list'] = options.get('job_type_list', []) + job_filter.split(',')
if client_name:
options['clients_list'] = options.get('clients_list', []) + [client_name]
return self._get_jobs_list(**options)
def active_jobs(self, client_name=None, lookup_time=1, job_filter=None, **options):
"""Returns the dict consisting of all the active jobs currently being executed on the
Commcell within the number of hours specified in lookup time value.
Args:
client_name (str) -- name of the client to filter out the jobs for
default: None, get all the jobs
lookup_time (int) -- get all the jobs executed within the number of hours
default: 1 Hour(s)
job_filter (str) -- type of jobs to filter
for multiple filters, give the values **comma(,)** separated
List of Possible Values:
Backup
Restore
AUXCOPY
WORKFLOW
etc..
http://documentation.commvault.com/commvault/v11/article?p=features/rest_api/operations/get_job.htm
to get the complete list of filters available
default: None
options (dict) -- dict of key-word arguments
Available Options:
limit (int) -- total number of jobs list that are to be returned
default: 20
show_aged_job (bool) -- boolean specifying whether to include aged jobs in
the result or not
default: False
clients_list (list) -- list of clients to return the jobs for
default: []
job_type_list (list) -- list of job operation types
default: []
job_summary (str) -- To return the basic job summary or full job summary
default: basic
accepted values: ['basic', 'full']
Returns:
dict - dictionary consisting of the job IDs matching the given criteria
as the key, and their details as its value
Raises:
SDKException:
if client name is given, and no client exists with the given name
"""
options['category'] = 'ACTIVE'
options['lookup_time'] = lookup_time
if job_filter:
options['job_type_list'] = options.get('job_type_list', []) + job_filter.split(',')
if client_name:
options['clients_list'] = options.get('clients_list', []) + [client_name]
return self._get_jobs_list(**options)
def finished_jobs(self, client_name=None, lookup_time=24, job_filter=None, **options):
"""Returns the dict consisting of all the finished jobs on the Commcell within the number
of hours specified in lookup time value.
Args:
client_name (str) -- name of the client to filter out the jobs for
                default: None, get all the jobs irrespective of client
lookup_time (int) -- get all the jobs executed within the number of hours
default: 24 Hours
job_filter (str) -- type of jobs to filter
for multiple filters, give the values **comma(,)** separated
List of Possible Values:
Backup
Restore
AUXCOPY
WORKFLOW
etc..
http://documentation.commvault.com/commvault/v11/article?p=features/rest_api/operations/get_job.htm
to get the complete list of filters available
default: None
options (dict) -- dict of key-word arguments
Available Options:
limit (int) -- total number of jobs list that are to be returned
default: 20
show_aged_job (bool) -- boolean specifying whether to include aged jobs in
the result or not
default: False
clients_list (list) -- list of clients to return the jobs for
default: []
job_type_list (list) -- list of job operation types
default: []
job_summary (str) -- To return the basic job summary or full job summary
default: basic
accepted values: ['basic', 'full']
Returns:
dict - dictionary consisting of the job IDs matching the given criteria
as the key, and their details as its value
Raises:
SDKException:
if client name is given, and no client exists with the given name
"""
options['category'] = 'FINISHED'
options['lookup_time'] = lookup_time
if job_filter:
options['job_type_list'] = options.get('job_type_list', []) + job_filter.split(',')
if client_name:
options['clients_list'] = options.get('clients_list', []) + [client_name]
return self._get_jobs_list(**options)
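    # Usage sketch (illustrative only; assumes `job_controller` is an instance of this
    # jobs-controller class obtained from an authenticated Commcell object, and that a
    # client named 'client1' exists):
    #
    #   jobs = job_controller.finished_jobs(
    #       client_name='client1', lookup_time=24, job_filter='Backup', limit=50
    #   )
    #   for job_id, details in jobs.items():
    #       print(job_id, details)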
def suspend_all_jobs(self):
""" Suspends all the jobs on the commserver """
self._modify_all_jobs('suspend')
def resume_all_jobs(self):
""" Resumes all the jobs on the commserver """
self._modify_all_jobs('resume')
def kill_all_jobs(self):
""" Kills all the jobs on the commserver """
self._modify_all_jobs('kill')
def get(self, job_id):
"""Returns the job object for the given job id.
Args:
job_id (int) -- id of the job to create Job class instance for
Returns:
object - Job class object for the given job id
Raises:
SDKException:
if no job with specified job id exists
"""
return Job(self._commcell_object, job_id)
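# Usage sketch (illustrative only; `job_controller` stands for an instance of the class
# above, reached from an authenticated Commcell object, and 1234 is a placeholder job id):
#
#   job_controller.suspend_all_jobs()     # pause every job on the CommServe
#   job_controller.resume_all_jobs()      # and resume them again
#   job = job_controller.get(1234)        # Job instance for job id 1234
#   print(job.status)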
class JobManagement(object):
"""Class for performing job management operations. """
def __init__(self, commcell_object):
"""
        Initialize instance of JobManagement class for performing operations on job management settings.
Args:
commcell_object (object) -- instance of Commcell class.
Returns:
None
"""
self._comcell = commcell_object
self._service = commcell_object._services.get('JOB_MANAGEMENT_SETTINGS')
self._cvpysdk_object = commcell_object._cvpysdk_object
self._error_rules = None
self.refresh()
@property
def error_rules(self):
if not self._error_rules:
self._error_rules = _ErrorRule(self._comcell)
return self._error_rules
def _set_jobmanagement_settings(self):
"""
Executes a request on the server, to set the job management settings.
Returns:
None
Raises:
SDKException:
if given inputs are invalid
"""
flag, response = self._cvpysdk_object.make_request(method='POST', url=self._service,
payload=self._settings_dict)
if flag:
if response and response.json():
if response.json().get('errorCode', 0) != 0:
raise SDKException('Job', '102', 'Failed to set job management properties. \nError: {0}'.format(
response.json().get('errorMessage', '')))
self.refresh()
            else:
                raise SDKException('Response', '102')
        else:
            response_string = self._comcell._update_response_(response.text)
            raise SDKException('Response', '101', response_string)
def _get_jobmanagement_settings(self):
"""
Executes a request on the server to get the settings of job management.
Returns:
None
Raises:
SDKException
if response is empty
if response is not success
"""
flag, response = self._cvpysdk_object.make_request(method='GET', url=self._service)
if flag:
if response and response.json():
self._settings_dict = response.json()
if self._settings_dict.get('errorCode', 0) != 0:
raise SDKException('Job', '102', 'Failed to get job management properties. \nError: {0}'.format(
self._settings_dict.get('errorMessage', '')))
if 'jobManagementSettings' in self._settings_dict:
self._restart_settings = {'jobRestartSettings': self._settings_dict.get(
'jobManagementSettings').get('jobRestartSettings', {})}
self._priority_settings = {'jobPrioritySettings': self._settings_dict.get(
'jobManagementSettings').get('jobPrioritySettings', {})}
self._general_settings = {'generalSettings': self._settings_dict.get(
'jobManagementSettings').get('generalSettings', {})}
self._update_settings = {'jobUpdatesSettings': self._settings_dict.get(
'jobManagementSettings').get('jobUpdatesSettings', {})}
else:
raise SDKException('Response', '102')
else:
raise SDKException('Response', '102')
else:
response_string = self._comcell._update_response_(response.text)
raise SDKException('Response', '101', response_string)
def refresh(self):
"""
calls the private method _get_jobmanagement_settings()
"""
self._restart_settings = None
self._general_settings = None
self._update_settings = None
self._priority_settings = None
self._get_jobmanagement_settings()
def set_general_settings(self, settings):
"""
sets general settings of job management.
Note : dedicated setters and getters are provided for general settings.
Args:
settings (dict) -- Following key/value pairs can be set.
{
"allowRunningJobsToCompletePastOperationWindow": False,
"jobAliveCheckIntervalInMinutes": 5,
"queueScheduledJobs": False,
"enableJobThrottleAtClientLevel": False,
"enableMultiplexingForDBAgents": False,
"queueJobsIfConflictingJobsActive": False,
"queueJobsIfActivityDisabled": False,
"backupsPreemptsAuxilaryCopy": False,
"restorePreemptsOtherJobs": False,
"enableMultiplexingForOracle": False,
"jobStreamHighWaterMarkLevel": 500,
"backupsPreemptsOtherBackups": False,
"doNotStartBackupsOnDisabledClient": False
}
Returns:
None
Raises:
SDKException:
if input is not valid type
"""
if isinstance(settings, dict):
self._general_settings.get('generalSettings').update(settings)
self._set_jobmanagement_settings()
else:
raise SDKException('Job', '108')
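    # Usage sketch (illustrative only; assumes `commcell` is an authenticated Commcell
    # instance). Only the keys that need to change have to be passed:
    #
    #   job_mgmt = JobManagement(commcell)
    #   job_mgmt.set_general_settings({
    #       "queueScheduledJobs": True,
    #       "jobStreamHighWaterMarkLevel": 600
    #   })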
def set_priority_settings(self, settings):
"""
sets priority settings for jobs and agents type.
Args:
settings (list) -- list of dictionaries with following format.
[
{
"type_of_operation": 1,
"combinedPriority": 10,
"jobTypeName": "Information Management"
},
{
"type_of_operation": 2,
"combinedPriority": 10,
"appTypeName": "Windows File System"
},
{
"type_of_operation": 1,
"combinedPriority": 10,
"jobTypeName": "Auxiliary Copy"
}
]
            We have priority settings for jobtype and agenttype
NOTE : for setting, priority for jobtype the 'type_of_operation' must be set to 1 and name of the job type
must be specified as below format.
ex :- "jobTypeName": "Information Management"
NOTE : for setting, priority for agenttype the 'type_of_operation' must be set to 2 and name of the job
type must be specified as below format
ex :- "appTypeName": "Windows File System"
Returns:
None
Raises:
SDKException:
if input is not valid type
"""
if isinstance(settings, list):
for job in settings:
if job["type_of_operation"] == 1:
for job_type in self._priority_settings['jobPrioritySettings']['jobTypePriorityList']:
if job_type['jobTypeName'] == job.get("jobTypeName"):
job.pop("jobTypeName")
job.pop("type_of_operation")
job_type.update(job)
break
elif job["type_of_operation"] == 2:
for job_type in self._priority_settings['jobPrioritySettings']['agentTypePriorityList']:
if job_type['agentTypeEntity']['appTypeName'] == job.get("appTypeName"):
job.pop("appTypeName")
job.pop("type_of_operation")
job_type.update(job)
break
self._set_jobmanagement_settings()
else:
raise SDKException('Job', '108')
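    # Usage sketch (illustrative only; assumes `job_mgmt` is a JobManagement instance;
    # the priority values below are arbitrary examples):
    #
    #   job_mgmt.set_priority_settings([
    #       {"type_of_operation": 1, "combinedPriority": 5, "jobTypeName": "Auxiliary Copy"},
    #       {"type_of_operation": 2, "combinedPriority": 8, "appTypeName": "Windows File System"},
    #   ])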
def set_restart_settings(self, settings):
"""
sets restart settings for jobs.
Args:
settings (list) -- list of dictionaries with following format
[
{
"killRunningJobWhenTotalRunningTimeExpires": False,
"maxRestarts": 10,
"enableTotalRunningTime": False,
"restartable": False,
"jobTypeName": "File System and Indexing Based (Data Protection)",
"restartIntervalInMinutes": 20,
"preemptable": True,
"totalRunningTime": 21600,
"jobType": 6
},
{
"killRunningJobWhenTotalRunningTimeExpires": False,
"maxRestarts": 144,
"enableTotalRunningTime": False,
"restartable": False,
"jobTypeName": "File System and Indexing Based (Data Recovery)",
"restartIntervalInMinutes": 20,
"preemptable": False,
"totalRunningTime": 21600,
"jobType": 7
}
]
Returns:
None
Raises:
SDKException:
if input is not valid type
"""
if isinstance(settings, list):
for job in settings:
target = {'target': job_type for job_type in
self._restart_settings['jobRestartSettings']['jobTypeRestartSettingList']
if job_type['jobTypeName'] == job.get("jobTypeName")}
target.get('target').update(job)
self._set_jobmanagement_settings()
else:
raise SDKException('Job', '108')
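    # Usage sketch (illustrative only; assumes `job_mgmt` is a JobManagement instance).
    # Besides `jobTypeName`, only the fields being changed need to be supplied, since the
    # existing settings dict is updated in place:
    #
    #   job_mgmt.set_restart_settings([{
    #       "jobTypeName": "File System and Indexing Based (Data Protection)",
    #       "maxRestarts": 5,
    #       "restartIntervalInMinutes": 10,
    #   }])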
def set_update_settings(self, settings):
"""
sets update settings for jobs
Args:
settings (list) -- list of dictionaries with following format
[
{
"appTypeName": "Windows File System",
"recoveryTimeInMinutes": 20,
"protectionTimeInMinutes": 20
},
{
"appTypeName": "Windows XP 64-bit File System",
"recoveryTimeInMinutes": 20,
"protectionTimeInMinutes": 20,
}
]
Returns:
None
Raises:
SDKException:
if input is not valid type
"""
if isinstance(settings, list):
for job in settings:
for job_type in self._update_settings['jobUpdatesSettings']['agentTypeJobUpdateIntervalList']:
if job_type['agentTypeEntity']['appTypeName'] == job.get("appTypeName"):
job.pop("appTypeName")
job_type.update(job)
break
self._set_jobmanagement_settings()
else:
raise SDKException('Job', '108')
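    # Usage sketch (illustrative only; assumes `job_mgmt` is a JobManagement instance):
    #
    #   job_mgmt.set_update_settings([{
    #       "appTypeName": "Windows File System",
    #       "recoveryTimeInMinutes": 15,
    #       "protectionTimeInMinutes": 15,
    #   }])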
@property
def job_priority_precedence(self):
"""
gets the job priority precedence
Returns:
(str) -- type of job priority precedence is set.
"""
available_priorities = {
1: "client",
2: "agentType"
}
return available_priorities.get(self._priority_settings["jobPrioritySettings"]["priorityPrecedence"])
@job_priority_precedence.setter
def job_priority_precedence(self, priority_type):
"""
sets job priority precedence
Args:
priority_type (str) -- type of priority to be set
Values:
"client"
"agentType"
"""
if isinstance(priority_type, str):
available_priorities = {
"client": 1,
"agentType": 2
}
self._priority_settings["jobPrioritySettings"]["priorityPrecedence"] = available_priorities[priority_type]
self._set_jobmanagement_settings()
else:
raise SDKException('Job', '108')
@property
def start_phase_retry_interval(self):
"""
gets the start phase retry interval in (minutes)
Returns:
(int) -- interval in minutes.
"""
return self._restart_settings["jobRestartSettings"]["startPhaseRetryIntervalInMinutes"]
@start_phase_retry_interval.setter
def start_phase_retry_interval(self, minutes):
"""
sets start phase retry interval for jobs
Args:
minutes (int) -- minutes to be set.
Raises:
SDKException:
if input is not valid type.
"""
if isinstance(minutes, int):
self._restart_settings["jobRestartSettings"]["startPhaseRetryIntervalInMinutes"] = minutes
self._set_jobmanagement_settings()
else:
raise SDKException('Job', '108')
@property
def state_update_interval_for_continuous_data_replicator(self):
"""
gets the state update interval for continuous data replicator in (minutes)
Returns:
(int) -- interval in minutes
"""
return self._update_settings["jobUpdatesSettings"]["stateUpdateIntervalForContinuousDataReplicator"]
@state_update_interval_for_continuous_data_replicator.setter
def state_update_interval_for_continuous_data_replicator(self, minutes):
"""
sets state update interval for continuous data replicator
Args:
minutes (int) -- minutes to be set.
Raises:
SDKException:
if input is not valid type
"""
if isinstance(minutes, int):
self._update_settings["jobUpdatesSettings"]["stateUpdateIntervalForContinuousDataReplicator"] = minutes
self._set_jobmanagement_settings()
else:
raise SDKException('Job', '108')
@property
def allow_running_jobs_to_complete_past_operation_window(self):
"""
Returns True if option is enabled
else returns false
"""
return self._general_settings.get('generalSettings').get("allowRunningJobsToCompletePastOperationWindow")
@allow_running_jobs_to_complete_past_operation_window.setter
def allow_running_jobs_to_complete_past_operation_window(self, flag):
"""
enable/disable, allow running jobs to complete past operation window.
Args:
flag (bool) -- (True/False) to be set.
Raises:
SDKException:
if input is not valid type
"""
if isinstance(flag, bool):
settings = {
"allowRunningJobsToCompletePastOperationWindow": flag
}
self.set_general_settings(settings)
else:
raise SDKException('Job', '108')
@property
def job_alive_check_interval_in_minutes(self):
"""
gets the job alive check interval in (minutes)
Returns:
(int) -- interval in minutes
"""
return self._general_settings.get('generalSettings').get("jobAliveCheckIntervalInMinutes")
@job_alive_check_interval_in_minutes.setter
def job_alive_check_interval_in_minutes(self, minutes):
"""
sets the job alive check interval in (minutes)
Args:
minutes -- minutes to be set.
Raises:
SDKException:
if input is not valid type
"""
if isinstance(minutes, int):
settings = {
"jobAliveCheckIntervalInMinutes": minutes
}
self.set_general_settings(settings)
else:
raise SDKException('Job', '108')
@property
def queue_scheduled_jobs(self):
"""
Returns True if option is enabled
else returns false
"""
return self._general_settings.get('generalSettings').get("queueScheduledJobs")
@queue_scheduled_jobs.setter
def queue_scheduled_jobs(self, flag):
"""
enable/disable, queue scheduled jobs
Args:
flag (bool) -- (True/False to be set)
Raises:
SDKException:
if input is not valid type
"""
if isinstance(flag, bool):
settings = {
"queueScheduledJobs": flag
}
self.set_general_settings(settings)
else:
raise SDKException('Job', '108')
@property
def enable_job_throttle_at_client_level(self):
"""
Returns True if option is enabled
else returns false
"""
return self._general_settings.get('generalSettings').get("enableJobThrottleAtClientLevel")
@enable_job_throttle_at_client_level.setter
def enable_job_throttle_at_client_level(self, flag):
"""
enable/disable, job throttle at client level
Args:
flag (bool) -- (True/False) to be set
Raises:
SDKException:
if input is not valid type
"""
if isinstance(flag, bool):
settings = {
"enableJobThrottleAtClientLevel": flag
}
self.set_general_settings(settings)
else:
raise SDKException('Job', '108')
@property
def enable_multiplexing_for_db_agents(self):
"""
Returns True if option is enabled
else returns False
"""
return self._general_settings.get('generalSettings').get("enableMultiplexingForDBAgents")
@enable_multiplexing_for_db_agents.setter
def enable_multiplexing_for_db_agents(self, flag):
"""
enable/disable, multiplexing for db agents
Args:
flag (bool) -- (True/False) to be set
Raises:
SDKException:
if input is not valid type
"""
if isinstance(flag, bool):
settings = {
"enableMultiplexingForDBAgents": flag
}
self.set_general_settings(settings)
else:
raise SDKException('Job', '108')
@property
def queue_jobs_if_conflicting_jobs_active(self):
"""
Returns True if option is enabled
else returns false
"""
return self._general_settings.get('generalSettings').get("queueJobsIfConflictingJobsActive")
@queue_jobs_if_conflicting_jobs_active.setter
def queue_jobs_if_conflicting_jobs_active(self, flag):
"""
enable/disable, queue jobs if conflicting jobs active
        Args:
flag (bool) -- (True/False) to be set
Raises:
SDKException:
if input is not valid type
"""
if isinstance(flag, bool):
settings = {
"queueJobsIfConflictingJobsActive": flag
}
self.set_general_settings(settings)
else:
raise SDKException('Job', '108')
@property
def queue_jobs_if_activity_disabled(self):
"""
Returns True if option is enabled
else returns False
"""
return self._general_settings.get('generalSettings').get("queueJobsIfActivityDisabled")
@queue_jobs_if_activity_disabled.setter
def queue_jobs_if_activity_disabled(self, flag):
"""
enable/disable, queue jobs if activity disabled
        Args:
flag (bool) -- (True/False) to be set
Raises:
SDKException:
if input is not valid type
"""
if isinstance(flag, bool):
settings = {
"queueJobsIfActivityDisabled": flag
}
self.set_general_settings(settings)
else:
raise SDKException('Job', '108')
@property
def backups_preempts_auxilary_copy(self):
"""
Returns True if option is enabled
else returns False
"""
return self._general_settings.get('generalSettings').get("backupsPreemptsAuxilaryCopy")
@backups_preempts_auxilary_copy.setter
def backups_preempts_auxilary_copy(self, flag):
"""
enable/disable, backups preempts auxiliary copy
Args:
flag (bool) -- (True/False) to be set
Raises:
SDKException:
if input is not valid type
"""
if isinstance(flag, bool):
settings = {
"backupsPreemptsAuxilaryCopy": flag
}
self.set_general_settings(settings)
else:
raise SDKException('Job', '108')
@property
def restore_preempts_other_jobs(self):
"""
Returns True if option is enabled
else returns False
"""
return self._general_settings.get('generalSettings').get("restorePreemptsOtherJobs")
@restore_preempts_other_jobs.setter
def restore_preempts_other_jobs(self, flag):
"""
enable/disable, restore preempts other jobs
Args:
flag (bool) -- (True/False) to be set
Raises:
SDKException:
if input is not valid type
"""
if isinstance(flag, bool):
settings = {
"restorePreemptsOtherJobs": flag
}
self.set_general_settings(settings)
else:
raise SDKException('Job', '108')
@property
def enable_multiplexing_for_oracle(self):
"""
Returns True if option is enabled
else returns False
"""
return self._general_settings.get('generalSettings').get("enableMultiplexingForOracle")
@enable_multiplexing_for_oracle.setter
def enable_multiplexing_for_oracle(self, flag):
"""
enable/disable, enable multiplexing for oracle
Args:
flag (bool) -- (True/False) to be set
Raises:
SDKException:
if input is not valid type
"""
if isinstance(flag, bool):
settings = {
"enableMultiplexingForOracle": flag
}
self.set_general_settings(settings)
else:
raise SDKException('Job', '108')
@property
def job_stream_high_water_mark_level(self):
"""
gets the job stream high water mark level
"""
return self._general_settings.get('generalSettings').get("jobStreamHighWaterMarkLevel")
@job_stream_high_water_mark_level.setter
def job_stream_high_water_mark_level(self, level):
"""
        sets the job stream high water mark level
Args:
level (int) -- number of jobs to be performed at a time
Raises:
SDKException:
if input is not valid type
"""
if isinstance(level, int):
settings = {
"jobStreamHighWaterMarkLevel": level
}
self.set_general_settings(settings)
else:
raise SDKException('Job', '108')
@property
def backups_preempts_other_backups(self):
"""
Returns True if option is enabled
else returns False
"""
return self._general_settings.get('generalSettings').get("backupsPreemptsOtherBackups")
@backups_preempts_other_backups.setter
def backups_preempts_other_backups(self, flag):
"""
enable/disable, backups preempts other backups
Args:
flag (bool) -- (True/False) to be set
Raises:
SDKException:
if input is not a valid type
"""
if isinstance(flag, bool):
settings = {
"backupsPreemptsOtherBackups": flag
}
self.set_general_settings(settings)
else:
raise SDKException('Job', '108')
@property
def do_not_start_backups_on_disabled_client(self):
"""
Returns True if option is enabled
else returns False
"""
return self._general_settings.get('generalSettings').get("doNotStartBackupsOnDisabledClient")
@do_not_start_backups_on_disabled_client.setter
def do_not_start_backups_on_disabled_client(self, flag):
"""
enable/disable, do not start backups on disabled client
Args:
flag (bool) -- (True/False) to be set
Raises:
SDKException:
if input is not a valid type
"""
if isinstance(flag, bool):
settings = {
"doNotStartBackupsOnDisabledClient": flag
}
self.set_general_settings(settings)
else:
raise SDKException('Job', '108')
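    # Usage sketch for the property-style toggles above (illustrative only; assumes
    # `job_mgmt` is a JobManagement instance). Each setter immediately pushes the change
    # to the CommServe via set_general_settings():
    #
    #   job_mgmt.queue_jobs_if_activity_disabled = True
    #   job_mgmt.job_stream_high_water_mark_level = 600
    #   print(job_mgmt.do_not_start_backups_on_disabled_client)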
def get_restart_setting(self, jobtype):
"""
restart settings associated to particular jobtype can be obtained
Args:
jobtype (str) -- settings of the jobtype to get
Available jobtypes:
"Disaster Recovery backup"
"Auxiliary Copy"
"Data Aging"
"Download/Copy Updates"
"Offline Content Indexing"
"Information Management"
"File System and Indexing Based (Data Protection)"
"File System and Indexing Based (Data Recovery)"
"Exchange DB (Data Protection)"
"Exchange DB (Data Recovery)"
"Informix DB (Data Protection)"
"Informix DB (Data Recovery)"
"Lotus Notes DB (Data Protection)"
"Lotus Notes DB (Data Recovery)"
"Oracle DB (Data Protection)"
"Oracle DB (Data Recovery)"
"SQL DB (Data Protection)"
"SQL DB (Data Recovery)"
"MYSQL (Data Protection)"
                    "MYSQL (Data Recovery)"
"Sybase DB (Data Protection)"
"Sybase DB (Data Recovery)"
"DB2 (Data Protection)"
"DB2 (Data Recovery)"
"CDR (Data Management)"
"Media Refresh"
"Documentum (Data Protection)"
"Documentum (Data Recovery)"
"SAP for Oracle (Data Protection)"
"SAP for Oracle (Data Recovery)"
"PostgreSQL (Data Protection)"
"PostgreSQL (Data Recovery)"
"Other (Data Protection)"
"Other (Data Recovery)"
"Workflow"
"DeDup DB Reconstruction"
"CommCell Migration Export"
"CommCell Migration Import"
"Install Software"
"Uninstall Software"
"Data Verification"
"Big Data Apps (Data Protection)"
"Big Data Apps (Data Recovery)"
"Cloud Apps (Data Protection)"
"Cloud Apps (Data Recovery)"
"Virtual Server (Data Protection)"
"Virtual Server (Data Recovery)"
"SAP for Hana (Data Protection)"
"SAP for Hana (Data Recovery)"
Returns:
dict -- settings of the specific job type as follows
{
"jobTypeName": "File System and Indexing Based (Data Protection)",
"restartable": true,
"maxRestarts": 10,
"restartIntervalInMinutes": 20,
"enableTotalRunningTime": false,
"totalRunningTime": 25200,
"killRunningJobWhenTotalRunningTimeExpires": false,
"preemptable": true,
}
Raises:
SDKException:
if input is not valid type
"""
if isinstance(jobtype, str):
for job_type in self._restart_settings['jobRestartSettings']['jobTypeRestartSettingList']:
if job_type['jobTypeName'] == jobtype:
settings = copy.deepcopy(job_type)
return settings
else:
raise SDKException('Job', '108')
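    # Usage sketch (illustrative only; assumes `job_mgmt` is a JobManagement instance).
    # Read the current restart settings for a job type, tweak them, and push them back:
    #
    #   current = job_mgmt.get_restart_setting("Auxiliary Copy")
    #   current["maxRestarts"] = 3
    #   job_mgmt.set_restart_settings([current])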
def get_priority_setting(self, jobtype):
"""
priority settings associated to particular jobtype can be obtained
Args:
jobtype (str) -- settings of jobtype to get
Available values:
jobtypename:
"Information Management"
"Auxiliary Copy"
"Media Refresh"
"Data Verification"
"Persistent Recovery"
"Synth Full"
apptypename:
"Windows File System"
"Windows XP 64-bit File System"
"Windows 2003 32-bit File System"
"Windows 2003 64-bit File System"
"Active Directory"
"Windows File Archiver"
"File Share Archiver"
"Image Level"
"Exchange Mailbox (Classic)"
"Exchange Mailbox Archiver"
"Exchange Compliance Archiver"
"Exchange Public Folder"
"Exchange Database"
"SharePoint Database"
"SharePoint Server Database"
"SharePoint Document"
"SharePoint Server"
"Novell Directory Services"
"GroupWise DB"
"NDMP"
"Notes Document"
"Unix Notes Database"
"MAC FileSystem"
"Big Data Apps"
"Solaris File System"
"Solaris 64bit File System"
"FreeBSD"
"HP-UX File System"
"HP-UX 64bit File System"
"AIX File System"
"Unix Tru64 64-bit File System"
"Linux File System"
"Sybase Database"
"Oracle Database"
"Oracle RAC"
"Informix Database"
"DB2"
"DB2 on Unix"
"SAP for Oracle"
"SAP for MAX DB"
"ProxyHost on Unix"
"ProxyHost"
"Image Level On Unix"
"OSSV Plug-in on Windows"
"OSSV Plug-in on Unix"
"Unix File Archiver"
"SQL Server"
"Data Classification"
"OES File System on Linux"
"Centera"
"Exchange PF Archiver"
"Domino Mailbox Archiver"
"MS SharePoint Archiver"
"Content Indexing Agent"
"SRM Agent For Windows File Systems"
"SRM Agent For UNIX File Systems"
"DB2 MultiNode"
"MySQL"
"Virtual Server"
"SharePoint Search Connector"
"Object Link"
"PostgreSQL"
"Sybase IQ"
"External Data Connector"
"Documentum"
"Object Store"
"SAP HANA"
"Cloud Apps"
"Exchange Mailbox"
Returns:
dict -- settings of a specific jobtype
ex:
{
"jobTypeName": "Information Management",
"combinedPriority": 0,
"type_of_operation": 1
}
or
settings of a specific apptype
ex:
{
"appTypeName": "Windows File System",
"combinedPriority": 6,
"type_of_operation": 2
}
Raises:
SDKException:
if input is not valid type
"""
if isinstance(jobtype, str):
for job_type in self._priority_settings['jobPrioritySettings']['jobTypePriorityList']:
if job_type['jobTypeName'] == jobtype:
settings = {
'jobTypeName': job_type.get('jobTypeName'),
'combinedPriority': job_type.get('combinedPriority'),
'type_of_operation': 1
}
return settings
for job_type in self._priority_settings['jobPrioritySettings']['agentTypePriorityList']:
if job_type['agentTypeEntity']['appTypeName'] == jobtype:
settings = {
'appTypeName': job_type.get('agentTypeEntity').get('appTypeName'),
'combinedPriority': job_type.get('combinedPriority'),
'type_of_operation': 2
}
return settings
else:
raise SDKException('Job', '108')
def get_update_setting(self, jobtype):
"""
update settings associated to particular jobtype can be obtained
Args:
jobtype (str) -- settings of jobtype to get
Available jobtype
Check get_priority_setting(self, jobtype) method documentation.
Returns:
dict - settings of a jobtype
{
"appTypeName": "Windows File System",
"recoveryTimeInMinutes": 20,
"protectionTimeInMinutes": 20
}
Raises:
SDKException:
if input is not valid type
"""
if isinstance(jobtype, str):
for job_type in self._update_settings['jobUpdatesSettings']['agentTypeJobUpdateIntervalList']:
if job_type['agentTypeEntity']['appTypeName'] == jobtype:
settings = {
'appTypeName': job_type.get('agentTypeEntity').get('appTypeName'),
'recoveryTimeInMinutes': job_type.get('recoveryTimeInMinutes'),
'protectionTimeInMinutes': job_type.get('protectionTimeInMinutes')
}
return settings
else:
raise SDKException('Job', '108')
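    # Usage sketch (illustrative only; assumes `job_mgmt` is a JobManagement instance):
    #
    #   update_cfg = job_mgmt.get_update_setting("Windows File System")
    #   priority_cfg = job_mgmt.get_priority_setting("Windows File System")
    #   print(update_cfg, priority_cfg)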
@property
def general_settings(self):
"""
gets the general settings.
Returns: (dict) -- The general settings
"""
return self._general_settings
@property
def restart_settings(self):
"""
gets the restart settings.
Returns: (dict) -- The restart settings.
"""
return self._restart_settings
@property
def priority_settings(self):
"""
gets the priority settings.
Returns: (dict) -- The priority settings.
"""
return self._priority_settings
@property
def update_settings(self):
"""
gets the update settings.
Returns: (dict) -- The update settings.
"""
return self._update_settings
def set_job_error_threshold(self, error_threshold_dict):
"""
Args:
error_threshold_dict (dict) : A dictionary of following key/value pairs can be set.
Returns:
None
"""
raise NotImplementedError("Yet To Be Implemented")
class Job(object):
"""Class for performing client operations for a specific client."""
def __init__(self, commcell_object, job_id):
"""Initialise the Job class instance.
Args:
commcell_object (object) -- instance of the Commcell class
job_id (str / int) -- id of the job
Returns:
object - instance of the Job class
Raises:
SDKException:
if job id is not an integer
if job is not a valid job, i.e., does not exist in the Commcell
"""
try:
int(job_id)
except ValueError:
raise SDKException('Job', '101')
self._commcell_object = commcell_object
self._cvpysdk_object = commcell_object._cvpysdk_object
self._services = commcell_object._services
self._update_response_ = commcell_object._update_response_
self._job_id = str(job_id)
self._JOB = self._services['JOB'] % (self.job_id)
if not self._is_valid_job():
raise SDKException('Job', '103')
self._JOB_DETAILS = self._services['JOB_DETAILS']
self.ADVANCED_JOB_DETAILS = AdvancedJobDetailType
self._SUSPEND = self._services['SUSPEND_JOB'] % self.job_id
self._RESUME = self._services['RESUME_JOB'] % self.job_id
self._KILL = self._services['KILL_JOB'] % self.job_id
self._RESUBMIT = self._services['RESUBMIT_JOB'] % self.job_id
self._client_name = None
self._agent_name = None
self._instance_name = None
self._backupset_name = None
self._subclient_name = None
self._job_type = None
self._backup_level = None
self._start_time = None
self._end_time = None
self._delay_reason = None
self._pending_reason = None
self._status = None
self._phase = None
self._summary = None
self._details = None
self.refresh()
def __repr__(self):
"""String representation of the instance of this class.
Returns:
str - string for instance of this class
"""
representation_string = 'Job class instance for job id: "{0}"'
return representation_string.format(self.job_id)
def _is_valid_job(self):
"""Checks if the job submitted with the job id is a valid job or not.
Returns:
bool - boolean that represents whether the job is valid or not
"""
for _ in range(10):
try:
self._get_job_summary()
return True
except SDKException as excp:
if excp.exception_module == 'Job' and excp.exception_id == '104':
time.sleep(1.5)
continue
else:
raise excp
return False
def _get_job_summary(self):
"""Gets the properties of this job.
Returns:
dict - dict that contains the summary of this job
Raises:
SDKException:
if no record found for this job
if response is empty
if response is not success
"""
attempts = 3
for _ in range(attempts): # Retrying to ignore the transient case when no jobs are found
flag, response = self._cvpysdk_object.make_request('GET', self._JOB)
if flag:
if response.json():
if response.json().get('totalRecordsWithoutPaging', 0) == 0:
time.sleep(3)
continue
if 'jobs' in response.json():
for job in response.json()['jobs']:
return job['jobSummary']
else:
raise SDKException('Response', '102')
else:
response_string = self._update_response_(response.text)
raise SDKException('Response', '101', response_string)
raise SDKException('Job', '104')
def _get_job_details(self):
"""Gets the detailed properties of this job.
Returns:
dict - dict consisting of the detailed properties of the job
Raises:
SDKException:
if failed to get the job details
if response is empty
if response is not success
"""
payload = {
"jobId": int(self.job_id)
}
flag, response = self._cvpysdk_object.make_request('POST', self._JOB_DETAILS, payload)
if flag:
if response.json():
if 'job' in response.json():
return response.json()['job']
elif 'error' in response.json():
error_code = response.json()['error']['errList'][0]['errorCode']
error_message = response.json()['error']['errList'][0]['errLogMessage']
raise SDKException(
'Job',
'105',
'Error Code: "{0}"\nError Message: "{1}"'.format(error_code, error_message)
)
else:
raise SDKException('Job', '106', 'Response JSON: {0}'.format(response.json()))
else:
raise SDKException('Response', '102')
else:
response_string = self._update_response_(response.text)
raise SDKException('Response', '101', response_string)
def _initialize_job_properties(self):
"""Initializes the common properties for the job.
Adds the client, agent, backupset, subclient name to the job object.
"""
self._summary = self._get_job_summary()
self._details = self._get_job_details()
self._status = self._summary['status']
self._start_time = time.strftime(
'%Y-%m-%d %H:%M:%S', time.gmtime(self._summary['jobStartTime'])
)
def _wait_for_status(self, status):
"""Waits for 6 minutes or till the job status is changed to given status,
whichever is earlier.
Args:
status (str) -- Job Status
Returns:
None
"""
start_time = time.time()
while self.status.lower() != status.lower():
if (self.is_finished is True) or (time.time() - start_time > 360):
break
time.sleep(3)
def wait_for_completion(self, timeout=30):
"""Waits till the job is not finished; i.e.; till the value of job.is_finished is not True.
Kills the job and exits, if the job has been in Pending / Waiting state for more than
the timeout value.
In case of job failure job status and failure reason can be obtained
using status and delay_reason property
Args:
timeout (int) -- minutes after which the job should be killed and exited,
if the job has been in Pending / Waiting state
default: 30
Returns:
bool - boolean specifying whether the job had finished or not
True - if the job had finished successfully
False - if the job was killed/failed
"""
start_time = time.time()
pending_time = 0
waiting_time = 0
previous_status = None
status_list = ['pending', 'waiting']
while not self.is_finished:
time.sleep(30)
# get the current status of the job
status = self.status.lower()
# set the value of start time as current time
# if the current status is pending / waiting but the previous status was not
# also if the current status is pending / waiting and same as previous,
# then don't update the value of start time
if status in status_list and previous_status not in status_list:
start_time = time.time()
if status == 'pending':
pending_time = (time.time() - start_time) / 60
else:
pending_time = 0
if status == 'waiting':
waiting_time = (time.time() - start_time) / 60
else:
waiting_time = 0
if pending_time > timeout or waiting_time > timeout:
self.kill()
break
# set the value of previous status as the value of current status
previous_status = status
else:
return self._status.lower() not in ["failed", "killed", "failed to start"]
return False
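    # Usage sketch (illustrative only; assumes `commcell` is an authenticated Commcell
    # instance and 1234 is a placeholder job id):
    #
    #   job = Job(commcell, 1234)
    #   if job.wait_for_completion(timeout=60):
    #       print('finished with status:', job.status)
    #   else:
    #       print('failed/killed:', job.status, job.delay_reason)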
@property
def is_finished(self):
"""Checks whether the job has finished or not.
Returns:
bool - boolean that represents whether the job has finished or not
"""
self._summary = self._get_job_summary()
self._details = self._get_job_details()
self._status = self._summary['status']
if self._summary['lastUpdateTime'] != 0:
self._end_time = time.strftime(
'%Y-%m-%d %H:%M:%S', time.gmtime(self._summary['lastUpdateTime'])
)
return ('completed' in self._status.lower() or
'killed' in self._status.lower() or
'failed' in self._status.lower())
@property
def client_name(self):
"""Treats the client name as a read-only attribute."""
if 'clientName' in self._summary['subclient']:
return self._summary['subclient']['clientName']
@property
def agent_name(self):
"""Treats the agent name as a read-only attribute."""
if 'appName' in self._summary['subclient']:
return self._summary['subclient']['appName']
@property
def instance_name(self):
"""Treats the instance name as a read-only attribute."""
if 'instanceName' in self._summary['subclient']:
return self._summary['subclient']['instanceName']
@property
def backupset_name(self):
"""Treats the backupset name as a read-only attribute."""
if 'backupsetName' in self._summary['subclient']:
return self._summary['subclient']['backupsetName']
@property
def subclient_name(self):
"""Treats the subclient name as a read-only attribute."""
if 'subclientName' in self._summary['subclient']:
return self._summary['subclient']['subclientName']
@property
def status(self):
"""Treats the job status as a read-only attribute."""
self.is_finished
return self._status
@property
def job_id(self):
"""Treats the job id as a read-only attribute."""
return self._job_id
@property
def job_type(self):
"""Treats the job type as a read-only attribute."""
return self._summary['jobType']
@property
def backup_level(self):
"""Treats the backup level as a read-only attribute."""
if 'backupLevelName' in self._summary:
return self._summary['backupLevelName']
@property
def start_time(self):
"""Treats the start time as a read-only attribute."""
return self._start_time
@property
def start_timestamp(self):
"""Treats the unix start time as a read-only attribute."""
return self._summary['jobStartTime']
@property
def end_timestamp(self):
"""Treats the unix end time as a read-only attribute"""
return self._summary['jobEndTime']
@property
def end_time(self):
"""Treats the end time as a read-only attribute."""
return self._end_time
@property
def delay_reason(self):
"""Treats the job delay reason as a read-only attribute."""
self.is_finished
progress_info = self._details['jobDetail']['progressInfo']
if 'reasonForJobDelay' in progress_info and progress_info['reasonForJobDelay']:
return progress_info['reasonForJobDelay']
@property
def pending_reason(self):
"""Treats the job pending reason as a read-only attribute."""
self.is_finished
if 'pendingReason' in self._summary and self._summary['pendingReason']:
return self._summary['pendingReason']
@property
def phase(self):
"""Treats the job current phase as a read-only attribute."""
self.is_finished
if 'currentPhaseName' in self._summary:
return self._summary['currentPhaseName']
@property
def summary(self):
"""Treats the job full summary as a read-only attribute."""
self.is_finished
return self._summary
@property
def username(self):
"""Treats the username as a read-only attribute."""
return self._summary['userName']['userName']
@property
def userid(self):
"""Treats the userid as a read-only attribute."""
return self._summary['userName']['userId']
@property
def details(self):
"""Treats the job full details as a read-only attribute."""
self.is_finished
return self._details
@property
def size_of_application(self):
"""Treats the size of application as a read-only attribute."""
if 'sizeOfApplication' in self._summary:
return self._summary['sizeOfApplication']
@property
def num_of_files_transferred(self):
"""Treats the number of files transferred as a read-only attribute."""
self.is_finished
return self._details['jobDetail']['progressInfo']['numOfFilesTransferred']
@property
def state(self):
"""Treats the job state as a read-only attribute."""
self.is_finished
return self._details['jobDetail']['progressInfo']['state']
def pause(self, wait_for_job_to_pause=False):
"""Suspends the job.
Args:
wait_for_job_to_pause (bool) -- wait till job status is changed to Suspended
default: False
Raises:
SDKException:
if failed to suspend job
if response is not success
"""
flag, response = self._cvpysdk_object.make_request('POST', self._SUSPEND)
self.is_finished
if flag:
if response.json() and 'errors' in response.json():
error_list = response.json()['errors'][0]['errList'][0]
error_code = error_list['errorCode']
error_message = error_list['errLogMessage'].strip()
if error_code != 0:
raise SDKException(
'Job', '102', 'Job suspend failed\nError: "{0}"'.format(error_message)
)
else:
response_string = self._update_response_(response.text)
raise SDKException('Response', '101', response_string)
if wait_for_job_to_pause is True:
self._wait_for_status("SUSPENDED")
def resume(self, wait_for_job_to_resume=False):
"""Resumes the job.
Args:
wait_for_job_to_resume (bool) -- wait till job status is changed to Running
default: False
Raises:
SDKException:
if failed to resume job
if response is not success
"""
flag, response = self._cvpysdk_object.make_request('POST', self._RESUME)
self.is_finished
if flag:
if response.json() and 'errors' in response.json():
error_list = response.json()['errors'][0]['errList'][0]
error_code = error_list['errorCode']
error_message = error_list['errLogMessage'].strip()
if error_code != 0:
raise SDKException(
'Job', '102', 'Job resume failed\nError: "{0}"'.format(error_message)
)
else:
response_string = self._update_response_(response.text)
raise SDKException('Response', '101', response_string)
if wait_for_job_to_resume is True:
self._wait_for_status("RUNNING")
def resubmit(self):
"""Resubmits the job
Returns:
object - Job class object for the given job id
Raises:
SDKException:
if job is already running
if response is not success
"""
if not self.is_finished:
raise SDKException('Job', '102', 'Cannot resubmit the Job, the Job is still running')
flag, response = self._cvpysdk_object.make_request('POST', self._RESUBMIT)
if flag:
if response.json() and 'errors' in response.json():
error_list = response.json()['errors'][0]['errList'][0]
error_code = error_list['errorCode']
error_message = error_list['errLogMessage'].strip()
if error_code != 0:
raise SDKException(
'Job', '102', 'Resubmitting job failed\nError: "{0}"'.format(error_message)
)
return Job(self._commcell_object, response.json()['jobIds'][0])
else:
response_string = self._update_response_(response.text)
raise SDKException('Response', '101', response_string)
def kill(self, wait_for_job_to_kill=False):
"""Kills the job.
Args:
wait_for_job_to_kill (bool) -- wait till job status is changed to Killed
default: False
Raises:
SDKException:
if failed to kill job
if response is not success
"""
flag, response = self._cvpysdk_object.make_request('POST', self._KILL)
self.is_finished
if flag:
if response.json() and 'errors' in response.json():
error_list = response.json()['errors'][0]['errList'][0]
error_code = error_list['errorCode']
error_message = error_list['errLogMessage'].strip()
if error_code != 0:
raise SDKException(
'Job', '102', 'Job kill failed\nError: "{0}"'.format(error_message)
)
else:
response_string = self._update_response_(response.text)
raise SDKException('Response', '101', response_string)
if wait_for_job_to_kill is True:
self._wait_for_status("KILLED")
def refresh(self):
"""Refresh the properties of the Job."""
self._initialize_job_properties()
self.is_finished
def advanced_job_details(self, info_type):
"""Returns advanced properties for the job
Args:
                info_type (object) -- job detail type to be passed from AdvancedJobDetailType
enum from the constants
Returns:
dict - dictionary with advanced details of the job info type given
Raises:
SDKException:
if response is empty
if response is not success
"""
if not isinstance(info_type, AdvancedJobDetailType):
raise SDKException('Response', '107')
url = self._services['ADVANCED_JOB_DETAIL_TYPE'] % (self.job_id, info_type.value)
flag, response = self._cvpysdk_object.make_request('GET', url)
if flag:
if response.json():
response = response.json()
if response.get('errorCode', 0) != 0:
                    error_message = response['errorMessage']
o_str = 'Failed to fetch details.\nError: "{0}"'.format(error_message)
raise SDKException('Job', '102', o_str)
return response
else:
raise SDKException('Response', '102')
else:
raise SDKException('Response', '101', self._update_response_(response.text))
class _ErrorRule:
"""Class for enabling, disabling, adding, getting and deleting error rules."""
def __init__(self, commcell):
self.commcell = commcell
self.rule_dict = {}
self.xml_body = """
<App_SetJobErrorDecision>
<entity _type_="1" commCellId="{commcell_id}" commCellName="{commserv_name}" />
<jobErrorRuleList>
<idaRuleList isEnabled="{enable_flag_ida}">
<ida _type_="78" appGroupId="57" appGroupName="{app_group_name}" />
<ruleList>{final_str}<srcEntity _type_="1" commCellId="{commcell_id}" /></ruleList>
<osEntity _type_="161" />
</idaRuleList>
</jobErrorRuleList>
</App_SetJobErrorDecision>
"""
self.error_rule_str = """
<ruleList blockedFileTypes="0" isEnabled="{is_enabled}" jobDecision="{job_decision}" pattern="{pattern}" skipTLbackups="0" skipofflineDBs="0" skippedFiles="0">
<errorCode allErrorCodes="{all_error_codes}" fromValue="{from_error_code}" skipReportingError="{skip_reporting_error}" toValue="{to_error_code}" />
</ruleList>
"""
def _get_xml_for_rule(self, rule_dict):
"""
Returns the XML for a given rule's dictionary of key value pairs. The XML output is used internally when
when adding new or updating existing rules.
Args:
rule_dict (dict) - Dictionary of a rule's key value pairs.
Returns:
str - The XML output formatted as a string.
Raises:
None
"""
return self.error_rule_str.format(
pattern=rule_dict['pattern'],
all_error_codes=rule_dict['all_error_codes'],
from_error_code=rule_dict['from_error_code'],
to_error_code=rule_dict['to_error_code'],
job_decision=rule_dict['job_decision'],
is_enabled=rule_dict['is_enabled'],
skip_reporting_error=rule_dict['skip_reporting_error'])
def add_error_rule(self, rules_arg):
"""
Add new error rules as well as update existing rules, each rule is identified by its rule name denoted by key
rule_name.
Args:
rules_arg (dict) -- A dictionary whose key is the application group name and value is a rules list.
Supported value(s) for key is all constants under ApplicationGroup(Enum)
The value for above key is a list
where each item of the list is a dictionary of the following key value pairs.
is_enabled (str) -- Specifies whether the rule should be enabled or not.
pattern (str) -- Specifies the file pattern for the error rule.
all_error_codes (bool) -- Specifies whether all error codes should be enabled.
from_error_code (int) -- Error code range's lower value.
Valid values are all non negative integers.
to_error_code (int) -- Error code range's upper value.
                    Valid values are all non-negative integers larger than the from_error_code value.
skip_reporting_error (bool) -- Specifies if error codes need to be skipped from being reported.
Example:
{
WINDOWS : { 'rule_1': { 'appGroupName': WINDOWS,
'pattern': "*",
'all_error_codes': False,
'from_error_code': 1,
'to_error_code': 2,
'job_decision': 0,
'is_enabled': True,
'skip_reporting_error': False
},
'rule_2' : { ......}
}
}
Returns:
None
Raises:
Exception in case of invalid key/value pair(s).
"""
final_str = ""
old_values = []
for app_group, rules_dict in rules_arg.items():
assert (app_group.name in [i.name for i in ApplicationGroup])
# FETCH ALL EXISTING RULES ON THE COMMCELL FOR THE APPLICATION
# GROUP IN QUESTION
existing_error_rules = self._get_error_rules(app_group)
for rule_name, rule in rules_dict.items():
assert isinstance(
rule['pattern'], str) and isinstance(
rule['all_error_codes'], bool) and isinstance(
rule['skip_reporting_error'], int) and isinstance(
rule['from_error_code'], int) and isinstance(
rule['to_error_code'], int) and isinstance(
rule['job_decision'], int) and rule['job_decision'] in range(
0, 3) and isinstance(
rule['is_enabled'], bool), "Invalid key value pairs provided."
rule_dict = {k:v for k,v in rule.items() if k != 'appGroupName'}
# GET RULE STRING FOR EACH RULE DICTIONARY PROVIDED IN THE ARGUMENT
new_rule_str = self._get_xml_for_rule(rule_dict)
# IF RULE NAME NOT PRESENT IN OUR INTERNAL STRUCTURE, IT MEANS USER IS ADDING NEW RULE
if rule_name not in list(self.rule_dict.keys()):
self.rule_dict[rule_name] = {'new_value': new_rule_str, 'old_value': new_rule_str}
final_str = ''.join((final_str, new_rule_str))
# ELSE CHECK IF THE RULE'S VALUE REMAINS SAME AND IF IT DOES, WE SIMPLY CONTINUE AND STORE EXISTING VALUE
elif new_rule_str == self.rule_dict[rule_name]['old_value']:
final_str = ''.join((final_str, self.rule_dict[rule_name]['old_value']))
# ELSE RULE IS BEING UPDATED, STORE NEW VALUE IN FINAL STRING AND PRESERVE OLD VALUE AS WELL
else:
self.rule_dict[rule_name]['old_value'] = self.rule_dict[rule_name]['new_value']
self.rule_dict[rule_name]['new_value'] = new_rule_str
final_str = ''.join((final_str, new_rule_str))
# NOW GO THROUGH ALL EXISTING RULES ON CS AND EITHER PRESERVE OR UPDATE IT
# PREPARE A LIST OF ALL OLD VALUES FIRST
for rule_name, values in self.rule_dict.items():
old_values.extend([value for value_type, value in values.items() if value_type == 'old_value'])
for existing_error_rule in existing_error_rules:
existing_rule_dict = {'pattern': existing_error_rule['pattern'],
'all_error_codes': existing_error_rule['errorCode']['allErrorCodes'],
'skip_reporting_error': existing_error_rule['errorCode']['skipReportingError'],
'from_error_code': existing_error_rule['errorCode']['fromValue'],
'to_error_code': existing_error_rule['errorCode']['toValue'],
'job_decision': existing_error_rule['jobDecision'],
'is_enabled': existing_error_rule['isEnabled']}
existing_rule_str = self._get_xml_for_rule(existing_rule_dict)
# AN EXISTING RULE THAT HAS NOT BEEN UPDATED AND IS NOT ADDED BY THE TEST CASE OR THROUGH AUTOMATION.
# IN OTHER WORDS, AN EXISTING RULE THAT WAS ADDED OUTSIDE OF THE SCOPE OF THE TEST CASE
if existing_rule_str not in old_values:
final_str = ''.join((final_str, existing_rule_str))
# NEED TO ADD SUPPORT FOR UPDATION OF ERROR RULES FOR MULTIPLE iDAs SIMULTANEOUSLY
xml_body = self.xml_body.format(commcell_id=self.commcell.commcell_id,
commserv_name=self.commcell.commserv_name,
enable_flag_ida=1,
app_group_name=app_group,
final_str=final_str)
xml_body = ''.join(i.lstrip().rstrip() for i in xml_body.split("\n"))
self.commcell.qoperation_execute(xml_body)
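    # Usage sketch (illustrative only; assumes `job_mgmt` is a JobManagement instance whose
    # error_rules property returns this helper, and that ApplicationGroup.WINDOWS exists as
    # shown in the docstring example above; the pattern and error codes are arbitrary):
    #
    #   rules = {
    #       ApplicationGroup.WINDOWS: {
    #           'rule_1': {
    #               'appGroupName': ApplicationGroup.WINDOWS,
    #               'pattern': '*.tmp',
    #               'all_error_codes': False,
    #               'from_error_code': 1,
    #               'to_error_code': 32,
    #               'job_decision': 0,
    #               'is_enabled': True,
    #               'skip_reporting_error': False,
    #           }
    #       }
    #   }
    #   job_mgmt.error_rules.add_error_rule(rules)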
def enable(self, app_group):
"""Enables the job error control rules for the specified Application Group Type.
Args:
app_group (str) -- The iDA for which the enable flag needs to be set.
Currently supported values are APPGRP_WindowsFileSystemIDA.
Returns:
None
Raises:
None
"""
return self._modify_job_status_on_errors(app_group, enable_flag=True)
def disable(self, app_group):
"""Disables the job error control rules for the specified Application Group Type.
Args:
app_group (str) -- The iDA for which the enable flag needs to be set.
Currently supported values are APPGRP_WindowsFileSystemIDA.
Returns:
None
Raises:
None
"""
return self._modify_job_status_on_errors(app_group, enable_flag=False)
def _modify_job_status_on_errors(self, app_group, enable_flag):
"""To enable or disable job status on errors.
Args:
app_group (str) -- The iDA for which the enable flag needs to be set.
Currently supported values are APPGRP_WindowsFileSystemIDA.
enable_flag (bool) -- Enables and disables job status on errors.
Returns:
None
Raises:
None
"""
# FETCHING ALL EXISTING RULES
error_rules = self._get_error_rules(app_group)
# FOR EVERY RULE IN RULE LIST
        final_str = ""
        for rule in error_rules:
rule_str = self.error_rule_str.format(pattern=rule['pattern'],
all_error_codes=rule['errorCode']['allErrorCodes'],
from_error_code=rule['errorCode']['fromValue'],
to_error_code=rule['errorCode']['toValue'],
job_decision=rule['jobDecision'],
is_enabled=rule['isEnabled'],
skip_reporting_error=rule['errorCode']['skipReportingError'])
final_str = ''.join((final_str, rule_str))
        xml_body = self.xml_body.format(commcell_id=self.commcell.commcell_id,
                                        commserv_name=self.commcell.commserv_name,
                                        enable_flag_ida=1 if enable_flag else 0,
                                        app_group_name=app_group,
                                        final_str=final_str)
xml_body = ''.join(i.lstrip().rstrip() for i in xml_body.split("\n"))
return self.commcell.qoperation_execute(xml_body)
def _get_error_rules(self, app_group):
"""
Returns the error rules set on the CS in the form of a dictionary.
Args:
app_group (str) -- The iDA for which the enable flag needs to be set.
Currently supported values are APPGRP_WindowsFileSystemIDA.
Returns:
list - A list of error rules. Each rule will be a dictionary of key value pairs for pattern,
error code from value, error code to value etc.
Raises:
None
"""
rule_list = []
xml_body = f"""
<App_GetJobErrorDecisionReq>
<entity _type_="1" commCellId="{self.commcell.commcell_id}" commCellName="{self.commcell.commserv_name}"/>
</App_GetJobErrorDecisionReq>"""
xml_body = ''.join(i.lstrip().rstrip() for i in xml_body.split("\n"))
error_rules = self.commcell.qoperation_execute(xml_body)
if any(error_rules):
ida_rule_lists = error_rules['jobErrorRuleList']['idaRuleList']
for ida_rule_list in ida_rule_lists:
# HARD CODED FOR WINDOWS SUPPORT ONLY
if ida_rule_list['ida']['appGroupName'] == app_group:
try:
rule_list = ida_rule_list['ruleList']['ruleList']
except Exception:
pass
return rule_list
| 39.360799 | 168 | 0.493964 |

795363355d9fde49e4a0e234d935c9d6f171812e | 3,247 | py | Python | examples/contrib/send_more_money_any_base.py | AlohaChina/or-tools | 1ece0518104db435593a1a21882801ab6ada3e15 | ["Apache-2.0"] | 8,273 | 2015-02-24T22:10:50.000Z | 2022-03-31T21:19:27.000Z | examples/contrib/send_more_money_any_base.py | AlohaChina/or-tools | 1ece0518104db435593a1a21882801ab6ada3e15 | ["Apache-2.0"] | 2,530 | 2015-03-05T04:27:21.000Z | 2022-03-31T06:13:02.000Z | examples/contrib/send_more_money_any_base.py | AlohaChina/or-tools | 1ece0518104db435593a1a21882801ab6ada3e15 | ["Apache-2.0"] | 2,057 | 2015-03-04T15:02:02.000Z | 2022-03-30T02:29:27.000Z |
# Copyright 2010 Hakan Kjellerstrand hakank@gmail.com
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
SEND+MORE=MONEY in 'any' base in Google CP Solver.
Alphametic problem SEND+MORE=MONEY in any base.
Examples:
Base 10 has one solution:
{9, 5, 6, 7, 1, 0, 8, 2}
Base 11 has three solutions:
{10, 5, 6, 8, 1, 0, 9, 2}
{10, 6, 7, 8, 1, 0, 9, 3}
{10, 7, 8, 6, 1, 0, 9, 2}
Also, compare with the following models:
* Comet : http://www.hakank.org/comet/send_more_money_any_base.co
* ECLiPSE : http://www.hakank.org/eclipse/send_more_money_any_base.ecl
* Essence : http://www.hakank.org/tailor/send_more_money_any_base.eprime
* Gecode : http://www.hakank.org/gecode/send_more_money_any_base.cpp
* Gecode/R: http://www.hakank.org/gecode_r/send_more_money_any_base.rb
* MiniZinc: http://www.hakank.org/minizinc/send_more_money_any_base.mzn
* Zinc: http://www.hakank.org/minizinc/send_more_money_any_base.zinc
* SICStus: http://www.hakank.org/sicstus/send_more_money_any_base.pl
This model was created by Hakan Kjellerstrand (hakank@gmail.com)
Also see my other Google CP Solver models:
http://www.hakank.org/google_or_tools/
"""
import sys
from ortools.constraint_solver import pywrapcp
def main(base=10):
# Create the solver.
solver = pywrapcp.Solver('Send most money')
# data
print('base:', base)
# declare variables
s = solver.IntVar(0, base - 1, 's')
e = solver.IntVar(0, base - 1, 'e')
n = solver.IntVar(0, base - 1, 'n')
d = solver.IntVar(0, base - 1, 'd')
m = solver.IntVar(0, base - 1, 'm')
o = solver.IntVar(0, base - 1, 'o')
r = solver.IntVar(0, base - 1, 'r')
y = solver.IntVar(0, base - 1, 'y')
x = [s, e, n, d, m, o, r, y]
#
# constraints
#
solver.Add(solver.AllDifferent(x))
solver.Add(
s * base**3 + e * base**2 + n * base + d + m * base**3 + o * base**2 +
r * base + e == m * base**4 + o * base**3 + n * base**2 + e * base + y,)
solver.Add(s > 0)
solver.Add(m > 0)
#
# solution and search
#
solution = solver.Assignment()
solution.Add(x)
collector = solver.AllSolutionCollector(solution)
solver.Solve(
solver.Phase(x, solver.CHOOSE_FIRST_UNBOUND, solver.ASSIGN_MAX_VALUE),
[collector])
num_solutions = collector.SolutionCount()
money_val = 0
for s in range(num_solutions):
print('x:', [collector.Value(s, x[i]) for i in range(len(x))])
print()
print('num_solutions:', num_solutions)
print('failures:', solver.Failures())
print('branches:', solver.Branches())
print('WallTime:', solver.WallTime())
print()
base = 10
if __name__ == '__main__':
# for base in range(10,30):
# main(base)
if len(sys.argv) > 1:
base = int(sys.argv[1])
main(base)
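# Quick sanity check of the classic base-10 solution (illustrative note, not part of the
# original example): with {s, e, n, d, m, o, r, y} = {9, 5, 6, 7, 1, 0, 8, 2},
#   SEND  = 9567
#   MORE  = 1085
#   MONEY = 10652
# and indeed 9567 + 1085 == 10652, which matches the single base-10 solution listed above.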
| 29.252252 | 78 | 0.665845 |

795363884197e2349452d08e6048d23654b01b72 | 19,395 | py | Python | sympy/utilities/tests/test_lambdify.py | oliverlee/sympy | 961a75bd0adc40fc1d405ffdae9f49b52d96bed0 | ["BSD-3-Clause"] | null | null | null | sympy/utilities/tests/test_lambdify.py | oliverlee/sympy | 961a75bd0adc40fc1d405ffdae9f49b52d96bed0 | ["BSD-3-Clause"] | null | null | null | sympy/utilities/tests/test_lambdify.py | oliverlee/sympy | 961a75bd0adc40fc1d405ffdae9f49b52d96bed0 | ["BSD-3-Clause"] | null | null | null |
from sympy.utilities.pytest import XFAIL, raises
from sympy import (
symbols, lambdify, sqrt, sin, cos, tan, pi, acos, acosh, Rational,
Float, Matrix, Lambda, Piecewise, exp, Integral, oo, I, Abs, Function,
true, false, And, Or, Not, ITE, Min, Max)
from sympy.printing.lambdarepr import LambdaPrinter
import mpmath
from sympy.utilities.lambdify import implemented_function
from sympy.utilities.pytest import skip
from sympy.utilities.decorator import conserve_mpmath_dps
from sympy.external import import_module
import math
import sympy
MutableDenseMatrix = Matrix
numpy = import_module('numpy')
numexpr = import_module('numexpr')
w, x, y, z = symbols('w,x,y,z')
#================== Test different arguments =======================
def test_no_args():
f = lambdify([], 1)
raises(TypeError, lambda: f(-1))
assert f() == 1
def test_single_arg():
f = lambdify(x, 2*x)
assert f(1) == 2
def test_list_args():
f = lambdify([x, y], x + y)
assert f(1, 2) == 3
def test_str_args():
f = lambdify('x,y,z', 'z,y,x')
assert f(3, 2, 1) == (1, 2, 3)
assert f(1.0, 2.0, 3.0) == (3.0, 2.0, 1.0)
# make sure correct number of args required
raises(TypeError, lambda: f(0))
def test_own_namespace():
myfunc = lambda x: 1
f = lambdify(x, sin(x), {"sin": myfunc})
assert f(0.1) == 1
assert f(100) == 1
def test_own_module():
f = lambdify(x, sin(x), math)
assert f(0) == 0.0
def test_bad_args():
# no vargs given
raises(TypeError, lambda: lambdify(1))
# same with vector exprs
raises(TypeError, lambda: lambdify([1, 2]))
def test_atoms():
# Non-Symbol atoms should not be pulled out from the expression namespace
f = lambdify(x, pi + x, {"pi": 3.14})
assert f(0) == 3.14
f = lambdify(x, I + x, {"I": 1j})
assert f(1) == 1 + 1j
#================== Test different modules =========================
# high-precision output of sin(0.2) is used to detect unwanted loss of precision
@conserve_mpmath_dps
def test_sympy_lambda():
mpmath.mp.dps = 50
sin02 = mpmath.mpf("0.19866933079506121545941262711838975037020672954020")
f = lambdify(x, sin(x), "sympy")
assert f(x) == sin(x)
prec = 1e-15
assert -prec < f(Rational(1, 5)).evalf() - Float(str(sin02)) < prec
# arctan is in numpy module and should not be available
raises(NameError, lambda: lambdify(x, arctan(x), "sympy"))
@conserve_mpmath_dps
def test_math_lambda():
mpmath.mp.dps = 50
sin02 = mpmath.mpf("0.19866933079506121545941262711838975037020672954020")
f = lambdify(x, sin(x), "math")
prec = 1e-15
assert -prec < f(0.2) - sin02 < prec
raises(TypeError, lambda: f(x))
# if this succeeds, it can't be a python math function
@conserve_mpmath_dps
def test_mpmath_lambda():
mpmath.mp.dps = 50
sin02 = mpmath.mpf("0.19866933079506121545941262711838975037020672954020")
f = lambdify(x, sin(x), "mpmath")
prec = 1e-49 # mpmath precision is around 50 decimal places
assert -prec < f(mpmath.mpf("0.2")) - sin02 < prec
raises(TypeError, lambda: f(x))
# if this succeeds, it can't be a mpmath function
@conserve_mpmath_dps
@XFAIL
def test_number_precision():
mpmath.mp.dps = 50
sin02 = mpmath.mpf("0.19866933079506121545941262711838975037020672954020")
f = lambdify(x, sin02, "mpmath")
prec = 1e-49 # mpmath precision is around 50 decimal places
assert -prec < f(0) - sin02 < prec
#================== Test Translations ==============================
# We can only check that all translated functions are valid. Whether the
# translations are complete has to be checked by hand.
def test_math_transl():
from sympy.utilities.lambdify import MATH_TRANSLATIONS
for sym, mat in MATH_TRANSLATIONS.items():
assert sym in sympy.__dict__
assert mat in math.__dict__
def test_mpmath_transl():
from sympy.utilities.lambdify import MPMATH_TRANSLATIONS
for sym, mat in MPMATH_TRANSLATIONS.items():
assert sym in sympy.__dict__ or sym == 'Matrix'
assert mat in mpmath.__dict__
def test_numpy_transl():
if not numpy:
skip("numpy not installed.")
from sympy.utilities.lambdify import NUMPY_TRANSLATIONS
for sym, nump in NUMPY_TRANSLATIONS.items():
assert sym in sympy.__dict__
assert nump in numpy.__dict__
def test_numpy_translation_abs():
if not numpy:
skip("numpy not installed.")
f = lambdify(x, Abs(x), "numpy")
assert f(-1) == 1
assert f(1) == 1
def test_numexpr_printer():
if not numexpr:
skip("numexpr not installed.")
# if translation/printing is done incorrectly then evaluating
# a lambdified numexpr expression will throw an exception
from sympy.printing.lambdarepr import NumExprPrinter
from sympy import S
blacklist = ('where', 'complex', 'contains')
arg_tuple = (x, y, z) # some functions take more than one argument
for sym in NumExprPrinter._numexpr_functions.keys():
if sym in blacklist:
continue
ssym = S(sym)
if hasattr(ssym, '_nargs'):
nargs = ssym._nargs[0]
else:
nargs = 1
args = arg_tuple[:nargs]
f = lambdify(args, ssym(*args), modules='numexpr')
assert f(*(1, )*nargs) is not None
def test_issue_9334():
if not numexpr:
skip("numexpr not installed.")
if not numpy:
skip("numpy not installed.")
expr = sympy.S('b*a - sqrt(a**2)')
a, b = sorted(expr.free_symbols, key=lambda s: s.name)
func_numexpr = lambdify((a,b), expr, modules=[numexpr], dummify=False)
foo, bar = numpy.random.random((2, 4))
func_numexpr(foo, bar)
#================== Test some functions ============================
def test_exponentiation():
f = lambdify(x, x**2)
assert f(-1) == 1
assert f(0) == 0
assert f(1) == 1
assert f(-2) == 4
assert f(2) == 4
assert f(2.5) == 6.25
def test_sqrt():
f = lambdify(x, sqrt(x))
assert f(0) == 0.0
assert f(1) == 1.0
assert f(4) == 2.0
assert abs(f(2) - 1.414) < 0.001
assert f(6.25) == 2.5
def test_trig():
f = lambdify([x], [cos(x), sin(x)])
d = f(pi)
prec = 1e-11
assert -prec < d[0] + 1 < prec
assert -prec < d[1] < prec
d = f(3.14159)
prec = 1e-5
assert -prec < d[0] + 1 < prec
assert -prec < d[1] < prec
#================== Test vectors ===================================
def test_vector_simple():
f = lambdify((x, y, z), (z, y, x))
assert f(3, 2, 1) == (1, 2, 3)
assert f(1.0, 2.0, 3.0) == (3.0, 2.0, 1.0)
# make sure correct number of args required
raises(TypeError, lambda: f(0))
def test_vector_discontinuous():
f = lambdify(x, (-1/x, 1/x))
raises(ZeroDivisionError, lambda: f(0))
assert f(1) == (-1.0, 1.0)
assert f(2) == (-0.5, 0.5)
assert f(-2) == (0.5, -0.5)
def test_trig_symbolic():
f = lambdify([x], [cos(x), sin(x)])
d = f(pi)
assert abs(d[0] + 1) < 0.0001
assert abs(d[1] - 0) < 0.0001
def test_trig_float():
f = lambdify([x], [cos(x), sin(x)])
d = f(3.14159)
assert abs(d[0] + 1) < 0.0001
assert abs(d[1] - 0) < 0.0001
def test_docs():
f = lambdify(x, x**2)
assert f(2) == 4
f = lambdify([x, y, z], [z, y, x])
assert f(1, 2, 3) == [3, 2, 1]
f = lambdify(x, sqrt(x))
assert f(4) == 2.0
f = lambdify((x, y), sin(x*y)**2)
assert f(0, 5) == 0
def test_math():
f = lambdify((x, y), sin(x), modules="math")
assert f(0, 5) == 0
def test_sin():
f = lambdify(x, sin(x)**2)
assert isinstance(f(2), float)
f = lambdify(x, sin(x)**2, modules="math")
assert isinstance(f(2), float)
def test_matrix():
A = Matrix([[x, x*y], [sin(z) + 4, x**z]])
sol = Matrix([[1, 2], [sin(3) + 4, 1]])
f = lambdify((x, y, z), A, modules="sympy")
assert f(1, 2, 3) == sol
f = lambdify((x, y, z), (A, [A]), modules="sympy")
assert f(1, 2, 3) == (sol, [sol])
J = Matrix((x, x + y)).jacobian((x, y))
v = Matrix((x, y))
sol = Matrix([[1, 0], [1, 1]])
assert lambdify(v, J, modules='sympy')(1, 2) == sol
assert lambdify(v.T, J, modules='sympy')(1, 2) == sol
def test_numpy_matrix():
if not numpy:
skip("numpy not installed.")
A = Matrix([[x, x*y], [sin(z) + 4, x**z]])
sol_arr = numpy.array([[1, 2], [numpy.sin(3) + 4, 1]])
    # Lambdify the Matrix first, to ensure an array is returned by default
f = lambdify((x, y, z), A, ['numpy'])
numpy.testing.assert_allclose(f(1, 2, 3), sol_arr)
#Check that the types are arrays and matrices
assert isinstance(f(1, 2, 3), numpy.ndarray)
def test_numpy_transpose():
if not numpy:
skip("numpy not installed.")
A = Matrix([[1, x], [0, 1]])
f = lambdify((x), A.T, modules="numpy")
numpy.testing.assert_array_equal(f(2), numpy.array([[1, 0], [2, 1]]))
def test_numpy_inverse():
if not numpy:
skip("numpy not installed.")
A = Matrix([[1, x], [0, 1]])
f = lambdify((x), A**-1, modules="numpy")
numpy.testing.assert_array_equal(f(2), numpy.array([[1, -2], [0, 1]]))
def test_numpy_old_matrix():
if not numpy:
skip("numpy not installed.")
A = Matrix([[x, x*y], [sin(z) + 4, x**z]])
sol_arr = numpy.array([[1, 2], [numpy.sin(3) + 4, 1]])
f = lambdify((x, y, z), A, [{'ImmutableMatrix': numpy.matrix}, 'numpy'])
numpy.testing.assert_allclose(f(1, 2, 3), sol_arr)
assert isinstance(f(1, 2, 3), numpy.matrix)
def test_numpy_piecewise():
if not numpy:
skip("numpy not installed.")
pieces = Piecewise((x, x < 3), (x**2, x > 5), (0, True))
f = lambdify(x, pieces, modules="numpy")
numpy.testing.assert_array_equal(f(numpy.arange(10)),
numpy.array([0, 1, 2, 0, 0, 0, 36, 49, 64, 81]))
# If we evaluate somewhere all conditions are False, we should get back NaN
nodef_func = lambdify(x, Piecewise((x, x > 0), (-x, x < 0)))
numpy.testing.assert_array_equal(nodef_func(numpy.array([-1, 0, 1])),
numpy.array([1, numpy.nan, 1]))
def test_numpy_logical_ops():
if not numpy:
skip("numpy not installed.")
and_func = lambdify((x, y), And(x, y), modules="numpy")
or_func = lambdify((x, y), Or(x, y), modules="numpy")
not_func = lambdify((x), Not(x), modules="numpy")
arr1 = numpy.array([True, True])
arr2 = numpy.array([False, True])
numpy.testing.assert_array_equal(and_func(arr1, arr2), numpy.array([False, True]))
numpy.testing.assert_array_equal(or_func(arr1, arr2), numpy.array([True, True]))
numpy.testing.assert_array_equal(not_func(arr2), numpy.array([True, False]))
def test_numpy_matmul():
if not numpy:
skip("numpy not installed.")
xmat = Matrix([[x, y], [z, 1+z]])
ymat = Matrix([[x**2], [Abs(x)]])
mat_func = lambdify((x, y, z), xmat*ymat, modules="numpy")
numpy.testing.assert_array_equal(mat_func(0.5, 3, 4), numpy.array([[1.625], [3.5]]))
numpy.testing.assert_array_equal(mat_func(-0.5, 3, 4), numpy.array([[1.375], [3.5]]))
# Multiple matrices chained together in multiplication
f = lambdify((x, y, z), xmat*xmat*xmat, modules="numpy")
numpy.testing.assert_array_equal(f(0.5, 3, 4), numpy.array([[72.125, 119.25],
[159, 251]]))
def test_numpy_numexpr():
if not numpy:
skip("numpy not installed.")
if not numexpr:
skip("numexpr not installed.")
a, b, c = numpy.random.randn(3, 128, 128)
# ensure that numpy and numexpr return same value for complicated expression
expr = sin(x) + cos(y) + tan(z)**2 + Abs(z-y)*acos(sin(y*z)) + \
Abs(y-z)*acosh(2+exp(y-x))- sqrt(x**2+I*y**2)
npfunc = lambdify((x, y, z), expr, modules='numpy')
nefunc = lambdify((x, y, z), expr, modules='numexpr')
assert numpy.allclose(npfunc(a, b, c), nefunc(a, b, c))
def test_numexpr_userfunctions():
if not numpy:
skip("numpy not installed.")
if not numexpr:
skip("numexpr not installed.")
a, b = numpy.random.randn(2, 10)
uf = type('uf', (Function, ),
{'eval' : classmethod(lambda x, y : y**2+1)})
func = lambdify(x, 1-uf(x), modules='numexpr')
assert numpy.allclose(func(a), -(a**2))
uf = implemented_function(Function('uf'), lambda x, y : 2*x*y+1)
func = lambdify((x, y), uf(x, y), modules='numexpr')
assert numpy.allclose(func(a, b), 2*a*b+1)
def test_integral():
f = Lambda(x, exp(-x**2))
l = lambdify(x, Integral(f(x), (x, -oo, oo)), modules="sympy")
assert l(x) == Integral(exp(-x**2), (x, -oo, oo))
#================== Test symbolic ==================================
def test_sym_single_arg():
f = lambdify(x, x * y)
assert f(z) == z * y
def test_sym_list_args():
f = lambdify([x, y], x + y + z)
assert f(1, 2) == 3 + z
def test_sym_integral():
f = Lambda(x, exp(-x**2))
l = lambdify(x, Integral(f(x), (x, -oo, oo)), modules="sympy")
assert l(y).doit() == sqrt(pi)
def test_namespace_order():
# lambdify had a bug, such that module dictionaries or cached module
# dictionaries would pull earlier namespaces into themselves.
# Because the module dictionaries form the namespace of the
# generated lambda, this meant that the behavior of a previously
# generated lambda function could change as a result of later calls
# to lambdify.
n1 = {'f': lambda x: 'first f'}
n2 = {'f': lambda x: 'second f',
'g': lambda x: 'function g'}
f = sympy.Function('f')
g = sympy.Function('g')
if1 = lambdify(x, f(x), modules=(n1, "sympy"))
assert if1(1) == 'first f'
if2 = lambdify(x, g(x), modules=(n2, "sympy"))
# previously gave 'second f'
assert if1(1) == 'first f'
def test_imps():
# Here we check if the default returned functions are anonymous - in
# the sense that we can have more than one function with the same name
f = implemented_function('f', lambda x: 2*x)
g = implemented_function('f', lambda x: math.sqrt(x))
l1 = lambdify(x, f(x))
l2 = lambdify(x, g(x))
assert str(f(x)) == str(g(x))
assert l1(3) == 6
assert l2(3) == math.sqrt(3)
# check that we can pass in a Function as input
func = sympy.Function('myfunc')
assert not hasattr(func, '_imp_')
my_f = implemented_function(func, lambda x: 2*x)
assert hasattr(func, '_imp_')
# Error for functions with same name and different implementation
f2 = implemented_function("f", lambda x: x + 101)
raises(ValueError, lambda: lambdify(x, f(f2(x))))
def test_imps_wrong_args():
raises(ValueError, lambda: implemented_function(sin, lambda x: x))
def test_lambdify_imps():
# Test lambdify with implemented functions
# first test basic (sympy) lambdify
f = sympy.cos
assert lambdify(x, f(x))(0) == 1
assert lambdify(x, 1 + f(x))(0) == 2
assert lambdify((x, y), y + f(x))(0, 1) == 2
# make an implemented function and test
f = implemented_function("f", lambda x: x + 100)
assert lambdify(x, f(x))(0) == 100
assert lambdify(x, 1 + f(x))(0) == 101
assert lambdify((x, y), y + f(x))(0, 1) == 101
# Can also handle tuples, lists, dicts as expressions
lam = lambdify(x, (f(x), x))
assert lam(3) == (103, 3)
lam = lambdify(x, [f(x), x])
assert lam(3) == [103, 3]
lam = lambdify(x, [f(x), (f(x), x)])
assert lam(3) == [103, (103, 3)]
lam = lambdify(x, {f(x): x})
assert lam(3) == {103: 3}
lam = lambdify(x, {f(x): x})
assert lam(3) == {103: 3}
lam = lambdify(x, {x: f(x)})
assert lam(3) == {3: 103}
# Check that imp preferred to other namespaces by default
d = {'f': lambda x: x + 99}
lam = lambdify(x, f(x), d)
assert lam(3) == 103
# Unless flag passed
lam = lambdify(x, f(x), d, use_imps=False)
assert lam(3) == 102
def test_dummification():
t = symbols('t')
F = Function('F')
G = Function('G')
#"\alpha" is not a valid python variable name
#lambdify should sub in a dummy for it, and return
#without a syntax error
alpha = symbols(r'\alpha')
some_expr = 2 * F(t)**2 / G(t)
lam = lambdify((F(t), G(t)), some_expr)
assert lam(3, 9) == 2
lam = lambdify(sin(t), 2 * sin(t)**2)
assert lam(F(t)) == 2 * F(t)**2
#Test that \alpha was properly dummified
lam = lambdify((alpha, t), 2*alpha + t)
assert lam(2, 1) == 5
raises(SyntaxError, lambda: lambdify(F(t) * G(t), F(t) * G(t) + 5))
raises(SyntaxError, lambda: lambdify(2 * F(t), 2 * F(t) + 5))
raises(SyntaxError, lambda: lambdify(2 * F(t), 4 * F(t) + 5))
def test_python_keywords():
# Test for issue 7452. The automatic dummification should ensure use of
# Python reserved keywords as symbol names will create valid lambda
# functions. This is an additional regression test.
python_if = symbols('if')
expr = python_if / 2
f = lambdify(python_if, expr)
assert f(4.0) == 2.0
def test_lambdify_docstring():
func = lambdify((w, x, y, z), w + x + y + z)
assert func.__doc__ == (
"Created with lambdify. Signature:\n\n"
"func(w, x, y, z)\n\n"
"Expression:\n\n"
"w + x + y + z")
syms = symbols('a1:26')
func = lambdify(syms, sum(syms))
assert func.__doc__ == (
"Created with lambdify. Signature:\n\n"
"func(a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13, a14, a15,\n"
" a16, a17, a18, a19, a20, a21, a22, a23, a24, a25)\n\n"
"Expression:\n\n"
"a1 + a10 + a11 + a12 + a13 + a14 + a15 + a16 + a17 + a18 + a19 + a2 + a20 +...")
#================== Test special printers ==========================
def test_special_printers():
class IntervalPrinter(LambdaPrinter):
"""Use ``lambda`` printer but print numbers as ``mpi`` intervals. """
def _print_Integer(self, expr):
return "mpi('%s')" % super(IntervalPrinter, self)._print_Integer(expr)
def _print_Rational(self, expr):
return "mpi('%s')" % super(IntervalPrinter, self)._print_Rational(expr)
def intervalrepr(expr):
return IntervalPrinter().doprint(expr)
expr = sympy.sqrt(sympy.sqrt(2) + sympy.sqrt(3)) + sympy.S(1)/2
func0 = lambdify((), expr, modules="mpmath", printer=intervalrepr)
func1 = lambdify((), expr, modules="mpmath", printer=IntervalPrinter)
func2 = lambdify((), expr, modules="mpmath", printer=IntervalPrinter())
mpi = type(mpmath.mpi(1, 2))
assert isinstance(func0(), mpi)
assert isinstance(func1(), mpi)
assert isinstance(func2(), mpi)
def test_true_false():
# We want exact is comparison here, not just ==
assert lambdify([], true)() is True
assert lambdify([], false)() is False
def test_issue_2790():
assert lambdify((x, (y, z)), x + y)(1, (2, 4)) == 3
assert lambdify((x, (y, (w, z))), w + x + y + z)(1, (2, (3, 4))) == 10
assert lambdify(x, x + 1, dummify=False)(1) == 2
def test_ITE():
assert lambdify((x, y, z), ITE(x, y, z))(True, 5, 3) == 5
assert lambdify((x, y, z), ITE(x, y, z))(False, 5, 3) == 3
def test_Min_Max():
# see gh-10375
assert lambdify((x, y, z), Min(x, y, z))(1, 2, 3) == 1
assert lambdify((x, y, z), Max(x, y, z))(1, 2, 3) == 3
| 32.817259
| 93
| 0.588244
|
795364695f8097ba6259b0e646c765192dd0f96b
| 1,181
|
py
|
Python
|
snowshu/samplings/sample_sizes/brute_force_sample_size.py
|
norton120/snowshu
|
3595972a2ab28350f0283c3703adc1ca4b26bec2
|
[
"Apache-2.0"
] | null | null | null |
snowshu/samplings/sample_sizes/brute_force_sample_size.py
|
norton120/snowshu
|
3595972a2ab28350f0283c3703adc1ca4b26bec2
|
[
"Apache-2.0"
] | 28
|
2020-02-06T18:37:51.000Z
|
2020-02-06T18:38:34.000Z
|
snowshu/samplings/sample_sizes/brute_force_sample_size.py
|
norton120/snowshu
|
3595972a2ab28350f0283c3703adc1ca4b26bec2
|
[
"Apache-2.0"
] | null | null | null |
import math
from snowshu.core.samplings.bases.base_sample_size import BaseSampleSize
class BruteForceSampleSize(BaseSampleSize):
"""Implements a static percentage sample size.
Args:
percentage: The decimal representation of the desired sample size between 1 and 99% (0.01 to 0.99).
"""
def __init__(self,
percentage:float):
        self.percentage = percentage  # go through the property setter so the bounds check applies
@property
def percentage(self)->float:
return self._percentage
@percentage.setter
def percentage(self,val:float)->None:
"""validates percentage between 1 and 99% before setting."""
if (0.01 <= val <= 0.99):
self._percentage=val
else:
raise ValueError(f"Percentage must be between 0.01 and 0.99, is {val}")
def size(self,population:int)->int:
"""Calculates the sample size for a given population size.
Args:
population: The count of records in the full population.
Returns:
            The minimum whole number of elements in the sample, given the instance percentage.
"""
return math.ceil(population * self.percentage)
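if __name__ == '__main__':
    # Minimal usage sketch (illustrative only, not part of the snowshu API):
    # a 10% brute-force sample of a 1,234-row population needs
    # ceil(1234 * 0.10) = 124 records.
    sampling = BruteForceSampleSize(percentage=0.10)
    print(sampling.size(1234))  # -> 124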
| 30.282051
| 117
| 0.651143
|
795364f421f0374ed701e037313d2d1cd2731a45
| 858
|
py
|
Python
|
webui/app/controller.py
|
ahmadika/sce-domain-discovery
|
034210b5c32fbf364c6501bff5df06f50f63b87c
|
[
"Apache-2.0"
] | null | null | null |
webui/app/controller.py
|
ahmadika/sce-domain-discovery
|
034210b5c32fbf364c6501bff5df06f50f63b87c
|
[
"Apache-2.0"
] | 1
|
2017-06-08T17:53:51.000Z
|
2017-06-08T17:53:52.000Z
|
webui/app/controller.py
|
ahmadika/sce-domain-discovery
|
034210b5c32fbf364c6501bff5df06f50f63b87c
|
[
"Apache-2.0"
] | 2
|
2018-07-02T16:31:42.000Z
|
2018-07-02T16:53:26.000Z
|
from flask import Blueprint, request, render_template, redirect, url_for, send_from_directory
from app import classifier
# Define Blueprint(s)
mod_app = Blueprint('application', __name__, url_prefix='/explorer')
# Define Controller(s)
@mod_app.route('/')
def index():
return send_from_directory('static/pages', 'index.html')
# POST Requests
@mod_app.route('/classify/update/', methods=['POST'])
def build_model():
annotations = []
data = request.get_data()
for item in data.split('&'):
annotations.append(int(item.split('=')[1]))
accuracy = classifier.update_model(annotations)
return accuracy
@mod_app.route('/classify/download/', methods=['POST'])
def download_model():
return classifier.export_model()
@mod_app.route('/classify/exist/', methods=['POST'])
def check_model():
return classifier.check_model()
| 26
| 93
| 0.712121
|
79536533e8b3c75e4208745b506205e451cab722
| 5,180
|
py
|
Python
|
dlcv/object_detection/tensorflow_detect/core/region_similarity_calculator.py
|
Loonride/deeplens-cv
|
9e5b31c1a269d364e4912ba8266415fa04277e11
|
[
"MIT"
] | 11
|
2019-10-07T22:06:30.000Z
|
2020-08-26T22:10:53.000Z
|
dlcv/object_detection/tensorflow_detect/core/region_similarity_calculator.py
|
Loonride/deeplens-cv
|
9e5b31c1a269d364e4912ba8266415fa04277e11
|
[
"MIT"
] | 16
|
2019-11-02T00:32:00.000Z
|
2022-02-10T00:23:32.000Z
|
dlcv/object_detection/tensorflow_detect/core/region_similarity_calculator.py
|
Loonride/deeplens-cv
|
9e5b31c1a269d364e4912ba8266415fa04277e11
|
[
"MIT"
] | 9
|
2019-10-07T13:33:13.000Z
|
2020-09-27T09:50:58.000Z
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Region Similarity Calculators for BoxLists.
Region Similarity Calculators compare a pairwise measure of similarity
between the boxes in two BoxLists.
"""
from abc import ABCMeta
from abc import abstractmethod
import tensorflow as tf
from object_detection.tensorflow_detect.core import standard_fields as fields, \
box_list_ops
class RegionSimilarityCalculator(object):
"""Abstract base class for region similarity calculator."""
__metaclass__ = ABCMeta
def compare(self, boxlist1, boxlist2, scope=None):
"""Computes matrix of pairwise similarity between BoxLists.
This op (to be overridden) computes a measure of pairwise similarity between
the boxes in the given BoxLists. Higher values indicate more similarity.
Note that this method simply measures similarity and does not explicitly
perform a matching.
Args:
boxlist1: BoxList holding N boxes.
boxlist2: BoxList holding M boxes.
scope: Op scope name. Defaults to 'Compare' if None.
Returns:
a (float32) tensor of shape [N, M] with pairwise similarity score.
"""
with tf.name_scope(scope, 'Compare', [boxlist1, boxlist2]) as scope:
return self._compare(boxlist1, boxlist2)
@abstractmethod
def _compare(self, boxlist1, boxlist2):
pass
class IouSimilarity(RegionSimilarityCalculator):
"""Class to compute similarity based on Intersection over Union (IOU) metric.
This class computes pairwise similarity between two BoxLists based on IOU.
"""
def _compare(self, boxlist1, boxlist2):
"""Compute pairwise IOU similarity between the two BoxLists.
Args:
boxlist1: BoxList holding N boxes.
boxlist2: BoxList holding M boxes.
Returns:
A tensor with shape [N, M] representing pairwise iou scores.
"""
return box_list_ops.iou(boxlist1, boxlist2)
class NegSqDistSimilarity(RegionSimilarityCalculator):
"""Class to compute similarity based on the squared distance metric.
This class computes pairwise similarity between two BoxLists based on the
negative squared distance metric.
"""
def _compare(self, boxlist1, boxlist2):
"""Compute matrix of (negated) sq distances.
Args:
boxlist1: BoxList holding N boxes.
boxlist2: BoxList holding M boxes.
Returns:
A tensor with shape [N, M] representing negated pairwise squared distance.
"""
return -1 * box_list_ops.sq_dist(boxlist1, boxlist2)
class IoaSimilarity(RegionSimilarityCalculator):
"""Class to compute similarity based on Intersection over Area (IOA) metric.
This class computes pairwise similarity between two BoxLists based on their
pairwise intersections divided by the areas of second BoxLists.
"""
def _compare(self, boxlist1, boxlist2):
"""Compute pairwise IOA similarity between the two BoxLists.
Args:
boxlist1: BoxList holding N boxes.
boxlist2: BoxList holding M boxes.
Returns:
A tensor with shape [N, M] representing pairwise IOA scores.
"""
return box_list_ops.ioa(boxlist1, boxlist2)
class ThresholdedIouSimilarity(RegionSimilarityCalculator):
"""Class to compute similarity based on thresholded IOU and score.
This class computes pairwise similarity between two BoxLists based on IOU and
a 'score' present in boxlist1. If IOU > threshold, then the entry in the
output pairwise tensor will contain `score`, otherwise 0.
"""
def __init__(self, iou_threshold=0):
"""Initialize the ThresholdedIouSimilarity.
Args:
iou_threshold: For a given pair of boxes, if the IOU is > iou_threshold,
then the comparison result will be the foreground probability of
the first box, otherwise it will be zero.
"""
self._iou_threshold = iou_threshold
def _compare(self, boxlist1, boxlist2):
"""Compute pairwise IOU similarity between the two BoxLists and score.
Args:
boxlist1: BoxList holding N boxes. Must have a score field.
boxlist2: BoxList holding M boxes.
Returns:
      A tensor with shape [N, M] representing scores thresholded by pairwise
iou scores.
"""
ious = box_list_ops.iou(boxlist1, boxlist2)
scores = boxlist1.get_field(fields.BoxListFields.scores)
scores = tf.expand_dims(scores, axis=1)
row_replicated_scores = tf.tile(scores, [1, tf.shape(ious)[-1]])
thresholded_ious = tf.where(ious > self._iou_threshold,
row_replicated_scores, tf.zeros_like(ious))
return thresholded_ious
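if __name__ == '__main__':
    # Minimal usage sketch (illustrative only). It assumes the sibling box_list
    # module from the same package and a TF1-style session, matching the TF1 APIs
    # used above; boxes are [ymin, xmin, ymax, xmax] in normalized coordinates.
    from object_detection.tensorflow_detect.core import box_list
    boxes1 = box_list.BoxList(tf.constant([[0.0, 0.0, 1.0, 1.0]]))
    boxes2 = box_list.BoxList(tf.constant([[0.0, 0.0, 0.5, 1.0],
                                           [0.5, 0.0, 1.0, 1.0]]))
    iou = IouSimilarity().compare(boxes1, boxes2)
    with tf.Session() as sess:
        print(sess.run(iou))  # pairwise IOU scores, shape [1, 2], roughly [[0.5, 0.5]]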
| 33.419355
| 80
| 0.719691
|
7953654b85f3fdd6f8ea9683baad787e953431b3
| 304
|
py
|
Python
|
backend/thunder/__init__.py
|
wikibootup/thunder
|
d617aa498a2d387c1595ac3f580c4875f05d356b
|
[
"MIT"
] | null | null | null |
backend/thunder/__init__.py
|
wikibootup/thunder
|
d617aa498a2d387c1595ac3f580c4875f05d356b
|
[
"MIT"
] | null | null | null |
backend/thunder/__init__.py
|
wikibootup/thunder
|
d617aa498a2d387c1595ac3f580c4875f05d356b
|
[
"MIT"
] | null | null | null |
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
import os
app = Flask(__name__, static_url_path='', static_folder='../../front/src/')
app.config.from_object(os.environ['APP_SETTINGS'])
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db = SQLAlchemy(app)
from thunder import views
| 25.333333
| 75
| 0.782895
|
7953660337f78f6e90c2956745551d1d836d429e
| 1,890
|
py
|
Python
|
pysper/parser/__init__.py
|
arvy/sperf
|
c047ae5f3b1daf70cc227784197e4ef37caaf556
|
[
"Apache-2.0"
] | null | null | null |
pysper/parser/__init__.py
|
arvy/sperf
|
c047ae5f3b1daf70cc227784197e4ef37caaf556
|
[
"Apache-2.0"
] | null | null | null |
pysper/parser/__init__.py
|
arvy/sperf
|
c047ae5f3b1daf70cc227784197e4ef37caaf556
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 DataStax, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" pysper parser top level. """
from pysper.parser import systemlog, outputlog, block_dev
from pysper import env
def read_system_log(lines, **extras):
"""read the system log, yields an iterable set of events of parsed logs"""
return read_log(lines, systemlog.capture_line, **extras)
def read_output_log(lines, **extras):
"""read the output log, yields an iterable set of events of parsed logs"""
return read_log(lines, outputlog.capture_line, **extras)
def read_block_dev(lines, **extras):
"""consumes a block dev report, yields an iterable set of events of parsed logs"""
return read_log(lines, block_dev.capture_line, **extras)
def _default_capture(line):
"""does nothing interesting but will print out lines if debug is on"""
if env.DEBUG:
print(line)
def read_log(lines, capture_line_func=_default_capture, **extras):
""" parses an iterable set of lines yielding events """
fields = None
for line in lines:
next_fields = capture_line_func(line)
if next_fields is not None:
if fields is not None:
fields.update(extras)
yield fields
fields = next_fields
#need to do this one last time to clear out the last update to next_fields
if fields is not None:
yield fields
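if __name__ == "__main__":
    # Minimal usage sketch (illustrative only; 'system.log' is a hypothetical path).
    # Each yielded event is the dict of fields captured from a log line, merged
    # with any keyword extras passed through (here a node name).
    import sys
    path = sys.argv[1] if len(sys.argv) > 1 else "system.log"
    with open(path) as log_file:
        for event in read_system_log(log_file, node="node1"):
            print(event)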
| 38.571429
| 86
| 0.710053
|
79536609758e09b8a3092a07af760bdbf4fcfdbd
| 1,768
|
py
|
Python
|
Medium/81.py
|
Hellofafar/Leetcode
|
7a459e9742958e63be8886874904e5ab2489411a
|
[
"CNRI-Python"
] | 6
|
2017-09-25T18:05:50.000Z
|
2019-03-27T00:23:15.000Z
|
Medium/81.py
|
Hellofafar/Leetcode
|
7a459e9742958e63be8886874904e5ab2489411a
|
[
"CNRI-Python"
] | 1
|
2017-10-29T12:04:41.000Z
|
2018-08-16T18:00:37.000Z
|
Medium/81.py
|
Hellofafar/Leetcode
|
7a459e9742958e63be8886874904e5ab2489411a
|
[
"CNRI-Python"
] | null | null | null |
# ------------------------------
# 81. Search in Rotated Sorted Array II
#
# Description:
# Follow up for "Search in Rotated Sorted Array":
# What if duplicates are allowed?
#
# Would this affect the run-time complexity? How and why?
#
# Suppose an array sorted in ascending order is rotated at some pivot unknown to you beforehand.
# (i.e., 0 1 2 4 5 6 7 might become 4 5 6 7 0 1 2).
# Write a function to determine if a given target is in the array.
# The array may contain duplicates.
#
# Version: 1.0
# 01/27/18 by Jianfa
# ------------------------------
class Solution(object):
def search(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: bool
"""
if not nums:
return False
low = 0
high = len(nums) - 1
while low <= high:
            mid = (low + high) // 2  # integer division keeps mid a valid index in Python 3
if nums[mid] == target:
return True
elif nums[mid] < nums[high]:
if nums[mid] < target and target <= nums[high]:
low = mid + 1
else:
high = mid - 1
elif nums[mid] > nums[high]:
if nums[mid] > target and target >= nums[low]:
high = mid - 1
else:
low = mid + 1
else:
high -= 1
return False
# Used for testing
if __name__ == "__main__":
test = Solution()
nums = [1]
target = 0
print(test.search(nums, target))
# ------------------------------
# Summary:
# Follow the idea from https://leetcode.com/problems/search-in-rotated-sorted-array-ii/discuss/28194.
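# For example, nums = [1, 1, 3, 1], target = 3: when nums[mid] == nums[high], the
# duplicate says nothing about which half is sorted, so the only safe move is
# high -= 1. That is why the worst case degrades to O(n) here, unlike the
# O(log n) bound of the duplicate-free version.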
| 27.625
| 101
| 0.478507
|
7953661c6878a19e5ce9edb68892fc7dadb52410
| 8,703
|
py
|
Python
|
design2.py
|
Egor-Iudin/damped_oscillations
|
fbda2acfd3d6ac6e39315e90cdeb345e263a1f6e
|
[
"MIT"
] | 1
|
2021-10-31T22:55:48.000Z
|
2021-10-31T22:55:48.000Z
|
design2.py
|
Egor-Iudin/damped_oscillations
|
fbda2acfd3d6ac6e39315e90cdeb345e263a1f6e
|
[
"MIT"
] | null | null | null |
design2.py
|
Egor-Iudin/damped_oscillations
|
fbda2acfd3d6ac6e39315e90cdeb345e263a1f6e
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'design2.ui'
#
# Created by: PyQt5 UI code generator 5.10.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(800, 600)
MainWindow.setMinimumSize(QtCore.QSize(800, 600))
MainWindow.setMaximumSize(QtCore.QSize(800, 600))
font = QtGui.QFont()
font.setFamily("Ubuntu")
font.setBold(False)
font.setItalic(False)
font.setUnderline(False)
font.setWeight(50)
font.setStrikeOut(False)
MainWindow.setFont(font)
MainWindow.setContextMenuPolicy(QtCore.Qt.DefaultContextMenu)
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setMinimumSize(QtCore.QSize(800, 600))
self.centralwidget.setMaximumSize(QtCore.QSize(800, 600))
font = QtGui.QFont()
font.setKerning(True)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.centralwidget.setFont(font)
self.centralwidget.setObjectName("centralwidget")
self.graphicsView = PlotWidget(self.centralwidget)
self.graphicsView.setGeometry(QtCore.QRect(0, 100, 800, 500))
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.MinimumExpanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.graphicsView.sizePolicy().hasHeightForWidth())
self.graphicsView.setSizePolicy(sizePolicy)
self.graphicsView.setMinimumSize(QtCore.QSize(800, 500))
self.graphicsView.setMaximumSize(QtCore.QSize(800, 500))
self.graphicsView.setObjectName("graphicsView")
self.splitter = QtWidgets.QSplitter(self.centralwidget)
self.splitter.setGeometry(QtCore.QRect(0, 0, 800, 100))
self.splitter.setMinimumSize(QtCore.QSize(800, 100))
self.splitter.setMaximumSize(QtCore.QSize(812, 104))
self.splitter.setOrientation(QtCore.Qt.Horizontal)
self.splitter.setObjectName("splitter")
self.splitter_4 = QtWidgets.QSplitter(self.splitter)
self.splitter_4.setOrientation(QtCore.Qt.Vertical)
self.splitter_4.setObjectName("splitter_4")
self.label_3 = QtWidgets.QLabel(self.splitter_4)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(200)
sizePolicy.setVerticalStretch(40)
sizePolicy.setHeightForWidth(self.label_3.sizePolicy().hasHeightForWidth())
self.label_3.setSizePolicy(sizePolicy)
self.label_3.setMinimumSize(QtCore.QSize(200, 40))
self.label_3.setMaximumSize(QtCore.QSize(200, 40))
self.label_3.setAlignment(QtCore.Qt.AlignCenter)
self.label_3.setObjectName("label_3")
self.doubleSpinBox_3 = QtWidgets.QDoubleSpinBox(self.splitter_4)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.MinimumExpanding)
sizePolicy.setHorizontalStretch(200)
sizePolicy.setVerticalStretch(60)
sizePolicy.setHeightForWidth(self.doubleSpinBox_3.sizePolicy().hasHeightForWidth())
self.doubleSpinBox_3.setSizePolicy(sizePolicy)
self.doubleSpinBox_3.setObjectName("doubleSpinBox_3")
self.splitter_1 = QtWidgets.QSplitter(self.splitter)
self.splitter_1.setOrientation(QtCore.Qt.Vertical)
self.splitter_1.setObjectName("splitter_1")
self.label_1 = QtWidgets.QLabel(self.splitter_1)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.MinimumExpanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_1.sizePolicy().hasHeightForWidth())
self.label_1.setSizePolicy(sizePolicy)
self.label_1.setMinimumSize(QtCore.QSize(200, 40))
self.label_1.setMaximumSize(QtCore.QSize(200, 40))
self.label_1.setAlignment(QtCore.Qt.AlignCenter)
self.label_1.setObjectName("label_1")
self.doubleSpinBox_1 = QtWidgets.QDoubleSpinBox(self.splitter_1)
self.doubleSpinBox_1.setEnabled(True)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(200)
sizePolicy.setVerticalStretch(60)
sizePolicy.setHeightForWidth(self.doubleSpinBox_1.sizePolicy().hasHeightForWidth())
self.doubleSpinBox_1.setSizePolicy(sizePolicy)
self.doubleSpinBox_1.setMinimumSize(QtCore.QSize(200, 60))
self.doubleSpinBox_1.setMaximumSize(QtCore.QSize(200, 60))
self.doubleSpinBox_1.setObjectName("doubleSpinBox_1")
self.splitter_3 = QtWidgets.QSplitter(self.splitter)
self.splitter_3.setOrientation(QtCore.Qt.Vertical)
self.splitter_3.setObjectName("splitter_3")
self.label_4 = QtWidgets.QLabel(self.splitter_3)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.MinimumExpanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_4.sizePolicy().hasHeightForWidth())
self.label_4.setSizePolicy(sizePolicy)
self.label_4.setMinimumSize(QtCore.QSize(200, 40))
self.label_4.setMaximumSize(QtCore.QSize(200, 40))
self.label_4.setAlignment(QtCore.Qt.AlignCenter)
self.label_4.setObjectName("label_4")
self.doubleSpinBox_4 = QtWidgets.QDoubleSpinBox(self.splitter_3)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.MinimumExpanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.doubleSpinBox_4.sizePolicy().hasHeightForWidth())
self.doubleSpinBox_4.setSizePolicy(sizePolicy)
self.doubleSpinBox_4.setWrapping(False)
self.doubleSpinBox_4.setObjectName("doubleSpinBox_4")
self.splitter_2 = QtWidgets.QSplitter(self.splitter)
self.splitter_2.setOrientation(QtCore.Qt.Vertical)
self.splitter_2.setObjectName("splitter_2")
self.label_2 = QtWidgets.QLabel(self.splitter_2)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.MinimumExpanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_2.sizePolicy().hasHeightForWidth())
self.label_2.setSizePolicy(sizePolicy)
self.label_2.setMinimumSize(QtCore.QSize(200, 40))
self.label_2.setMaximumSize(QtCore.QSize(200, 40))
self.label_2.setAlignment(QtCore.Qt.AlignCenter)
self.label_2.setObjectName("label_2")
self.doubleSpinBox_2 = QtWidgets.QDoubleSpinBox(self.splitter_2)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(200)
sizePolicy.setVerticalStretch(60)
sizePolicy.setHeightForWidth(self.doubleSpinBox_2.sizePolicy().hasHeightForWidth())
self.doubleSpinBox_2.setSizePolicy(sizePolicy)
self.doubleSpinBox_2.setMinimumSize(QtCore.QSize(200, 60))
self.doubleSpinBox_2.setMaximumSize(QtCore.QSize(200, 60))
self.doubleSpinBox_2.setObjectName("doubleSpinBox_2")
MainWindow.setCentralWidget(self.centralwidget)
self.retranslateUi(MainWindow)
self.doubleSpinBox_1.editingFinished.connect(self.update_graphics)
self.doubleSpinBox_4.editingFinished.connect(self.update_graphics)
self.doubleSpinBox_3.editingFinished.connect(self.update_graphics)
self.doubleSpinBox_2.editingFinished.connect(self.update_graphics)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
_translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "Затухающие колебания"))  # "Damped oscillations"
        self.label_3.setText(_translate("MainWindow", "m - масса тела"))  # "m - mass of the body"
        self.label_1.setText(_translate("MainWindow", "A0 - начальная амплитуда"))  # "A0 - initial amplitude"
        self.label_4.setText(_translate("MainWindow", "в - коэффицент затухания"))  # "b - damping coefficient"
        self.label_2.setText(_translate("MainWindow", "k - коэффициент жёсткости"))  # "k - stiffness coefficient"
def update_graphics(self):
pass
from pyqtgraph import PlotWidget
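if __name__ == "__main__":
    # Minimal launch sketch (illustrative only, not emitted by pyuic5): build the
    # generated UI on a bare QMainWindow and start the Qt event loop.
    import sys
    app = QtWidgets.QApplication(sys.argv)
    window = QtWidgets.QMainWindow()
    ui = Ui_MainWindow()
    ui.setupUi(window)
    window.show()
    sys.exit(app.exec_())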
| 54.39375
| 122
| 0.737102
|
7953663d3a88141f775fc9c395e30153b40524db
| 4,770
|
py
|
Python
|
examples/cooperation/cooperation/agents.py
|
cwhd/mesa
|
9d43e589831ad3a1afe9cfb64a887fa2bd9e3cec
|
[
"Apache-2.0"
] | 1
|
2021-04-09T10:55:32.000Z
|
2021-04-09T10:55:32.000Z
|
examples/cooperation/cooperation/agents.py
|
cwhd/mesa
|
9d43e589831ad3a1afe9cfb64a887fa2bd9e3cec
|
[
"Apache-2.0"
] | null | null | null |
examples/cooperation/cooperation/agents.py
|
cwhd/mesa
|
9d43e589831ad3a1afe9cfb64a887fa2bd9e3cec
|
[
"Apache-2.0"
] | null | null | null |
from mesa import Agent
from cooperation.random_walk import RandomWalker
from cooperation.fcmwrapper import FCMAgent
"""
A cow that uses a mental model to decide what to do
Here are some model ids:
#666-99-6969
#123456789
#75-325
"""
class FCMCow(RandomWalker):
def __init__(self, unique_id, pos, model, moore, fcm_result=None, energy=50, is_greedy=False):
super().__init__(unique_id, pos, model, moore=moore)
self.energy = energy
self.greedy = is_greedy
self.fcm_result = fcm_result
#this gets called every tick
def step(self):
#print("FCM RESULT")
#print(self.fcm_result)
self.random_move()
living = True
self.energy -= self.model.metabolism
# Eat if there is grass
eat_chance = self.random.randrange(0, 100) / 100
if self.fcm_result['Food Observation'] > eat_chance:
#first check for grass
this_cell = self.model.grid.get_cell_list_contents([self.pos])
grass_patch = [obj for obj in this_cell
if isinstance(obj, GrassPatch)][0]
if grass_patch.fully_grown:
#the greedy model always returns 1, same effect as procedural model
#cooperative cow
eat_amount = grass_patch.countdown * self.fcm_result['Eat']
grass_patch.countdown += eat_amount
#this line could be how the cow feels, maybe they feel low energy and it causes other behavior?
#self.energy += self.fcm_result['Energy'] * self.model.grass_energy
self.energy += self.model.grass_energy
grass_patch.fully_grown = False
# Death
if self.energy < 0: #self.fcm_result['Energy']
self.model.grid._remove_agent(self.pos, self)
self.model.schedule.remove(self)
living = False
# Reproduce
if living and self.random.random() < self.model.reproduction_threshold:
self.energy -= self.model.reproduction_cost
calf = FCMCow(self.model.next_id(), self.pos, self.model,
self.moore, self.fcm_result, self.energy, self.greedy)
self.model.grid.place_agent(calf, self.pos)
self.model.schedule.add(calf)
# you can have a greedy cow or a cooperative cow
# note that instead of the individual methods in NetLogo everything ends up going into the step function
class Cow(RandomWalker):
def __init__(self, unique_id, pos, model, moore, energy=None, is_greedy=False):
super().__init__(unique_id, pos, model, moore=moore)
self.energy = energy
self.greedy = is_greedy
#this gets called every tick
def step(self):
self.random_move()
living = True
self.energy -= self.model.metabolism
#Eat if there is grass
this_cell = self.model.grid.get_cell_list_contents([self.pos])
grass_patch = [obj for obj in this_cell
if isinstance(obj, GrassPatch)][0]
if grass_patch.fully_grown:
self.energy += self.model.grass_energy
grass_patch.fully_grown = False
if self.greedy:
grass_patch.countdown = 0
else:
grass_patch.countdown -= 1
# Death
if self.energy < 0:
self.model.grid._remove_agent(self.pos, self)
self.model.schedule.remove(self)
living = False
# Reproduce
if living and self.random.random() < self.model.reproduction_threshold:
self.energy -= self.model.reproduction_cost
calf = Cow(self.model.next_id(), self.pos, self.model,
self.moore, self.energy)
self.model.grid.place_agent(calf, self.pos)
self.model.schedule.add(calf)
#I borrowed this from the wolf and sheep model.
class GrassPatch(Agent):
'''
    A patch of grass that grows at a fixed rate and is eaten by cows
'''
def __init__(self, unique_id, pos, model, fully_grown, countdown):
'''
Creates a new patch of grass
Args:
grown: (boolean) Whether the patch of grass is fully grown or not
countdown: Time for the patch of grass to be fully grown again
'''
super().__init__(unique_id, model)
self.fully_grown = fully_grown
self.countdown = countdown
self.pos = pos
def step(self):
if not self.fully_grown:
if self.countdown <= 0:
# Set as fully grown
self.fully_grown = True
self.countdown = self.model.grass_regrowth_time
else:
self.countdown -= 1
| 37.559055
| 111
| 0.606709
|
7953668e6e6ccaf659cdb0a73a69ffc8db3dac02
| 302
|
py
|
Python
|
config/local.py
|
uzura8/flaskbird
|
6f6bfc5c03286809f103d01a20935c9ce5f93e8f
|
[
"MIT"
] | null | null | null |
config/local.py
|
uzura8/flaskbird
|
6f6bfc5c03286809f103d01a20935c9ce5f93e8f
|
[
"MIT"
] | null | null | null |
config/local.py
|
uzura8/flaskbird
|
6f6bfc5c03286809f103d01a20935c9ce5f93e8f
|
[
"MIT"
] | null | null | null |
from config import BaseConfig
class LocalConfig(BaseConfig):
TESTING = False
DEBUG = True
SQLALCHEMY_TRACK_MODIFICATIONS = True
SQLALCHEMY_ECHO = True
IS_LOGGING_MAIL = True
# Request params
PARAMS_LIST_DEFAULT = {
'per_page': 3,
'per_page_max': 10,
}
| 20.133333
| 41
| 0.662252
|
7953671b0dab0ca91b4de7a2b056dd9e7141a45a
| 3,090
|
py
|
Python
|
src/fast_playlist/views.py
|
dmvieira/gmusic-playlist
|
d8114c29aa80985bd3a8725bed76449410bb3d0f
|
[
"Apache-2.0"
] | null | null | null |
src/fast_playlist/views.py
|
dmvieira/gmusic-playlist
|
d8114c29aa80985bd3a8725bed76449410bb3d0f
|
[
"Apache-2.0"
] | null | null | null |
src/fast_playlist/views.py
|
dmvieira/gmusic-playlist
|
d8114c29aa80985bd3a8725bed76449410bb3d0f
|
[
"Apache-2.0"
] | null | null | null |
from django.shortcuts import render
# Create your views here.
# form for donation
#<form action="https://www.paypal.com/cgi-bin/webscr" method="post" target="_top">
#<input type="hidden" name="cmd" value="_s-xclick">
#<input type="hidden" name="encrypted" value="-----BEGIN PKCS7-----MIIHTwYJKoZIhvcNAQcEoIIHQDCCBzwCAQExggEwMIIBLAIBADCBlDCBjjELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAkNBMRYwFAYDVQQHEw1Nb3VudGFpbiBWaWV3MRQwEgYDVQQKEwtQYXlQYWwgSW5jLjETMBEGA1UECxQKbGl2ZV9jZXJ0czERMA8GA1UEAxQIbGl2ZV9hcGkxHDAaBgkqhkiG9w0BCQEWDXJlQHBheXBhbC5jb20CAQAwDQYJKoZIhvcNAQEBBQAEgYAeeAyf+2WQMHuM3Sd51AIq+MBRH7LWKSeir+4wZ+0Py3adpJsDrMjCTS5x7HCXGJ4MspmkkQeueQc8M64nYb+ww7EMNLYVYAr2Vy2+xXzKen2knALdQBcS+1RI4bopzmVDtPq7uUDRJFybNHaMhbvv7RUBYoZq/6IUWlx4zyxGlzELMAkGBSsOAwIaBQAwgcwGCSqGSIb3DQEHATAUBggqhkiG9w0DBwQIyK4MWBeRrniAgaiK31FaGzS1XpvXHsa1P7ZojQpDoXtIJcZyBnyVuubkZmMG08HMLvCZuXGSEYtZsSOauBQxXKhoeajAp/q6d51ndjGAsnR9gjV2bvSkFLuTOM73tbqdEjQClPfv8W7qBHbtM6R9ndbwYqWT/rc4mGSGYxi+yCkxd1UmIy68rqstDCzzXRvv6SpSQsZ2C0pCgDPco1Ggf0LZNbMqXqOUrvXNfFrE3Tmn2dWgggOHMIIDgzCCAuygAwIBAgIBADANBgkqhkiG9w0BAQUFADCBjjELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAkNBMRYwFAYDVQQHEw1Nb3VudGFpbiBWaWV3MRQwEgYDVQQKEwtQYXlQYWwgSW5jLjETMBEGA1UECxQKbGl2ZV9jZXJ0czERMA8GA1UEAxQIbGl2ZV9hcGkxHDAaBgkqhkiG9w0BCQEWDXJlQHBheXBhbC5jb20wHhcNMDQwMjEzMTAxMzE1WhcNMzUwMjEzMTAxMzE1WjCBjjELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAkNBMRYwFAYDVQQHEw1Nb3VudGFpbiBWaWV3MRQwEgYDVQQKEwtQYXlQYWwgSW5jLjETMBEGA1UECxQKbGl2ZV9jZXJ0czERMA8GA1UEAxQIbGl2ZV9hcGkxHDAaBgkqhkiG9w0BCQEWDXJlQHBheXBhbC5jb20wgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAMFHTt38RMxLXJyO2SmS+Ndl72T7oKJ4u4uw+6awntALWh03PewmIJuzbALScsTS4sZoS1fKciBGoh11gIfHzylvkdNe/hJl66/RGqrj5rFb08sAABNTzDTiqqNpJeBsYs/c2aiGozptX2RlnBktH+SUNpAajW724Nv2Wvhif6sFAgMBAAGjge4wgeswHQYDVR0OBBYEFJaffLvGbxe9WT9S1wob7BDWZJRrMIG7BgNVHSMEgbMwgbCAFJaffLvGbxe9WT9S1wob7BDWZJRroYGUpIGRMIGOMQswCQYDVQQGEwJVUzELMAkGA1UECBMCQ0ExFjAUBgNVBAcTDU1vdW50YWluIFZpZXcxFDASBgNVBAoTC1BheVBhbCBJbmMuMRMwEQYDVQQLFApsaXZlX2NlcnRzMREwDwYDVQQDFAhsaXZlX2FwaTEcMBoGCSqGSIb3DQEJARYNcmVAcGF5cGFsLmNvbYIBADAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBBQUAA4GBAIFfOlaagFrl71+jq6OKidbWFSE+Q4FqROvdgIONth+8kSK//Y/4ihuE4Ymvzn5ceE3S/iBSQQMjyvb+s2TWbQYDwcp129OPIbD9epdr4tJOUNiSojw7BHwYRiPh58S1xGlFgHFXwrEBb3dgNbMUa+u4qectsMAXpVHnD9wIyfmHMYIBmjCCAZYCAQEwgZQwgY4xCzAJBgNVBAYTAlVTMQswCQYDVQQIEwJDQTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEUMBIGA1UEChMLUGF5UGFsIEluYy4xEzARBgNVBAsUCmxpdmVfY2VydHMxETAPBgNVBAMUCGxpdmVfYXBpMRwwGgYJKoZIhvcNAQkBFg1yZUBwYXlwYWwuY29tAgEAMAkGBSsOAwIaBQCgXTAYBgkqhkiG9w0BCQMxCwYJKoZIhvcNAQcBMBwGCSqGSIb3DQEJBTEPFw0xNTA0MjYyMjE5MTVaMCMGCSqGSIb3DQEJBDEWBBSW7Du6dcab72VRjet/DzQ3/a/5VDANBgkqhkiG9w0BAQEFAASBgJ1eElllN/KVVfZIIGa6dn97Osl04/DrhbmuQwXKYS/xAnvu4OtgybrP4AVNsi6t1N+/uakC5VYlJ6WIElNyZZAru+QyUnfGhCCqwMV9bEmkk2bSxWWwBCT50Q26ifDz+h7zG73MSFl25qRzxgi9cyIDXErI4KQ9eCXkiQZvNvB+-----END PKCS7-----
#">
#<input type="image" src="https://www.paypalobjects.com/en_US/i/btn/btn_donateCC_LG.gif" border="0" name="submit" alt="PayPal - The safer, easier way to pay online!">
#<img alt="" border="0" src="https://www.paypalobjects.com/pt_BR/i/scr/pixel.gif" width="1" height="1">
#</form>
| 237.692308
| 2,586
| 0.928155
|
7953684caec750d940ed592ddcef574b40d1bfbd
| 5,400
|
py
|
Python
|
benchmarks/benchmarks.py
|
jdcourcol/NeuroM
|
5a654f6fb00b930391035f46bbbc1a39a11551fb
|
[
"BSD-3-Clause"
] | 1
|
2016-10-25T09:23:16.000Z
|
2016-10-25T09:23:16.000Z
|
benchmarks/benchmarks.py
|
jdcourcol/NeuroM
|
5a654f6fb00b930391035f46bbbc1a39a11551fb
|
[
"BSD-3-Clause"
] | null | null | null |
benchmarks/benchmarks.py
|
jdcourcol/NeuroM
|
5a654f6fb00b930391035f46bbbc1a39a11551fb
|
[
"BSD-3-Clause"
] | null | null | null |
from pathlib import Path
import neurom as nm
import neurom.io
import neurom.fst._core
from neurom.check import neuron_checks as nc
from neurom.check import structural_checks as sc
DATA_DIR = Path(__file__).parent.parent / 'test_data/'
class TimeLoadMorphology(object):
def time_swc(self):
path = Path(DATA_DIR, 'swc/Neuron.swc')
nm.load_neuron(path)
def time_neurolucida_asc(self):
path = Path(DATA_DIR, 'neurolucida/bio_neuron-000.asc')
nm.load_neuron(path)
def time_h5(self):
path = Path(DATA_DIR, 'h5/v1/bio_neuron-000.h5')
nm.load_neuron(path)
class TimeFeatures(object):
def setup(self):
path = Path(DATA_DIR, 'h5/v1/bio_neuron-000.h5')
self.neuron = nm.load_neuron(path)
def time_total_length(self):
nm.get('total_length', self.neuron)
def time_total_length_per_neurite(self):
nm.get('total_length_per_neurite', self.neuron)
def time_section_lengths(self):
nm.get('section_lengths', self.neuron)
def time_section_volumes(self):
nm.get('section_volumes', self.neuron)
def time_section_areas(self):
nm.get('section_areas', self.neuron)
def time_section_tortuosity(self):
nm.get('section_tortuosity', self.neuron)
def time_section_path_distances(self):
nm.get('section_path_distances', self.neuron)
def time_number_of_sections(self):
nm.get('number_of_sections', self.neuron)
def time_number_of_sections_per_neurite(self):
nm.get('number_of_sections_per_neurite', self.neuron)
def time_number_of_neurites(self):
nm.get('number_of_neurites', self.neuron)
def time_number_of_bifurcations(self):
nm.get('number_of_bifurcations', self.neuron)
def time_number_of_forking_points(self):
nm.get('number_of_forking_points', self.neuron)
def time_number_of_terminations(self):
nm.get('number_of_terminations', self.neuron)
def time_section_branch_orders(self):
nm.get('section_branch_orders', self.neuron)
def time_section_radial_distances(self):
nm.get('section_radial_distances', self.neuron)
def time_local_bifurcation_angles(self):
nm.get('local_bifurcation_angles', self.neuron)
def time_remote_bifurcation_angles(self):
nm.get('remote_bifurcation_angles', self.neuron)
def time_partition(self):
nm.get('partition', self.neuron)
def time_number_of_segments(self):
nm.get('number_of_segments', self.neuron)
def time_segment_lengths(self):
nm.get('segment_lengths', self.neuron)
def time_segment_radii(self):
nm.get('segment_radii', self.neuron)
def time_segment_midpoints(self):
nm.get('segment_midpoints', self.neuron)
def time_segment_taper_rates(self):
nm.get('segment_taper_rates', self.neuron)
def time_segment_radial_distances(self):
nm.get('segment_radial_distances', self.neuron)
def time_segment_meander_angles(self):
nm.get('segment_meander_angles', self.neuron)
def time_principal_direction_extents(self):
nm.get('principal_direction_extents', self.neuron)
def time_sholl_frequency(self):
nm.get('sholl_frequency', self.neuron)
class TimeChecks:
def setup(self):
path = Path(DATA_DIR, 'h5/v1/bio_neuron-000.h5')
self.data_wrapper = neurom.io.load_data(path)
self.neuron = neurom.fst._core.FstNeuron(self.data_wrapper)
def time_has_sequential_ids(self):
sc.has_sequential_ids(self.data_wrapper)
def time_no_missing_parents(self):
sc.no_missing_parents(self.data_wrapper)
def time_is_single_tree(self):
sc.is_single_tree(self.data_wrapper)
def time_has_increasing_ids(self):
sc.has_increasing_ids(self.data_wrapper)
def time_has_soma_points(self):
sc.has_soma_points(self.data_wrapper)
def time_has_all_finite_radius_neurites(self):
sc.has_all_finite_radius_neurites(self.data_wrapper, threshold=0.0)
def time_has_valid_soma(self):
sc.has_valid_soma(self.data_wrapper)
def time_has_valid_neurites(self):
sc.has_valid_neurites(self.data_wrapper)
def time_has_axon(self):
nc.has_axon(self.neuron)
def time_has_apical_dendrite(self):
nc.has_apical_dendrite(self.neuron, min_number=1)
def time_has_basal_dendrite(self):
nc.has_basal_dendrite(self.neuron, min_number=1)
def time_has_no_flat_neurites(self):
nc.has_no_flat_neurites(self.neuron, tol=0.1, method='ratio')
def time_has_all_monotonic_neurites(self):
nc.has_all_monotonic_neurites(self.neuron, tol=1e-6)
def time_has_all_nonzero_segment_lengths(self):
nc.has_all_nonzero_segment_lengths(self.neuron, threshold=0.0)
def time_has_all_nonzero_section_lengths(self):
nc.has_all_nonzero_section_lengths(self.neuron, threshold=0.0)
def time_has_all_nonzero_neurite_radii(self):
nc.has_all_nonzero_neurite_radii(self.neuron, threshold=0.0)
def time_has_nonzero_soma_radius(self):
nc.has_nonzero_soma_radius(self.neuron, threshold=0.0)
def time_has_no_jumps(self):
nc.has_no_jumps(self.neuron, max_distance=30.0, axis='z')
def time_has_no_fat_ends(self):
nc.has_no_fat_ends(self.neuron, multiple_of_mean=2.0, final_point_count=5)
| 30.857143
| 82
| 0.717222
|
7953687e022ac7c464f3d4f3cb3b26dc37fa332c
| 2,646
|
py
|
Python
|
app.py
|
tjdaley/twitterswitch
|
861ec1ae821d6c445bdcf89e40a78fa76fc5973d
|
[
"BSD-2-Clause"
] | null | null | null |
app.py
|
tjdaley/twitterswitch
|
861ec1ae821d6c445bdcf89e40a78fa76fc5973d
|
[
"BSD-2-Clause"
] | null | null | null |
app.py
|
tjdaley/twitterswitch
|
861ec1ae821d6c445bdcf89e40a78fa76fc5973d
|
[
"BSD-2-Clause"
] | null | null | null |
import RPi.GPIO as GPIO
import json
import time
import twitter
# TODO: Turn these into command line arguments OR config file parameters
HASHTAG = "%23avianaart"
QUERY = f'q={HASHTAG}&result_type=recent&since_id='
PINS = [11, 13, 15, 16] # physical locations on the GPIO strip
def setup_gpio(pins: list):
"""
Set up the GPIO. This will elect to address pins by physical location number.
Args:
pins (list): List of pin numbers connected to the relay board.
"""
# Supress warnings
GPIO.setwarnings(False)
# Reference channels by physical pin number.
GPIO.setmode(GPIO.BOARD)
# Set the pins to output mode.
for pin in pins:
GPIO.setup(pin, GPIO.OUT)
def lights_on(pins: list):
"""
Turn the lights on.
"""
for pin in pins:
GPIO.output(pin, GPIO.LOW)
def lights_off(pins: list):
"""
Turn the lights off.
"""
for pin in pins:
GPIO.output(pin, GPIO.HIGH)
def cleanup():
"""
Clean up before exiting.
"""
GPIO.cleanup()
def load_keys():
with open ("../keys.json", "r") as key_file:
keys = json.load(key_file)
return keys
def connect_twitter(config):
"""
Connect to Twitter.
Args:
        config (dict): The keys needed to authenticate and authorize with Twitter.
Returns:
Reference to connected Twitter service.
"""
api = twitter.Api(consumer_key=config["CONSUMER_KEY"],
consumer_secret=config["CONSUMER_SECRET"],
access_token_key=config["ACCESS_TOKEN_KEY"],
access_token_secret=config["ACCESS_TOKEN_SECRET"],
sleep_on_rate_limit=True)
verified_credentials = api.VerifyCredentials()
# print("Screen name:", verified_credentials.screen_name)
# print("Last Tweet:", verified_credentials.status.text)
return api
def search_twitter(api, last_id: str):
results = []
try:
results = api.GetSearch(raw_query=QUERY+last_id)
except twitter.error.TwitterError as e:
print(str(e))
print(QUERY)
return results
def log_tweet(tweet: dict):
message = f"{tweet.created_at} - {tweet.user.name} at {tweet.user.location}"
print(message)
def main(args):
api = connect_twitter(load_keys())
setup_gpio(PINS)
last_id = "0"
while True:
results = search_twitter(api, last_id)
if len(results) > 0:
# print(results[0].text)
last_id = results[0].id_str
log_tweet(results[0])
lights_on(PINS)
time.sleep(60)
lights_off(PINS)
time.sleep(4)
if __name__ == "__main__":
try:
main(None)
except Exception as e:
print(str(e))
cleanup()
exit()
| 22.05
| 94
| 0.656085
|
795369001d764709b2e4e2794f021be7214c719d
| 12,826
|
py
|
Python
|
tensorforce/agents/double_dqn.py
|
DLPerf/tensorforce
|
33a2d84fa850e8842dfe2cef3901de32cf7cd221
|
[
"Apache-2.0"
] | 1
|
2021-12-25T16:54:16.000Z
|
2021-12-25T16:54:16.000Z
|
tensorforce/agents/double_dqn.py
|
DLPerf/tensorforce
|
33a2d84fa850e8842dfe2cef3901de32cf7cd221
|
[
"Apache-2.0"
] | null | null | null |
tensorforce/agents/double_dqn.py
|
DLPerf/tensorforce
|
33a2d84fa850e8842dfe2cef3901de32cf7cd221
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 Tensorforce Team. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from collections import OrderedDict
from tensorforce import TensorforceError
from tensorforce.agents import TensorforceAgent
class DoubleDQN(TensorforceAgent):
"""
[Double DQN](https://arxiv.org/abs/1509.06461) agent (specification key: `double_dqn` or
`ddqn`).
Args:
states (specification): States specification
(<span style="color:#C00000"><b>required</b></span>, better implicitly specified via
`environment` argument for `Agent.create(...)`), arbitrarily nested dictionary of state
descriptions (usually taken from `Environment.states()`) with the following attributes:
<ul>
<li><b>type</b> (<i>"bool" | "int" | "float"</i>) – state data type
(<span style="color:#00C000"><b>default</b></span>: "float").</li>
<li><b>shape</b> (<i>int | iter[int]</i>) – state shape
(<span style="color:#C00000"><b>required</b></span>).</li>
<li><b>num_values</b> (<i>int > 0</i>) – number of discrete state values
(<span style="color:#C00000"><b>required</b></span> for type "int").</li>
<li><b>min_value/max_value</b> (<i>float</i>) – minimum/maximum state value
(<span style="color:#00C000"><b>optional</b></span> for type "float").</li>
</ul>
actions (specification): Actions specification
(<span style="color:#C00000"><b>required</b></span>, better implicitly specified via
`environment` argument for `Agent.create(...)`), arbitrarily nested dictionary of
action descriptions (usually taken from `Environment.actions()`) with the following
attributes:
<ul>
<li><b>type</b> (<i>"bool" | "int" | "float"</i>) – action data type
(<span style="color:#C00000"><b>required</b></span>).</li>
<li><b>shape</b> (<i>int > 0 | iter[int > 0]</i>) – action shape
(<span style="color:#00C000"><b>default</b></span>: scalar).</li>
<li><b>num_values</b> (<i>int > 0</i>) – number of discrete action values
(<span style="color:#C00000"><b>required</b></span> for type "int").</li>
<li><b>min_value/max_value</b> (<i>float</i>) – minimum/maximum action value
(<span style="color:#00C000"><b>optional</b></span> for type "float").</li>
</ul>
        max_episode_timesteps (int > 0): Upper bound for number of timesteps per episode
(<span style="color:#00C000"><b>default</b></span>: not given, better implicitly
specified via `environment` argument for `Agent.create(...)`).
memory (int > 0): Replay memory capacity, has to fit at least maximum batch_size + maximum
network/estimator horizon + 1 timesteps
(<span style="color:#C00000"><b>required</b></span>).
batch_size (<a href="../modules/parameters.html">parameter</a>, int > 0): Number of
timesteps per update batch
(<span style="color:#C00000"><b>required</b></span>).
network ("auto" | specification): Policy network configuration, see the
[networks documentation](../modules/networks.html)
(<span style="color:#00C000"><b>default</b></span>: "auto", automatically configured
network).
update_frequency ("never" | <a href="../modules/parameters.html">parameter</a>, int > 0 | 0.0 < float <= 1.0):
Frequency of updates, relative to batch_size if float
(<span style="color:#00C000"><b>default</b></span>: 0.25 * batch_size).
start_updating (<a href="../modules/parameters.html">parameter</a>, int >= batch_size):
Number of timesteps before first update
(<span style="color:#00C000"><b>default</b></span>: none).
learning_rate (<a href="../modules/parameters.html">parameter</a>, float > 0.0): Optimizer
learning rate
(<span style="color:#00C000"><b>default</b></span>: 1e-3).
huber_loss (<a href="../modules/parameters.html">parameter</a>, float > 0.0): Huber loss
threshold
(<span style="color:#00C000"><b>default</b></span>: no huber loss).
horizon (<a href="../modules/parameters.html">parameter</a>, int >= 1): n-step DQN, horizon
of discounted-sum reward estimation before target network estimate
(<span style="color:#00C000"><b>default</b></span>: 1).
discount (<a href="../modules/parameters.html">parameter</a>, 0.0 <= float <= 1.0): Discount
factor for future rewards of discounted-sum reward estimation
(<span style="color:#00C000"><b>default</b></span>: 0.99).
return_processing (specification): Return processing as layer or list of layers, see the
[preprocessing documentation](../modules/preprocessing.html)
(<span style="color:#00C000"><b>default</b></span>: no return processing).
predict_terminal_values (bool): Whether to predict the value of terminal states, usually
not required since max_episode_timesteps terminals are handled separately
(<span style="color:#00C000"><b>default</b></span>: false).
target_update_weight (<a href="../modules/parameters.html">parameter</a>, 0.0 < float <= 1.0):
Target network update weight
(<span style="color:#00C000"><b>default</b></span>: 1.0).
target_sync_frequency (<a href="../modules/parameters.html">parameter</a>, int >= 1):
Interval between target network updates
(<span style="color:#00C000"><b>default</b></span>: every update).
l2_regularization (<a href="../modules/parameters.html">parameter</a>, float >= 0.0):
L2 regularization loss weight
(<span style="color:#00C000"><b>default</b></span>: no L2 regularization).
entropy_regularization (<a href="../modules/parameters.html">parameter</a>, float >= 0.0):
Entropy regularization loss weight, to discourage the policy distribution from being
"too certain"
(<span style="color:#00C000"><b>default</b></span>: no entropy regularization).
state_preprocessing (dict[specification]): State preprocessing as layer or list of layers,
see the [preprocessing documentation](../modules/preprocessing.html),
specified per state-type or -name
(<span style="color:#00C000"><b>default</b></span>: linear normalization of bounded
float states to [-2.0, 2.0]).
reward_preprocessing (specification): Reward preprocessing as layer or list of layers,
see the [preprocessing documentation](../modules/preprocessing.html)
(<span style="color:#00C000"><b>default</b></span>: no reward preprocessing).
exploration (<a href="../modules/parameters.html">parameter</a> | dict[<a href="../modules/parameters.html">parameter</a>], float >= 0.0):
Exploration, defined as the probability for uniformly random output in case of `bool`
and `int` actions, and the standard deviation of Gaussian noise added to every output in
case of `float` actions, specified globally or per action-type or -name
(<span style="color:#00C000"><b>default</b></span>: no exploration).
variable_noise (<a href="../modules/parameters.html">parameter</a>, float >= 0.0):
Add Gaussian noise with given standard deviation to all trainable variables, as
alternative exploration mechanism
(<span style="color:#00C000"><b>default</b></span>: no variable noise).<br/><br/>
>>>: For arguments below, see the [Tensorforce agent documentation](tensorforce.html).
parallel_interactions (int > 0)
config (specification)
saver (path | specification)
summarizer (path | specification)
tracking ("all" | iter[string])
recorder (path | specification)
"""
def __init__(
# Required
self, states, actions, memory, batch_size,
# Environment
max_episode_timesteps=None,
# Network
network='auto',
# Optimization
update_frequency=0.25, start_updating=None, learning_rate=1e-3, huber_loss=None,
# Reward estimation
horizon=1, discount=0.99, return_processing=None, predict_terminal_values=False,
# Target network
target_update_weight=1.0, target_sync_frequency=1,
# Preprocessing
state_preprocessing='linear_normalization', reward_preprocessing=None,
# Exploration
exploration=0.0, variable_noise=0.0,
# Regularization
l2_regularization=0.0, entropy_regularization=0.0,
# Parallel interactions
parallel_interactions=1,
# Config, saver, summarizer, tracking, recorder
config=None, saver=None, summarizer=None, tracking=None, recorder=None,
# Deprecated
**kwargs
):
if 'estimate_terminal' in kwargs:
raise TensorforceError.deprecated(
name='DoubleDQN', argument='estimate_terminal',
replacement='predict_terminal_values'
)
self.spec = OrderedDict(
            agent='double_dqn',
states=states, actions=actions, memory=memory, batch_size=batch_size,
max_episode_timesteps=max_episode_timesteps,
network=network,
update_frequency=update_frequency, start_updating=start_updating,
learning_rate=learning_rate, huber_loss=huber_loss,
horizon=horizon, discount=discount, return_processing=return_processing,
predict_terminal_values=predict_terminal_values,
target_update_weight=target_update_weight, target_sync_frequency=target_sync_frequency,
state_preprocessing=state_preprocessing, reward_preprocessing=reward_preprocessing,
exploration=exploration, variable_noise=variable_noise,
l2_regularization=l2_regularization, entropy_regularization=entropy_regularization,
parallel_interactions=parallel_interactions,
config=config, saver=saver, summarizer=summarizer, tracking=tracking, recorder=recorder
)
policy = dict(
type='parametrized_value_policy', network=network, state_value_mode='implicit'
)
memory = dict(type='replay', capacity=memory)
update = dict(
unit='timesteps', batch_size=batch_size, frequency=update_frequency,
start=start_updating
)
optimizer = dict(type='adam', learning_rate=learning_rate)
objective = dict(type='action_value', huber_loss=huber_loss)
reward_estimation = dict(
horizon=horizon, discount=discount, predict_horizon_values='late',
estimate_advantage=False, predict_action_values=True,
return_processing=return_processing, predict_terminal_values=predict_terminal_values
)
baseline = policy
baseline_optimizer = dict(
type='synchronization', update_weight=target_update_weight,
sync_frequency=target_sync_frequency
)
baseline_objective = None
super().__init__(
# Agent
states=states, actions=actions, max_episode_timesteps=max_episode_timesteps,
parallel_interactions=parallel_interactions, config=config, recorder=recorder,
# TensorforceModel
policy=policy, memory=memory, update=update, optimizer=optimizer, objective=objective,
reward_estimation=reward_estimation,
baseline=baseline, baseline_optimizer=baseline_optimizer,
baseline_objective=baseline_objective,
l2_regularization=l2_regularization, entropy_regularization=entropy_regularization,
state_preprocessing=state_preprocessing, reward_preprocessing=reward_preprocessing,
exploration=exploration, variable_noise=variable_noise,
saver=saver, summarizer=summarizer, tracking=tracking, **kwargs
)
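# Illustrative usage sketch (not part of the original file): creates a Double DQN
# agent through the generic Agent.create() entry point, as described in the class
# docstring. The Gym level and the hyperparameter values below are assumptions
# chosen purely for illustration.
if __name__ == '__main__':
    from tensorforce import Agent, Environment

    environment = Environment.create(environment='gym', level='CartPole-v1')
    agent = Agent.create(
        agent='double_dqn', environment=environment, memory=10000, batch_size=32
    )
    states = environment.reset()
    actions = agent.act(states=states)
    states, terminal, reward = environment.execute(actions=actions)
    agent.observe(terminal=terminal, reward=reward)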
| 55.52381
| 146
| 0.641587
|
7953692b8e474a55cc21e4fc71752d4ce6d819e9
| 8,538
|
py
|
Python
|
experiments_koord/app_krd_py/dist_delivery_w_markers.krd.py
|
lyg1597/CyPhyHouseExperiments
|
7a2604a4c086bb86e10b4188f0b114d31ccdfa37
|
[
"NCSA"
] | 4
|
2020-08-09T01:34:29.000Z
|
2021-12-24T18:08:03.000Z
|
experiments_koord/app_krd_py/dist_delivery_w_markers.krd.py
|
lyg1597/CyPhyHouseExperiments
|
7a2604a4c086bb86e10b4188f0b114d31ccdfa37
|
[
"NCSA"
] | 2
|
2020-01-14T03:23:39.000Z
|
2020-11-12T17:51:09.000Z
|
experiments_koord/app_krd_py/dist_delivery_w_markers.krd.py
|
cyphyhouse/CyPhyHouse-Dev
|
b72bfc1a2beb379a1c3e429bb979815a82242707
|
[
"NCSA"
] | 3
|
2021-06-16T22:13:28.000Z
|
2022-02-16T13:10:58.000Z
|
import numpy as np
import rospy
from cym_gazebo import marker_builder
from cym_marker.msg import Marker
from src.config.configs import AgentConfig, MoatConfig
from src.harness.agentThread import AgentThread
from src.motion.deconflict import clear_path
from src.motion.rectobs import RectObs
from src.motion.cylobs import CylObs
from src.objects.udt import Task
from src.motion.pos_types import pos3d, Pos
MARKER_Z = 0.05
class TaskApp(AgentThread):
TASKS = [
Task(pos3d(pos[0], pos[1], pos[2]), i, False, None)
for i, pos in enumerate([
# Drones' tasks
(+3.0, 1.8, 4.0),
(-3.0, 0.2, 2.6),
(+2.5, 6.3, 4.0),
(-2.5, -4.5, 2.6),
(+3.0, -4.5, 4.0),
(-1.5, 6.3, 1.3),
(-3.0, 1.8, 2.6),
(+3.0, 0.2, 1.3),
(-2.5, 6.3, 2.6),
(-2.5, -6.0, 2.6),
(+3.0, -6.0, 4.0),
])
]
SHELVE_SCALE = np.array([5.4, 2.0, 6.0])
OBSTACLES = [
CylObs(Pos(np.array([3.8239, 3.403029, 0])), radius=0.5, height=1.6),
RectObs(Pos(np.array([1.0, 3.4, 3.0])), SHELVE_SCALE),
RectObs(Pos(np.array([-4.13394, 3.4, 3.0])), SHELVE_SCALE),
RectObs(Pos(np.array([1.0, 4.7, 3.0])), SHELVE_SCALE),
RectObs(Pos(np.array([-4.13394, 4.7, 3.0])), SHELVE_SCALE),
RectObs(Pos(np.array([1.0, -1.5, 3.0])), SHELVE_SCALE),
RectObs(Pos(np.array([-4.13394, -1.5, 3.0])), SHELVE_SCALE),
RectObs(Pos(np.array([1.0, -2.8, 3.0])), SHELVE_SCALE),
RectObs(Pos(np.array([-4.13394, -2.8, 3.0])), SHELVE_SCALE),
RectObs(Pos(np.array([1.0, -7.8, 3.0])), SHELVE_SCALE),
RectObs(Pos(np.array([-4.13394, -7.8, 3.0])), SHELVE_SCALE),
RectObs(Pos(np.array([1.0, -9.1, 3.0])), SHELVE_SCALE),
RectObs(Pos(np.array([-4.13394, -9.1, 3.0])), SHELVE_SCALE),
RectObs(Pos(np.array([-8.34545, 0.0, 1.6])), np.array([0.4, 7.8, 3.2])),
RectObs(Pos(np.array([-3.5, 9.0, 1.6])), np.array([7.8, 0.4, 3.2]))
]
def __init__(self, agent_config: AgentConfig, moat_config: MoatConfig):
super(TaskApp, self).__init__(agent_config, moat_config)
self._pub_marker = rospy.Publisher("/cym_marker", Marker,
queue_size=10,
latch=True)
if self.agent_gvh.is_leader:
self.init_all_task_markers()
self.init_all_obstacle_markers()
self.moat.planner.min_rand = -10.0
self.moat.planner.max_rand = 10.0
self.moat.planner.min_zrand = 0.0
self.moat.planner.max_zrand = 4.5
self.moat.planner.expand_dis = 3.0
self.moat.planner.max_iter = 200
def initialize_vars(self):
self.initialize_lock('pick_route')
self.agent_gvh.create_aw_var('tasks', list, TaskApp.TASKS)
self.agent_gvh.create_ar_var('route', list, [self.moat.position])
self.locals['my_task'] = None
self.locals['test_route'] = None
self.locals['doing'] = False
self.locals['tasks'] = []
self.locals['obstacles'] = self.OBSTACLES
def loop_body(self):
if not self.locals['doing']:
if sum([int(a.assigned) for a in self.read_from_shared('tasks', None)]) == len(
self.read_from_shared('tasks', None)):
self.trystop()
return
if self.lock('pick_route'):
self.locals['tasks'] = self.read_from_shared('tasks', None)
print("Agent", self.pid(), "at", self.moat.position,
"has lock. Remaining tasks:",
[t.id for t in self.locals['tasks'] if not t.assigned])
for i in range(len(self.locals['tasks'])):
if not self.locals['tasks'][i].assigned:
self.locals['my_task'] = self.locals['tasks'][i]
self.locals['test_route'] = self.moat.planner.find_path(self.moat.position,
self.locals['my_task'].location,
self.locals['obstacles'])
if clear_path([path for path in
[self.read_from_shared('route', pid) for pid in range(self.num_agents())]],
self.locals['test_route'], self.pid(), tolerance=1.0):
self.locals['doing'] = True
self.locals['my_task'].assign(self.pid())
self.assign_task_marker() # Add a visual marker in simulator
self.locals['tasks'][i] = self.locals['my_task']
self.agent_gvh.put('tasks', self.locals['tasks'])
self.agent_gvh.put('route', self.locals['test_route'], self.pid())
print("Agent", self.pid(), "is going to task", i, "at", self.locals['my_task'].location)
self.moat.follow_path(self.locals['test_route'])
else:
self.agent_gvh.put('route', [self.moat.position],
self.pid())
self.locals['my_task'] = None
self.locals['doing'] = False
continue
break
if not self.locals['doing']:
print("Agent", self.pid(), "didnt find a clear path")
self.unlock('pick_route')
rospy.sleep(0.05)
else:
if self.moat.reached:
if self.locals['my_task'] is not None:
self.finish_task_marker()
self.locals['my_task'] = None
self.locals['doing'] = False
rospy.sleep(1.0) # Wait at the task for a while
return
def assign_task_marker(self):
marker = add_target_marker(self.locals['my_task'])
self._pub_marker.publish(marker)
def finish_task_marker(self):
marker = add_reached_marker(self.locals['my_task'])
self._pub_marker.publish(marker)
def init_all_task_markers(self):
for task in TaskApp.TASKS:
rospy.sleep(0.4)
marker = add_init_marker(task)
if marker:
self._pub_marker.publish(marker)
def init_all_obstacle_markers(self):
for i, obs in enumerate(self.OBSTACLES):
rospy.sleep(0.4)
marker = add_obstacle_marker("obstacle", i, obs)
if marker:
self._pub_marker.publish(marker)
def task_marker_builder(task: Task):
pos = task.location
if pos.z == 0: # Ground tasks
builder = marker_builder.PutCylinder()
builder.scale.z = 0.01
builder.pose.position.z = pos.z + MARKER_Z
else:
builder = marker_builder.PutSphere()
builder.scale.z = 1.0
builder.pose.position.z = pos.z
builder.ns = "tasks"
builder.id = task.id
builder.pose.position.x = pos.x
builder.pose.position.y = pos.y
builder.scale.x = 1.0
builder.scale.y = 1.0
return builder
def add_obstacle_marker(ns: str, m_id: int, obs):
if isinstance(obs, RectObs):
builder = marker_builder.PutBox()
builder.scale.x = obs.size[0]
builder.scale.y = obs.size[1]
builder.scale.z = obs.size[2]
elif isinstance(obs, CylObs):
builder = marker_builder.PutCylinder()
builder.scale.x = obs.diameter
builder.scale.y = obs.diameter
builder.scale.z = obs.size[2]
else:
return None
builder.ns = ns
builder.id = m_id
builder.pose.position.x = obs.position.x
builder.pose.position.y = obs.position.y
builder.pose.position.z = obs.position.z
builder.use_material("Gazebo/YellowTransparent")
return builder.build()
def add_init_marker(task: Task):
builder = task_marker_builder(task)
builder.use_material("Gazebo/BlackTransparent")
return builder.build()
def add_target_marker(task: Task):
builder = task_marker_builder(task)
builder.use_material("Gazebo/RedTransparent")
return builder.build()
def add_reached_marker(task: Task):
builder = task_marker_builder(task)
builder.use_material("Gazebo/GreenTransparent")
return builder.build()
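# Illustrative sketch (not part of the original app): the helpers above map a task's
# lifecycle to marker colours (init: black, assigned: red, reached: green). Building
# a marker needs the cym_marker/ROS stack, so this only runs inside the simulation
# environment; the task used here is a hypothetical example.
def _task_marker_example():
    task = Task(pos3d(1.0, 2.0, 0.0), 99, False, None)
    return [add_init_marker(task), add_target_marker(task), add_reached_marker(task)]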
| 39.527778
| 116
| 0.544273
|
79536a5549e67dd22565fe42a5ca2c8048956069
| 4,080
|
py
|
Python
|
setup/python_scripts/config.py
|
CIRCUSPi/ROSKY2
|
7df75cba4562424c1af5bd0b73597bb78ebaca3c
|
[
"Apache-2.0"
] | null | null | null |
setup/python_scripts/config.py
|
CIRCUSPi/ROSKY2
|
7df75cba4562424c1af5bd0b73597bb78ebaca3c
|
[
"Apache-2.0"
] | null | null | null |
setup/python_scripts/config.py
|
CIRCUSPi/ROSKY2
|
7df75cba4562424c1af5bd0b73597bb78ebaca3c
|
[
"Apache-2.0"
] | 1
|
2021-09-10T01:17:02.000Z
|
2021-09-10T01:17:02.000Z
|
#!/bin/usr/python3
#
#Copyright (c) 2021 Wei-Chih Lin(weichih.lin@protonmail.com)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os, sys, re
from ruamel.yaml import YAML
import argparse, getpass
class CONFIG_YAML(object):
'''
Default configuration:
- project: ROSKY
- ros_version: 2
'''
def __init__(self, ros_version=1):
# get project
self.project = re.split('/', os.getcwd())
self.project = self.project[len(self.project) - 3] # find project name
# set common function
self.yaml = YAML()
#setup argument
self.shell = os.popen("echo $SHELL | awk -F '/' '{print $NF}'").readlines()[0].rstrip("\n")
self.home_path = os.popen("echo $HOME").readlines()[0].rstrip("\n")
#print(self.home_path )
self.ubuntu = os.popen("grep RELEASE /etc/lsb-release | awk -F '=' '{print $2}'").readlines()[0].rstrip("\n")
## use inter function
self.ros_distro = self.get_ros_distro(ros_version=ros_version)
def read(self, _file):
with open(_file) as _file:
content = self.yaml.load(_file)
return content
def write(self, content, _file):
"""
write(content, _file)
"""
with open(_file, "w") as outfile:
self.yaml.preserve_quotes = False
self.yaml.dump(content, outfile)
print(f"Done! Now you can open \"{_file}\" to check out!")
def get_ros_distro(self, ros_version=1):
ros_distro_list = self.read(self.get_path(folder="/setup/env_variables", _file="ros_distro.yaml"))
return f"ROS {ros_version} " + ros_distro_list[f"ROS{ros_version}"][self.ubuntu]
def get_path(self, project="", folder="setup", _file="environment.sh"):
"""
Return: (file path) -> String
Args:
            project: String, Top folder name
folder: String, middle folder path
_file: String, File Name
get_path(project=String, folder=String, _file=String)
"""
project = self.project if project == "" else project
return f"{self.home_path}/{project}/{folder}/{_file}".replace("//", "/")
if __name__ == '__main__':
# call class
config = CONFIG_YAML()
#test_file = config.get_path(_file="test.yaml")
parser = argparse.ArgumentParser()
## ros version
parser.add_argument("--ros-version",
type=int,
help=f"Please set ros version that you want to add command.",
default=2)
## cmd
parser.add_argument("--cmd",
type=str,
help="source environment.sh",
default="source ~/ROSKY2/setup/shell_scripts/environment.sh")
args = parser.parse_args()
# set config.ros_distro
config.ros_distro = config.get_ros_distro(ros_version=args.ros_version)
# configure argument
file_path = {
"ros_menu": config.get_path(project="ros_menu", folder="", _file="config.yaml"),
}
# read content
content = {
"ros_menu": config.read(file_path["ros_menu"]),
}
# configure cmd
cmd = args.cmd
if content["ros_menu"]["Menu"][config.ros_distro]["cmds"] is None:
content["ros_menu"]["Menu"][config.ros_distro]["cmds"] = [cmd]
else:
content["ros_menu"]["Menu"][config.ros_distro]["cmds"].insert(0, cmd)
# write content
config.write(content["ros_menu"], file_path["ros_menu"])
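# Illustrative sketch (not part of the original script): the intended read-modify-write
# cycle of CONFIG_YAML. It assumes the script runs inside the ROSKY2 tree (the
# constructor shells out for $HOME/$SHELL and loads setup/env_variables/ros_distro.yaml),
# and "example.yaml" is a hypothetical file name.
def _config_yaml_example():
    cfg = CONFIG_YAML(ros_version=2)
    path = cfg.get_path(folder="setup/env_variables", _file="example.yaml")
    data = cfg.read(path)   # ruamel.yaml round-trip load keeps comments/ordering
    data["cmds"] = ["source ~/ROSKY2/setup/shell_scripts/environment.sh"]
    cfg.write(data, path)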
| 32.64
| 117
| 0.601961
|
79536adfc7068ace3f4850126f7e62b0fb5d2d30
| 8,543
|
py
|
Python
|
receptor-kv/components/inputform.py
|
vallemrv/django-tpv
|
75f738f13ffb7b4ca53a06d79ea6c4aef6eb12aa
|
[
"Apache-2.0"
] | 2
|
2018-07-12T23:03:11.000Z
|
2018-07-16T19:09:01.000Z
|
receptor-kv/components/inputform.py
|
vallemrv/django-tpv
|
75f738f13ffb7b4ca53a06d79ea6c4aef6eb12aa
|
[
"Apache-2.0"
] | null | null | null |
receptor-kv/components/inputform.py
|
vallemrv/django-tpv
|
75f738f13ffb7b4ca53a06d79ea6c4aef6eb12aa
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# @Author: Manuel Rodriguez <valle>
# @Date: 16-Jul-2017
# @Email: valle.mrv@gmail.com
# @Filename: imputform.py
# @Last modified by: valle
# @Last modified time: 09-Feb-2018
# @License: Apache license vesion 2.0
from kivy.uix.relativelayout import RelativeLayout
from kivy.uix.anchorlayout import AnchorLayout
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.behaviors import ButtonBehavior
from kivy.properties import (StringProperty, ObjectProperty, OptionProperty,
ListProperty, DictProperty, BooleanProperty, AliasProperty)
from kivy.lang import Builder
from kivy.animation import Animation
from kivy.metrics import dp
import components.resources as res
Builder.load_file(res.get_kv('inputform'))
tipo = [
'CharField', 'DecimalField', 'DateField',
'DateTimeField', 'BooleanField', 'IntegerField', 'FloatField', 'TextField'
]
def get_type_control(tipo):
if tipo in ['CharField', 'EmailField']:
return "text"
elif tipo in ["TextField"]:
return "text_edit"
elif tipo in ['IntegerField']:
return "number"
elif tipo in ['DateField',]:
return "date"
elif tipo in ['DateTimeField',]:
return "datetime"
elif tipo in ['DecimalField', 'FloatField',]:
return "float"
elif tipo in ['BooleanField',]:
return "boolean"
class FloatTextInput(AnchorLayout):
focus = BooleanProperty(False)
text = StringProperty("")
label = StringProperty("")
controller = ObjectProperty(None)
input = ObjectProperty(None)
active = BooleanProperty(False)
def __init__(self, **kargs):
super(FloatTextInput, self).__init__(**kargs)
def on_focus(self, w, l):
if not self.focus and self.input and self.active:
self.controller.hide(self.input)
class FloatColorInput(BoxLayout):
active = BooleanProperty(False)
def __init__(self, **kargs):
super(FloatColorInput, self).__init__(**kargs)
class FloatControl(RelativeLayout):
controller = ObjectProperty(None)
input = ObjectProperty(None)
text = StringProperty('')
focus = BooleanProperty(False)
label = StringProperty("")
content = ObjectProperty(None)
type_control = OptionProperty("text",
options=('text', 'color', 'password', 'date',
'number', "float", "text_edit", "boolean", "select"))
def __init__(self, **kargs):
super(FloatControl, self).__init__(**kargs)
self.textinput = FloatTextInput()
self.bind(focus=self.textinput.setter('focus'))
self.colorinput = FloatColorInput()
def on_controller(self, w, l):
self.textinput.controller = l
self.colorinput.controller = l
def collide_point(self, x, y):
return (x > self.x and x < self.x +self.width) and (y > self.y and y < self.y +self.height)
def on_touch_down(self, touch, *args):
super(FloatControl, self).on_touch_down(touch)
if self.collide_point(touch.x, touch.y):
return True
def print_type(self):
if self.content:
self.content.clear_widgets()
self.textinput.active = False
self.colorinput.active = False
input_control = None
if self.type_control in ('text', 'password'):
input_control = self.textinput
self.textinput.ids._input.password = (self.type_control == "password")
if self.type_control == 'color':
input_control = self.colorinput
input_control.text = self.text
input_control.label = self.label
input_control.input = self.input
input_control.active = True
self.content.add_widget(input_control)
def on_input(self, w, val):
self.text = val.text
self.label = val.label
self.type_control = val.type_control
self.print_type()
# This is how the control sees it
class FormControl(RelativeLayout):
label = StringProperty("")
text = StringProperty("")
color = StringProperty("#000000")
bg_color = StringProperty("#ffffff")
font_size = StringProperty("30dp")
controller = ObjectProperty(None)
name = StringProperty("")
def __init__(self, model=None, **kargs):
self.isFormControl = True
kargs = {}
if model:
for k in model:
kargs[k] = model[k]
super(FormControl, self).__init__(**kargs)
def add_widget(self, widget):
if len(self.children) < 1:
super(FormControl,self).add_widget(widget)
else:
self.content.add_widget(widget)
class InputTextControl(ButtonBehavior, FormControl):
def set_text(self, value):
self.__text__ = value
def get_text(self):
if self.type_control == "password":
return "*" * len(self.__text__)
else:
return self.__text__
text = AliasProperty(get_text, set_text, bind=['__text__'])
__text__ = StringProperty("")
type_control = OptionProperty("text",
options=('text', 'color', 'password', 'date'))
def on_release(self):
if self.controller:
self.controller.show_text_input(self)
class CheckBoxControl(FormControl):
def set_text(self, value):
self.active = value
def get_text(self):
return self.active
text = AliasProperty(get_text, set_text, bind=['active'])
active = BooleanProperty(False)
def on_active(self, w, l):
if self.controller:
self.controller.on_model_chage(self)
class InputForm(RelativeLayout):
bg_color = StringProperty("#ffffff")
model = DictProperty({})
form_content = ObjectProperty(None)
on_press = ObjectProperty(None)
plantilla = DictProperty({
"all":{
'font_size': '20dp',
'size_hint': (1, None),
'height': '50dp',
'type_control': 'text'
}
})
def __init__(self, **kargs):
super(InputForm, self).__init__(**kargs)
def __clear_model__(self):
self.model = {}
self.form_content.clear_widgets()
def on_model_chage(self, form_control):
self.model[form_control.name] = unicode(form_control.__text__)
def hide(self, input):
self.float_input.focus = False
ani = Animation(x=self.width*2, duration=0.05)
ani.start(self.float_input)
self.on_model_chage(input)
def show_text_input(self, input):
ani = Animation(x=0, duration=0.05)
ani.bind(on_complete=self.on_complete)
self.float_input.input = input
ani.start(self.float_input)
def on_complete(self, ani, w):
self.float_input.focus = True
def add_widget(self, widget):
if len(self.children) < 3:
super(InputForm, self).add_widget(widget)
else:
if hasattr(widget, 'isFormControl'):
self.model[widget.name] = widget.text
height = self.form_content.parent.height + widget.height + dp(25)
self.form_content.parent.height = height
self.form_content.add_widget(widget, 0)
def add_model(self, model, columns=None, tmpl=None):
self.__clear_model__()
self.model = model
columns = columns if columns else model.keys()
for k in columns:
if tmpl and k in tmpl:
plantilla = self.plantilla.get("all").copy()
plantilla.update(tmpl[k])
elif k in self.plantilla:
plantilla = self.plantilla.get(k).copy()
else:
plantilla = self.plantilla.get("all").copy()
plantilla["name"] = k
plantilla["text"] = unicode(model[k])
plantilla["controller"] = self
if not "label" in plantilla:
plantilla["label"] = k.title()
if 'type_control' in plantilla and plantilla['type_control'] != 'text':
type_control = plantilla['type_control']
if type_control == 'checkbox':
input = CheckBoxControl(model=plantilla)
else:
input = InputTextControl(model=plantilla)
else:
input = InputTextControl(model=plantilla)
self.add_widget(input)
def form_check(self):
if self.on_press:
self.on_press(self.model)
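# Illustrative sketch (not part of the original widget code): InputForm.add_model
# resolves one template per field by copying the "all" entry of `plantilla` and
# layering the per-field override from `tmpl` on top. The snippet reproduces only
# that dictionary merge, so it runs without a Kivy application; the field names
# are hypothetical.
def _resolve_plantilla_example():
    plantilla = {"all": {"font_size": "20dp", "size_hint": (1, None),
                         "height": "50dp", "type_control": "text"}}
    tmpl = {"activo": {"type_control": "checkbox", "label": "Activo"}}
    resolved = {}
    for field in ("nombre", "activo"):
        entry = plantilla["all"].copy()
        entry.update(tmpl.get(field, {}))
        entry.setdefault("label", field.title())
        resolved[field] = entry
    return resolved  # "activo" resolves to a checkbox control, "nombre" to a text control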
| 31.996255
| 99
| 0.609388
|
79536b02902fc734538f9b915077e5d79fce36be
| 15,369
|
py
|
Python
|
inference.py
|
banr1jnts/Earthquake_Prediction
|
13757e3498ef26e8db261fa04c0437f0f1d2e884
|
[
"Apache-2.0"
] | 1
|
2019-02-28T10:49:00.000Z
|
2019-02-28T10:49:00.000Z
|
inference.py
|
banr1jnts/earthquake_prediction
|
13757e3498ef26e8db261fa04c0437f0f1d2e884
|
[
"Apache-2.0"
] | null | null | null |
inference.py
|
banr1jnts/earthquake_prediction
|
13757e3498ef26e8db261fa04c0437f0f1d2e884
|
[
"Apache-2.0"
] | null | null | null |
import tensorflow as tf
import numpy as np
import pandas as pd
import os
import glob
import datetime
import random
import keras.optimizers
import keras.backend as K
from keras.callbacks import ReduceLROnPlateau, ModelCheckpoint, CSVLogger
from params import args
import models, naives, losses
if __name__ == '__main__':
mdl_name = args.model
ver = args.version
opt_name = args.optimizer
learning_rate = args.learning_rate
decay = args.decay
ls_name = args.loss
stateful = args.stateful
lb = args.lookback
batch_size = args.batch_size
epochs = args.epochs
num_filters = args.num_filters
dropouts = args.dropouts
recurrent_dropouts = args.recurrent_dropouts
random_seed = args.random_seed
naive_period = args.naive_period
st_day = args.start_day
sp1_day = args.split_day_1
sp2_day = args.split_day_2
ed_day = args.end_day
input_raw_dir = args.input_raw_dir
input_preprocessed_dir = args.input_preprocessed_dir
log_dir = args.log_dir
vb = args.verbose
record = args.record
fmt = '%Y-%m-%d'
os.environ['PYTHONHASHSEED'] = '0'
np.random.seed(random_seed)
random.seed(random_seed)
tf.set_random_seed(random_seed)
def list_to_str(list):
return ' '.join(map(str, list))
def get_default_lr(optimizer_name):
dict = {'SGD': 0.01, 'RMSprop':0.001, 'Adagrad': 0.01,
'Adadelta':1.0, 'Adam': 0.001, 'Nadam': 0.002}
return dict[optimizer_name]
def get_period(start_day, split_day_1, split_day_2, end_day):
train_st = start_day
train_ed = split_day_1 - datetime.timedelta(days=1)
train_period = (train_ed - train_st).days + 1
val_st = split_day_1
val_ed = split_day_2 - datetime.timedelta(days=1)
val_period = (val_ed - val_st).days + 1
test_st = split_day_2
test_ed = end_day
test_period = (test_ed - test_st).days + 1
print('train period: {} ~ {} ({:0=4}days)'.format(train_st.strftime(fmt),
train_ed.strftime(fmt),
train_period))
print('validate period: {} ~ {} ({:0=4}days)'.format(val_st.strftime(fmt),
val_ed.strftime(fmt),
val_period))
print('test period: {} ~ {} ({:0=4}days)'.format(test_st.strftime(fmt),
test_ed.strftime(fmt),
test_period))
return train_period, val_period, test_period
def find_class(name, modules):
modules = [getattr(module, name, None) for module in modules]
return next(a for a in modules if a)
def raw_to_csv(raw_files, csv_file):
with open(csv_file, 'w') as csv:
header = "year,month,day,longitude,latitude,depth,magnitude\n"
csv.writelines(header)
for raw_file in raw_files:
with open(raw_file, 'r') as raw:
raw_lines = raw.readlines()
for raw_line in raw_lines:
if raw_line[0] != 'J':
continue
elif raw_line[52] in [' ', '-', 'A', 'B', 'C']:
continue
year = raw_line[1:5]
month = raw_line[5:7]
day = raw_line[7:9]
latitude = raw_line[22:24]
longitude = raw_line[33:36]
depth = raw_line[45:49].strip()
magnitude = raw_line[52:54]
csv_list = [year, month, day,
longitude, latitude, depth, magnitude]
csv_line = ", ".join(csv_list) + "\n"
csv.writelines(csv_line)
def get_grid_data(df):
df['latlon'] = df['latitude'].astype(str) + '-' \
+ df['longitude'].astype(str)
df_area = pd.read_table(input_raw_dir + 'mainland.forecast.nodes.dat',
names=('lon', 'lat'),
delim_whitespace=True,)
df_area['latlon'] = df_area['lat'].astype(str).str[:2] + '-' \
+ df_area['lon'].astype(str).str[:3]
area = list(df_area['latlon'].unique())
df = df[df['latlon'].isin(area)]
df = df[['year', 'month', 'day', 'latlon']]
df = df.set_index('latlon')
df = df.reset_index()
return df
def get_daily_data(df, start, end, dummy_col):
start = str(start)
end = str(end)
df['time'] = df['year'].astype(str) + '-' \
+ df['month'].astype(str) + '-' + df['day'].astype(str)
df['time'] = pd.to_datetime(df['time'])
df = df[['time', 'latlon']]
df = pd.get_dummies(df, columns=['latlon'], prefix='', prefix_sep='')
col = pd.DataFrame(columns=dummy_col)
df = pd.concat([col, df], join='outer', sort=False)
df = df.fillna(0)
df = df.groupby('time').sum()
df = df[start: end]
idx = pd.DataFrame(index=pd.date_range(start, end))
df = pd.merge(idx, df, how='outer', left_index=True, right_index=True)
df = df.fillna(0)
df = df.astype(int)
return df
def get_test_true(test_gen, test_steps):
for step in range(test_steps):
_, target = next(test_gen)
day_target = np.mean(target, axis=1)
if step == 0:
bin_targets = target
day_targets = day_target
else:
bin_targets = np.vstack((bin_targets, target))
day_targets = np.hstack((day_targets, day_target))
return np.mean(bin_targets, axis=0), day_targets
def model_evaluate(test_gen, test_steps, pre_mean_loss, target_length, model):
for step in range(test_steps):
sample, target = next(test_gen)
pred = model.predict(sample)
day_pred = np.mean(pred, axis=1)
bin_error = pre_mean_loss(target, pred)
day_error = np.mean(bin_error, axis=1)
if step == 0:
bin_preds = pred
day_preds = day_pred
bin_errors = bin_error
day_errors = day_error
else:
bin_preds = np.vstack((bin_preds, pred))
day_preds = np.hstack((day_preds, day_pred))
bin_errors = np.vstack((bin_errors, bin_error))
day_errors = np.hstack((day_errors, day_error))
return (np.mean(bin_preds, axis=0), day_preds,
np.mean(bin_errors, axis=0), day_errors)
def generator(data, lookback, min_idx, max_idx, batch_size, target_length):
if max_idx is None:
max_idx = len(data) - 1
i = min_idx + lookback
while 1:
if i + batch_size > max_idx + 1:
i = min_idx + lookback
rows = np.arange(i, min(i + batch_size, max_idx + 1))
i += len(rows)
samples = np.zeros((len(rows), lookback, data.shape[-1]))
targets = np.zeros((len(rows), target_length))
for j, row in enumerate(rows):
idxs = range(rows[j] - lookback, rows[j])
samples[j] = data[idxs]
targets[j] = data[rows[j]][-target_length:]
yield samples, targets
def main():
session_conf = tf.ConfigProto(
intra_op_parallelism_threads=1,
inter_op_parallelism_threads=1
)
sess = tf.Session(graph=tf.get_default_graph(), config=session_conf)
K.set_session(sess)
train_period, val_period, test_period = get_period(
st_day, sp1_day, sp2_day, ed_day)
raw_files = sorted(glob.glob(input_raw_dir + 'h????'))
csv_file = input_preprocessed_dir + 'df.csv'
if not os.path.exists(csv_file):
raw_to_csv(raw_files, csv_file)
df = pd.read_csv(csv_file, low_memory=False)
df_m2 = df[df['magnitude'] >= 20]
df_m2 = get_grid_data(df_m2)
latlon = np.sort(df_m2['latlon'].unique())
df_m2 = get_daily_data(df_m2, start=st_day, end=ed_day, dummy_col=latlon)
df_m4 = df[df['magnitude'] >= 40]
df_m4 = get_grid_data(df_m4)
df_m4 = get_daily_data(df_m4, start=st_day, end=ed_day, dummy_col=latlon)
data_m2 = df_m2.values.astype(np.float64)
data_m4 = df_m4.values.astype(np.float64)
max_m2 = data_m2.max(axis=(0,1))
max_m4 = data_m4.max(axis=(0,1))
data_m2 = data_m2 * max_m4 / max_m2
target_length = data_m4.shape[1]
data = np.hstack([data_m2, data_m4])
print('data shape: {}'.format(data.shape))
lr = learning_rate if learning_rate else get_default_lr(opt_name)
optimizer = find_class(opt_name, [keras.optimizers])(lr=lr, decay=decay)
loss = find_class(ls_name, [losses, keras.losses])
pre_mean_loss = find_class(ls_name.replace('mean_', ''), [losses])
train_gen = generator(data,
lookback=lb,
min_idx=0,
max_idx=train_period - 1,
batch_size=batch_size,
target_length=target_length)
val_gen = generator(data,
lookback=lb,
min_idx=train_period,
max_idx=train_period + val_period - 1,
batch_size=batch_size,
target_length=target_length)
test_gen = generator(data,
lookback=lb,
min_idx=train_period + val_period,
max_idx=None,
batch_size=batch_size,
target_length=target_length)
train_steps = (train_period - lb) // batch_size
val_steps = (val_period - lb) // batch_size
test_steps = (len(data) - (train_period + val_period) - lb) // batch_size
naive_class = find_class('Poissonnaive', [naives])()
model_class = find_class(mdl_name, [models])()
naive = naive_class.build_naive(data,
batch_size=batch_size,
target_length=target_length)
model = model_class.build_model(data,
lookback=lb,
batch_size=batch_size,
stateful=stateful,
target_length=target_length,
num_filters=num_filters,
dropouts=dropouts,
recurrent_dropouts=recurrent_dropouts)
model.compile(optimizer=optimizer, loss=loss)
model.summary()
print('optimizer: {} (lr={})\nloss: {}\n'.format(opt_name, lr, ls_name))
callbacks = [
ModelCheckpoint(filepath=log_dir + 'ckpt_{}{}.h5'.format(mdl_name, ver),
monitor='val_loss', save_best_only=True, verbose=vb),
ReduceLROnPlateau(monitor='val_loss', factor=0.1,
patience=10, verbose=vb),
CSVLogger(log_dir + 'history_{}{}.csv'.format(mdl_name, ver)),
]
print('【training】')
history = model.fit_generator(train_gen,
steps_per_epoch=train_steps,
epochs=epochs,
validation_data=val_gen,
validation_steps=val_steps,
callbacks=callbacks,
verbose=vb)
print('【evaluation】')
bin_true, day_true = get_test_true(test_gen, test_steps)
true = np.mean(bin_true)
nv_bin_pred, nv_day_pred, nv_bin_eval, nv_day_eval = model_evaluate(
test_gen, test_steps, pre_mean_loss, target_length, naive)
nv_pred = np.mean(nv_bin_pred)
nv_eval = np.mean(nv_bin_eval)
md_bin_pred, md_day_pred, md_bin_eval, md_day_eval = model_evaluate(
test_gen, test_steps, pre_mean_loss, target_length, model)
md_pred = np.mean(md_bin_pred)
md_eval = np.mean(md_bin_eval)
print('Naivemodel: {}'.format(nv_eval))
print('{}{}: {}'.format(mdl_name, ver, md_eval))
df_day_true = pd.DataFrame(day_true,
index=pd.date_range(end=ed_day, periods=93),
columns=['True value'])
df_nv_day_pred = pd.DataFrame(nv_day_pred,
index=pd.date_range(end=ed_day, periods=93),
columns=['Naive predicton'])
df_md_day_pred = pd.DataFrame(md_day_pred,
index=pd.date_range(end=ed_day, periods=93),
columns=['{} prediction'.format(mdl_name)])
df_nv_day_eval = pd.DataFrame(nv_day_eval,
index=pd.date_range(end=ed_day, periods=93),
columns=['Naive error'])
df_md_day_eval = pd.DataFrame(md_day_eval,
index=pd.date_range(end=ed_day, periods=93),
columns=['{} error'.format(mdl_name)])
df_day_eval = pd.concat([df_nv_day_pred, df_md_day_pred, df_day_true,
df_nv_day_eval, df_md_day_eval],
axis=1)
df_day_eval.to_csv(log_dir + 'eval_day_{}{}.csv'.format(mdl_name, ver))
df_bin_true = pd.DataFrame(bin_true,
index=latlon,
columns=['True value'])
df_nv_bin_pred = pd.DataFrame(nv_bin_pred,
index=latlon,
columns=['Naive predicton'])
df_md_bin_pred = pd.DataFrame(md_bin_pred,
index=latlon,
columns=['{} prediction'.format(mdl_name)])
df_nv_bin_eval = pd.DataFrame(nv_bin_eval,
index=latlon,
columns=['Naive error'])
df_md_bin_eval = pd.DataFrame(md_bin_eval,
index=latlon,
columns=['{} error'.format(mdl_name)])
df_bin_eval = pd.concat([df_nv_bin_pred, df_md_bin_pred, df_bin_true,
df_nv_bin_eval, df_md_bin_eval],
axis=1)
df_bin_eval.to_csv(log_dir + 'eval_bin_{}{}.csv'.format(mdl_name, ver))
if not record:
return
now = datetime.datetime.now().strftime(fmt)
record_file = log_dir + 'record.csv'
str_num_filters = list_to_str(num_filters)
str_dropouts = list_to_str(dropouts)
str_recurrent_dropouts = list_to_str(recurrent_dropouts)
if os.path.exists(record_file):
with open(log_dir + 'record.csv', 'a') as f:
f.write('{},{},{}{},{},{},{},{},{},{},{},{}\n'
.format(now, md_eval, mdl_name, ver, str_num_filters,
opt_name, lr, decay, str_dropouts,
str_recurrent_dropouts, epochs,random_seed))
else:
with open(log_dir + 'record.csv', 'a') as f:
f.write('date,eval,model,filt,optm,lr,' \
'decay,drpout,r_drpout,epch,seed\n')
f.write('{},{},Naivemodel'.format(now, nv_eval) + ',None'*8 + '\n')
f.write('{},{},{}{},{},{},{},{},{},{},{},{}\n'
.format(now, md_eval, mdl_name, ver, str_num_filters,
opt_name, lr, decay, str_dropouts,
str_recurrent_dropouts, epochs,random_seed))
if __name__ == '__main__':
main()
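# Illustrative sketch (not part of the original script): the windowing performed by
# generator(). Each sample is `lookback` consecutive rows of the combined (M>=2, M>=4)
# matrix and the target is the last `target_length` columns of the row that follows
# the window. The toy shapes below are assumptions for illustration only.
def _window_example():
    toy = np.arange(20 * 6, dtype=float).reshape(20, 6)   # 20 days x 6 features
    lookback, target_length = 5, 3
    i = lookback                        # first index with a full history window
    sample = toy[i - lookback:i]        # shape (lookback, 6)
    target = toy[i][-target_length:]    # shape (target_length,)
    return sample.shape, target.shape   # ((5, 6), (3,))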
| 43.171348
| 80
| 0.554298
|
79536bbac3905f6519f5644bac1cdb6409632429
| 9,887
|
py
|
Python
|
saleor/discount/models.py
|
saurabhsingla15/himichain
|
f7e446c89951a60383632907fed14bde5c3ad6b5
|
[
"CC-BY-4.0"
] | null | null | null |
saleor/discount/models.py
|
saurabhsingla15/himichain
|
f7e446c89951a60383632907fed14bde5c3ad6b5
|
[
"CC-BY-4.0"
] | 16
|
2020-02-12T03:06:29.000Z
|
2022-02-10T20:29:25.000Z
|
saleor/discount/models.py
|
saurabhsingla15/himichain
|
f7e446c89951a60383632907fed14bde5c3ad6b5
|
[
"CC-BY-4.0"
] | null | null | null |
from decimal import Decimal
from functools import partial
from django.conf import settings
from django.db import models
from django.db.models import F, Q
from django.utils import timezone
from django.utils.translation import pgettext, pgettext_lazy
from django_countries.fields import CountryField
from django_prices.models import MoneyField
from django_prices.templatetags.prices import amount
from prices import Money, fixed_discount, percentage_discount
from ..core.utils.translations import TranslationProxy
from . import DiscountValueType, VoucherType
class NotApplicable(ValueError):
"""Exception raised when a discount is not applicable to a checkout.
The error is raised if the order value is below the minimum required
price or the order quantity is below the minimum quantity of items.
Minimum price will be available as the `min_spent` attribute.
Minimum quantity will be available as the `min_checkout_items_quantity` attribute.
"""
def __init__(self, msg, min_spent=None, min_checkout_items_quantity=None):
super().__init__(msg)
self.min_spent = min_spent
self.min_checkout_items_quantity = min_checkout_items_quantity
class VoucherQueryset(models.QuerySet):
def active(self, date):
return self.filter(
Q(usage_limit__isnull=True) | Q(used__lt=F("usage_limit")),
Q(end_date__isnull=True) | Q(end_date__gte=date),
start_date__lte=date,
)
def expired(self, date):
return self.filter(
Q(used__gte=F("usage_limit")) | Q(end_date__lt=date), start_date__lt=date
)
class Voucher(models.Model):
type = models.CharField(
max_length=20, choices=VoucherType.CHOICES, default=VoucherType.ENTIRE_ORDER
)
name = models.CharField(max_length=255, null=True, blank=True)
code = models.CharField(max_length=12, unique=True, db_index=True)
usage_limit = models.PositiveIntegerField(null=True, blank=True)
used = models.PositiveIntegerField(default=0, editable=False)
start_date = models.DateTimeField(default=timezone.now)
end_date = models.DateTimeField(null=True, blank=True)
# this field indicates if discount should be applied per order or
# individually to every item
apply_once_per_order = models.BooleanField(default=False)
apply_once_per_customer = models.BooleanField(default=False)
discount_value_type = models.CharField(
max_length=10,
choices=DiscountValueType.CHOICES,
default=DiscountValueType.FIXED,
)
discount_value = models.DecimalField(
max_digits=settings.DEFAULT_MAX_DIGITS,
decimal_places=settings.DEFAULT_DECIMAL_PLACES,
)
discount = MoneyField(amount_field="discount_value", currency_field="currency")
# not mandatory fields, usage depends on type
countries = CountryField(multiple=True, blank=True)
currency = models.CharField(
max_length=settings.DEFAULT_CURRENCY_CODE_LENGTH,
default=settings.DEFAULT_CURRENCY,
)
min_spent_amount = models.DecimalField(
max_digits=settings.DEFAULT_MAX_DIGITS,
decimal_places=settings.DEFAULT_DECIMAL_PLACES,
blank=True,
null=True,
)
min_spent = MoneyField(amount_field="min_spent_amount", currency_field="currency")
#min_checkout_items_quantity = models.PositiveIntegerField(null=True, blank=True)
min_checkout_items_quantity = models.DecimalField(max_digits=12,decimal_places=2,null=True, blank=True)
products = models.ManyToManyField("product.Product", blank=True)
collections = models.ManyToManyField("product.Collection", blank=True)
categories = models.ManyToManyField("product.Category", blank=True)
objects = VoucherQueryset.as_manager()
translated = TranslationProxy()
def __str__(self):
if self.name:
return self.name
discount = "%s %s" % (
self.discount_value,
self.get_discount_value_type_display(),
)
if self.type == VoucherType.SHIPPING:
if self.is_free:
return pgettext("Voucher type", "Free shipping")
return pgettext("Voucher type", "%(discount)s off shipping") % {
"discount": discount
}
if self.type == VoucherType.SPECIFIC_PRODUCT:
return pgettext("Voucher type", "%(discount)s off specific products") % {
"discount": discount
}
return pgettext("Voucher type", "%(discount)s off") % {"discount": discount}
@property
def is_free(self):
return (
self.discount_value == Decimal(100)
and self.discount_value_type == DiscountValueType.PERCENTAGE
)
def get_discount(self):
if self.discount_value_type == DiscountValueType.FIXED:
discount_amount = Money(self.discount_value, settings.DEFAULT_CURRENCY)
return partial(fixed_discount, discount=discount_amount)
if self.discount_value_type == DiscountValueType.PERCENTAGE:
return partial(percentage_discount, percentage=self.discount_value)
raise NotImplementedError("Unknown discount type")
def get_discount_amount_for(self, price: Money):
discount = self.get_discount()
after_discount = discount(price)
if after_discount.amount < 0:
return price
return price - after_discount
def validate_min_spent(self, value: Money):
if self.min_spent and value < self.min_spent:
msg = pgettext(
"Voucher not applicable",
"This offer is only valid for orders over %(amount)s.",
)
raise NotApplicable(
msg % {"amount": amount(self.min_spent)}, min_spent=self.min_spent
)
def validate_min_checkout_items_quantity(self, quantity):
min_checkout_items_quantity = self.min_checkout_items_quantity
if min_checkout_items_quantity and min_checkout_items_quantity > quantity:
msg = pgettext(
"Voucher not applicable",
(
"This offer is only valid for orders with a minimum of "
"%(min_checkout_items_quantity)d quantity."
),
)
raise NotApplicable(
msg % {"min_checkout_items_quantity": min_checkout_items_quantity},
min_checkout_items_quantity=min_checkout_items_quantity,
)
def validate_once_per_customer(self, customer_email):
voucher_customer = VoucherCustomer.objects.filter(
voucher=self, customer_email=customer_email
)
if voucher_customer:
msg = pgettext(
"Voucher not applicable", "This offer is valid only once per customer."
)
raise NotApplicable(msg)
class VoucherCustomer(models.Model):
voucher = models.ForeignKey(
Voucher, related_name="customers", on_delete=models.CASCADE
)
customer_email = models.EmailField()
class Meta:
unique_together = (("voucher", "customer_email"),)
class SaleQueryset(models.QuerySet):
def active(self, date=None):
if date is None:
date = timezone.now()
return self.filter(
Q(end_date__isnull=True) | Q(end_date__gte=date), start_date__lte=date
)
def expired(self, date=None):
if date is None:
date = timezone.now()
return self.filter(end_date__lt=date, start_date__lt=date)
class VoucherTranslation(models.Model):
language_code = models.CharField(max_length=10)
name = models.CharField(max_length=255, null=True, blank=True)
voucher = models.ForeignKey(
Voucher, related_name="translations", on_delete=models.CASCADE
)
class Meta:
unique_together = (("language_code", "voucher"),)
class Sale(models.Model):
name = models.CharField(max_length=255)
type = models.CharField(
max_length=10,
choices=DiscountValueType.CHOICES,
default=DiscountValueType.FIXED,
)
value = models.DecimalField(
max_digits=settings.DEFAULT_MAX_DIGITS,
decimal_places=settings.DEFAULT_DECIMAL_PLACES,
default=0,
)
products = models.ManyToManyField("product.Product", blank=True)
categories = models.ManyToManyField("product.Category", blank=True)
collections = models.ManyToManyField("product.Collection", blank=True)
start_date = models.DateTimeField(default=timezone.now)
end_date = models.DateTimeField(null=True, blank=True)
objects = SaleQueryset.as_manager()
translated = TranslationProxy()
class Meta:
app_label = "discount"
permissions = (
(
"manage_discounts",
pgettext_lazy("Permission description", "Manage sales and vouchers."),
),
)
def __repr__(self):
return "Sale(name=%r, value=%r, type=%s)" % (
str(self.name),
self.value,
self.get_type_display(),
)
def __str__(self):
return self.name
def get_discount(self):
if self.type == DiscountValueType.FIXED:
discount_amount = Money(self.value, settings.DEFAULT_CURRENCY)
return partial(fixed_discount, discount=discount_amount)
if self.type == DiscountValueType.PERCENTAGE:
return partial(percentage_discount, percentage=self.value)
raise NotImplementedError("Unknown discount type")
class SaleTranslation(models.Model):
language_code = models.CharField(max_length=10)
name = models.CharField(max_length=255, null=True, blank=True)
sale = models.ForeignKey(
Sale, related_name="translations", on_delete=models.CASCADE
)
class Meta:
unique_together = (("language_code", "sale"),)
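# Illustrative sketch (not part of the original module): Voucher.get_discount() and
# Sale.get_discount() return callables built with functools.partial around the
# `prices` helpers imported at the top of this module. The currency and amounts
# below are illustrative values only.
def _discount_example():
    price = Money("20.00", "USD")
    ten_off = partial(fixed_discount, discount=Money("10.00", "USD"))
    half_off = partial(percentage_discount, percentage=50)
    return ten_off(price), half_off(price)   # both yield a discounted price of 10 USD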
| 37.309434
| 107
| 0.67432
|
79536ca01ebc2a141ccaac78dc01b0d69fb0903b
| 998
|
py
|
Python
|
python/pyopenGL/ogl2/ogl_14_plot_2.py
|
SayanGhoshBDA/code-backup
|
8b6135facc0e598e9686b2e8eb2d69dd68198b80
|
[
"MIT"
] | 16
|
2018-11-26T08:39:42.000Z
|
2019-05-08T10:09:52.000Z
|
python/pyopenGL/ogl2/ogl_14_plot_2.py
|
SayanGhoshBDA/code-backup
|
8b6135facc0e598e9686b2e8eb2d69dd68198b80
|
[
"MIT"
] | 8
|
2020-05-04T06:29:26.000Z
|
2022-02-12T05:33:16.000Z
|
python/pyopenGL/ogl2/ogl_14_plot_2.py
|
SayanGhoshBDA/code-backup
|
8b6135facc0e598e9686b2e8eb2d69dd68198b80
|
[
"MIT"
] | 5
|
2020-02-11T16:02:21.000Z
|
2021-02-05T07:48:30.000Z
|
# Last done up to page 43; do the graph inequalities next.
from OpenGL.GL import *
from OpenGL.GLU import *
from OpenGL.GLUT import *
import sys
#from numpy import *
import numpy as np
import math
def init():
glClearColor(1.0,1.0,1.0,1.0)
gluOrtho2D(-5.0,5.0,-5.0,5.0)
def plotfunc():
glClear(GL_COLOR_BUFFER_BIT)
glColor3f(0.0,0.0,0.0) # color
glPointSize(1.0)
for a in np.arange(1.0,3.0,0.1):
for t in np.arange(-4.4,4.4,0.01):
x = 0.3*a*(t*t-3)
y = 0.1*a*t*(t*t-3)
glBegin(GL_POINTS)
glVertex2f(x,y)
glEnd()
glFlush()
def main():
glutInit(sys.argv) # tells the python we are going to be displaying GLUT style graphics
glutInitDisplayMode(GLUT_SINGLE | GLUT_RGB)
glutCreateWindow("Plot Points")
glutInitWindowSize(400,400)
glutInitWindowPosition(50,50)
glutDisplayFunc(plotfunc)
init()
glutMainLoop()
main()
| 25.589744
| 120
| 0.602204
|
79536caa54b0faf0ec6e56ee22ec98ad6ea8876f
| 484
|
py
|
Python
|
jp.atcoder/abc047/abc047_a/8143659.py
|
kagemeka/atcoder-submissions
|
91d8ad37411ea2ec582b10ba41b1e3cae01d4d6e
|
[
"MIT"
] | 1
|
2022-02-09T03:06:25.000Z
|
2022-02-09T03:06:25.000Z
|
jp.atcoder/abc047/abc047_a/8143659.py
|
kagemeka/atcoder-submissions
|
91d8ad37411ea2ec582b10ba41b1e3cae01d4d6e
|
[
"MIT"
] | 1
|
2022-02-05T22:53:18.000Z
|
2022-02-09T01:29:30.000Z
|
jp.atcoder/abc047/abc047_a/8143659.py
|
kagemeka/atcoder-submissions
|
91d8ad37411ea2ec582b10ba41b1e3cae01d4d6e
|
[
"MIT"
] | null | null | null |
packs = [int(candies) for candies in input().split()]
N = int(len(packs))
X = 2
if sum(packs) % X == 0:
A = int(sum(packs) / X)
else:
print("No")
exit()
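# dp[i][j]: True when some subset of the first i packs sums to exactly j candies;
# the packs can be split evenly iff dp[N][A] holds, with A = sum(packs) // 2.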
dp = [[False] * (A + 1) for _ in range(N + 1)]  # independent rows; `* (N + 1)` would alias one list
dp[0][0] = True
for i in range(N):
for j in range(A + 1):
if dp[i][j]:
dp[i + 1][j] = dp[i][j]
elif j >= packs[i]:
dp[i + 1][j] = dp[i][j - packs[i]]
if dp[N][A]:
ans = "Yes"
else:
ans = "No"
print(ans)
| 18.615385
| 54
| 0.423554
|
79536ce5d70033439fc5f14ce5a1d2dce405941c
| 674
|
py
|
Python
|
tests/unit/multi_dimensional_RNN/_test_mdgru_on_2d_grid.py
|
X-rayLaser/multi-directional-mdrnn
|
70b0e1c2e07b5f476c264c6700e8d34d41a2ce10
|
[
"MIT"
] | 12
|
2020-02-27T08:34:44.000Z
|
2022-03-15T12:22:32.000Z
|
tests/unit/multi_dimensional_RNN/_test_mdgru_on_2d_grid.py
|
X-rayLaser/multi-directional-mdrnn
|
70b0e1c2e07b5f476c264c6700e8d34d41a2ce10
|
[
"MIT"
] | 1
|
2021-02-23T16:29:05.000Z
|
2021-05-07T15:05:54.000Z
|
tests/unit/multi_dimensional_RNN/_test_mdgru_on_2d_grid.py
|
X-rayLaser/multi-directional-mdrnn
|
70b0e1c2e07b5f476c264c6700e8d34d41a2ce10
|
[
"MIT"
] | 2
|
2020-03-31T15:44:06.000Z
|
2021-05-10T15:26:35.000Z
|
from .test_mdrnn_on_2d_grid import Degenerate2DInputToMDRNNTests, \
OutputShapeGiven2DTests, OutputShapeGiven6DInputTests
import tensorflow as tf
from mdrnn import MDGRU
class Degenerate2DInputToMDGRUTests(Degenerate2DInputToMDRNNTests):
def create_mdrnn(self, **kwargs):
return MDGRU(**kwargs)
def create_keras_rnn(self, **kwargs):
return tf.keras.layers.GRU(implementation=1, reset_after=False, **kwargs)
class MDGRUOutputShapeGiven2DTests(OutputShapeGiven2DTests):
def get_rnn_class(self):
return MDGRU
class MDGRUOutputShapeGiven6DInputTests(OutputShapeGiven6DInputTests):
def get_rnn_class(self):
return MDGRU
| 29.304348
| 81
| 0.783383
|
79536d7f5ed5fa48a58aa5c4db4b24f01ec8935b
| 7,231
|
py
|
Python
|
v3/disentangled training/lib.py
|
biboamy/instrument-disentangle
|
bdf6e7d36ce36e6abe0249712cc9b853e77e7a36
|
[
"MIT"
] | 19
|
2019-05-28T01:11:03.000Z
|
2021-12-31T01:38:47.000Z
|
v3/disentangled training/lib.py
|
biboamy/instrument-disentangle
|
bdf6e7d36ce36e6abe0249712cc9b853e77e7a36
|
[
"MIT"
] | 3
|
2020-03-27T03:34:37.000Z
|
2020-10-04T11:32:17.000Z
|
v3/disentangled training/lib.py
|
biboamy/instrument-disentangle
|
bdf6e7d36ce36e6abe0249712cc9b853e77e7a36
|
[
"MIT"
] | 6
|
2019-08-23T09:58:25.000Z
|
2022-02-09T09:35:10.000Z
|
import os
import librosa, torch
#from pypianoroll import Multitrack, Track
import numpy as np
import torch.nn.init as init
from torch.utils.data import Dataset
import torch.nn as nn
from torch.autograd import Variable
import torch.nn.functional as F
from random import randint
def griffin_lim(mag_spec, n_fft, hop, iterations):
"""Reconstruct an audio signal from a magnitude spectrogram.
Args:
mag_spec (2D numpy array): The magnitude spectrogram.
(row: frame, col: frequency)
n_fft (int): The FFT size, which should be a power of 2.
        hop (int): The hop size in samples.
        iterations (int): Number of iterations for the Griffin-Lim algorithm.
            (typically a few hundred is sufficient)
Returns:
The reconstructed time domain signal as a 1D numpy array.
"""
# Reconstruct the signal for the "size/length" of desired audio signal.
time_sample = librosa.istft(mag_spec, hop_length=hop,
win_length=n_fft, window='hanning')
# Initialize the reconstructed signal to noise.
x_reconstruct = np.random.randn(time_sample.shape[0])
n = iterations # number of iterations of Griffin-Lim algorithm.
while n > 0:
n -= 1
reconstruction_spectrogram = librosa.stft(x_reconstruct, n_fft=n_fft,
hop_length=hop, window='hanning')
reconstruction_angle = np.angle(reconstruction_spectrogram)
# Discard magnitude part of the reconstruction and use the supplied magnitude spectrogram instead.
proposal_spectrogram = mag_spec * np.exp(1.0j * reconstruction_angle)
prev_x = x_reconstruct
x_reconstruct = librosa.istft(proposal_spectrogram, hop_length=hop,
win_length=n_fft, window='hanning')
        diff = np.sqrt(np.sum((x_reconstruct - prev_x)**2) / x_reconstruct.size)
print('Reconstruction iteration: {}/{} RMSE: {} '.format(iterations - n, iterations, diff))
return x_reconstruct
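# Illustrative usage sketch (not part of the original module): reconstructs a short
# synthetic tone from its magnitude spectrogram. The signal, FFT size, hop length and
# iteration count are assumptions for illustration; in practice the magnitude
# spectrogram would come from the model's prediction.
def _griffin_lim_example():
    sr, n_fft, hop = 22050, 1024, 256
    t = np.arange(sr) / sr
    y = np.sin(2 * np.pi * 440.0 * t).astype(np.float32)   # 1 s, 440 Hz tone
    mag = np.abs(librosa.stft(y, n_fft=n_fft, hop_length=hop, window='hann'))
    return griffin_lim(mag, n_fft=n_fft, hop=hop, iterations=50)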
def write_midi(filepath, pianorolls, program_nums=None, is_drums=None,
track_names=None, velocity=100, tempo=40.0, beat_resolution=24):
if not os.path.exists(filepath):
os.makedirs(filepath)
if not np.issubdtype(pianorolls.dtype, np.bool_):
raise TypeError("Support only binary-valued piano-rolls")
if isinstance(program_nums, int):
program_nums = [program_nums]
if isinstance(is_drums, int):
is_drums = [is_drums]
    # Fill in defaults before validating lengths; checking len(program_nums)
    # while it is still None would raise a TypeError.
    if program_nums is None:
        program_nums = [0] * pianorolls.shape[2]
    if is_drums is None:
        is_drums = [False] * pianorolls.shape[2]
    if pianorolls.shape[2] != len(program_nums):
        raise ValueError("`pianorolls` and `program_nums` must have the same "
                         "length")
    if pianorolls.shape[2] != len(is_drums):
        raise ValueError("`pianorolls` and `is_drums` must have the same "
                         "length")
multitrack = Multitrack(beat_resolution=beat_resolution, tempo=tempo)
for idx in range(pianorolls.shape[2]):
#plt.subplot(10,1,idx+1)
#plt.imshow(pianorolls[..., idx].T,cmap=plt.cm.binary, interpolation='nearest', aspect='auto')
if track_names is None:
track = Track(pianorolls[..., idx], program_nums[idx],
is_drums[idx])
else:
track = Track(pianorolls[..., idx], program_nums[idx],
is_drums[idx], track_names[idx])
multitrack.append_track(track)
#plt.savefig(cf.MP3Name)
multitrack.write(filepath)
def get_weight(Ytr):
mp = Ytr[:].sum(0).sum(1)
mmp = mp.astype(np.float32) / mp.sum()
cc=((mmp.mean() / mmp) * ((1-mmp)/(1 - mmp.mean())))**0.3
cc[3]=1
inverse_feq = torch.from_numpy(cc)
return inverse_feq
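# Illustrative sketch (not part of the original module): get_weight expects a
# (samples, classes, time) binary label array and returns per-class inverse-frequency
# weights (with class index 3 pinned to 1). The random shapes below are assumptions.
def _get_weight_example():
    rng = np.random.RandomState(0)
    Ytr = (rng.rand(16, 9, 128) > 0.8).astype(np.float32)  # 16 clips, 9 classes, 128 frames
    return get_weight(Ytr)   # torch tensor of shape (9,)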
class Data2Torch(Dataset):
def __init__(self, data):
self.X = data[0]
self.YI = data[1]
self.YP = data[2]
self.YS = data[3]
self.XM = data[4]
def __getitem__(self, index):
rint = randint(0, len(self.XM)-1)
mX = torch.from_numpy(self.X[index]+self.XM[rint]).float()
mYI = torch.from_numpy(self.YI[index]).float()
mYP = torch.from_numpy(self.YP[index]).float()
mYS = torch.from_numpy(self.YS[index]).float()
return mX, mYI, mYP, mYS
def __len__(self):
return len(self.X)
def loss_func(pred, tar, gwe, name, isAdv):
we = gwe[0].cuda()
wwe = 10
we *= wwe
loss = 0
def inst_loss(inst_pred, inst_tar):
loss_i = 0
for idx, (out, fl_target) in enumerate(zip(inst_pred,inst_tar)):
twe = we.view(-1,1).repeat(1,fl_target.size(1)).type(torch.cuda.FloatTensor)
ttwe = twe * fl_target.data + (1 - fl_target.data) * 1
loss_fn = nn.BCEWithLogitsLoss(weight=ttwe, size_average=True)
loss_i += loss_fn(torch.squeeze(out), fl_target)
return loss_i
def pitch_loss(pit_pred, pit_tar):
loss_p = 0
for idx, (out, fl_target) in enumerate(zip(pit_pred,pit_tar)):
ttwe = 10 * fl_target.data + (1 - fl_target.data) * 1
loss_fn = nn.BCEWithLogitsLoss(weight=ttwe, size_average=True)
loss_p += loss_fn(out, fl_target)
return loss_p
def stream_loss(str_pred, str_tar):
loss_s = 0
for idx, (out, fl_target) in enumerate(zip(str_pred,str_tar)):
ttwe = 10 * fl_target.data + (1 - fl_target.data) * 1
loss_fn = nn.BCEWithLogitsLoss(weight=ttwe, size_average=True)
los = loss_fn(out, fl_target)
loss_s += los
return loss_s
    l0, l1, l2, l3, l4 = (torch.zeros(1), torch.zeros(1), torch.zeros(1),
                          torch.zeros(1), torch.zeros(1))
if 'Unet' in name:
if not isAdv:
l4 = stream_loss(pred[4],tar[2])*90
if 'preIP' in name:
l0 = inst_loss(pred[0],tar[0])
if 'prePP' in name:
l1 = pitch_loss(pred[1],tar[1])*9
else:
l3 = pitch_loss(pred[3],Variable(torch.zeros(pred[3].size())).cuda())*9
if 'Duo' in name:
if 'preIP' in name and not isAdv:
l0 = inst_loss(pred[0],F.max_pool1d(tar[0],2))
if 'prePP' in name and not isAdv:
l1 = pitch_loss(pred[1],F.max_pool1d(tar[1],2))
if 'preINZ' in name and isAdv:
l2 = inst_loss(pred[2],Variable(torch.zeros(pred[2].size())).cuda())*9
if 'prePNZ' in name and isAdv:
l3 = pitch_loss(pred[3],Variable(torch.zeros(pred[3].size())).cuda())*9
if 'preRoll' in name and not isAdv:
l4 = stream_loss(pred[4],F.max_pool2d(tar[2],(1,2)))*90
return l0,l1,l2,l3,l4
def model_init(m):
classname = m.__class__.__name__
if classname.find('Conv') != -1:
init.xavier_uniform_(m.weight, gain=np.sqrt(2))
init.constant_(m.bias, 0)
elif classname.find('BatchNorm') != -1:
init.constant_(m.weight, 1)
init.constant_(m.bias, 0)
elif classname.find('Linear') != -1:
init.xavier_uniform_(m.weight, gain=np.sqrt(2))
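# Hedged usage sketch: nn.Module.apply walks every submodule, so the weight
# initialisation above is typically attached with
#   model = build_model(...)   # any nn.Module; the builder name is illustrative
#   model.apply(model_init)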
| 38.462766
| 106
| 0.604204
|
79536ef63f58198cea811c93cdfdcd69cee9ecf6
| 6,303
|
py
|
Python
|
venv/lib/python3.8/site-packages/dateparser/data/date_translation_data/be.py
|
yuta-komura/vishnu
|
67173b674d5f4f3be189474103612447ef69ab44
|
[
"MIT"
] | 1
|
2021-11-17T04:55:14.000Z
|
2021-11-17T04:55:14.000Z
|
dateparser/data/date_translation_data/be.py
|
cool-RR/dateparser
|
c38336df521cc57d947dc2c9111539a72f801652
|
[
"BSD-3-Clause"
] | null | null | null |
dateparser/data/date_translation_data/be.py
|
cool-RR/dateparser
|
c38336df521cc57d947dc2c9111539a72f801652
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
info = {
"name": "be",
"date_order": "DMY",
"january": [
"студзень",
"сту",
"студзеня",
"Стд"
],
"february": [
"люты",
"лют",
"лютага"
],
"march": [
"сакавік",
"сак",
"сакавіка"
],
"april": [
"красавік",
"кра",
"красавіка",
"Крс"
],
"may": [
"май",
"мая",
"Траўня",
"Тра"
],
"june": [
"чэрвень",
"чэр",
"чэрвеня"
],
"july": [
"ліпень",
"ліп",
"ліпеня"
],
"august": [
"жнівень",
"жні",
"жніўня",
"Жнівеня",
"Жнв"
],
"september": [
"верасень",
"вер",
"верасня",
"Врс"
],
"october": [
"кастрычнік",
"кас",
"кастрычніка",
"Кст"
],
"november": [
"лістапад",
"ліс",
"лістапада",
"Ліс"
],
"december": [
"снежань",
"сне",
"снежня",
"Снж"
],
"monday": [
"панядзелак",
"пн",
"Пнд"
],
"tuesday": [
"аўторак",
"аў",
"Аўт"
],
"wednesday": [
"серада",
"ср"
],
"thursday": [
"чацвер",
"чц",
"Чцв",
"Чв"
],
"friday": [
"пятніца",
"пт",
"Пят"
],
"saturday": [
"субота",
"сб",
"Суб"
],
"sunday": [
"нядзеля",
"нд",
"Няд"
],
"am": [
"am"
],
"pm": [
"pm"
],
"year": [
"год",
"г",
"гады",
"года",
"гадоў"
],
"month": [
"месяц",
"мес",
"месяца",
"месяцы",
"месяцаў"
],
"week": [
"тыд",
"тыдзень",
"тыдня",
"тыдні",
"тыдняў"
],
"day": [
"дзень",
"д",
"дні",
"дзён",
"дзен"
],
"hour": [
"гадзіна",
"гадз",
"гадзіны",
"гадзіну",
"гадзін"
],
"minute": [
"хвіліна",
"хв",
"хвілін",
"хвіліны",
"хвіліну",
"хвіл"
],
"second": [
"секунда",
"с",
"секунды",
"секунду",
"секунд",
"сек"
],
"relative-type": {
"1 year ago": [
"у мінулым годзе"
],
"0 year ago": [
"у гэтым годзе"
],
"in 1 year": [
"у наступным годзе"
],
"1 month ago": [
"у мінулым месяцы"
],
"0 month ago": [
"у гэтым месяцы"
],
"in 1 month": [
"у наступным месяцы"
],
"1 week ago": [
"на мінулым тыдні"
],
"0 week ago": [
"на гэтым тыдні"
],
"in 1 week": [
"на наступным тыдні"
],
"1 day ago": [
"учора",
"ўчора"
],
"0 day ago": [
"сёння",
"сення"
],
"in 1 day": [
"заўтра"
],
"0 hour ago": [
"у гэту гадзіну"
],
"0 minute ago": [
"у гэту хвіліну"
],
"0 second ago": [
"цяпер"
],
"2 day ago": [
"пазаўчора"
]
},
"relative-type-regex": {
"in \\1 year": [
"праз (\\d+) год",
"праз (\\d+) года",
"праз (\\d+) г"
],
"\\1 year ago": [
"(\\d+) год таму",
"(\\d+) года таму",
"(\\d+) г таму"
],
"in \\1 month": [
"праз (\\d+) месяц",
"праз (\\d+) месяца",
"праз (\\d+) мес"
],
"\\1 month ago": [
"(\\d+) месяц таму",
"(\\d+) месяца таму",
"(\\d+) мес таму"
],
"in \\1 week": [
"праз (\\d+) тыдзень",
"праз (\\d+) тыдня",
"праз (\\d+) тыд"
],
"\\1 week ago": [
"(\\d+) тыдзень таму",
"(\\d+) тыдня таму",
"(\\d+) тыд таму"
],
"in \\1 day": [
"праз (\\d+) дзень",
"праз (\\d+) дня",
"праз (\\d+) д"
],
"\\1 day ago": [
"(\\d+) дзень таму",
"(\\d+) дня таму",
"(\\d+) д таму"
],
"in \\1 hour": [
"праз (\\d+) гадзіну",
"праз (\\d+) гадзіны",
"праз (\\d+) гадз"
],
"\\1 hour ago": [
"(\\d+) гадзіну таму",
"(\\d+) гадзіны таму",
"(\\d+) гадз таму"
],
"in \\1 minute": [
"праз (\\d+) хвіліну",
"праз (\\d+) хвіліны",
"праз (\\d+) хв"
],
"\\1 minute ago": [
"(\\d+) хвіліну таму",
"(\\d+) хвіліны таму",
"(\\d+) хв таму"
],
"in \\1 second": [
"праз (\\d+) секунду",
"праз (\\d+) секунды",
"праз (\\d+) с"
],
"\\1 second ago": [
"(\\d+) секунду таму",
"(\\d+) секунды таму",
"(\\d+) с таму"
]
},
"locale_specific": {},
"skip": [
"каля",
"у",
"ў",
"і",
" ",
".",
",",
";",
"-",
"/",
"'",
"|",
"@",
"[",
"]",
","
],
"sentence_splitter_group": 1,
"ago": [
"таму назад",
"таму",
"назад"
],
"in": [
"на працягу"
],
"simplifications": [
{
"^гадзіна": "1 гадзіна"
},
{
"гадзіну": "1 гадзіну"
},
{
"^хвіліну": "1 хвіліну"
},
{
"^секунду": "1 секунду"
},
{
"некалькі секунд": "44 секунды"
},
{
"некалькі хвілін": "2 хвіліны"
},
{
"(\\d+)\\s*гадзін\\s(\\d+)\\s*хвілін": "\\1:\\2"
}
]
}
| 18.429825
| 60
| 0.282088
|
79536fe6d23d9300fef187569f1e3dd1c238fd63
| 87
|
py
|
Python
|
test/run/t539.py
|
timmartin/skulpt
|
2e3a3fbbaccc12baa29094a717ceec491a8a6750
|
[
"MIT"
] | 2,671
|
2015-01-03T08:23:25.000Z
|
2022-03-31T06:15:48.000Z
|
test/run/t539.py
|
timmartin/skulpt
|
2e3a3fbbaccc12baa29094a717ceec491a8a6750
|
[
"MIT"
] | 972
|
2015-01-05T08:11:00.000Z
|
2022-03-29T13:47:15.000Z
|
test/run/t539.py
|
timmartin/skulpt
|
2e3a3fbbaccc12baa29094a717ceec491a8a6750
|
[
"MIT"
] | 845
|
2015-01-03T19:53:36.000Z
|
2022-03-29T18:34:22.000Z
|
print type(int(3999999999.0))
print int(3999999999.0)
# print type(int(pow(2,53) + 2))
| 21.75
| 32
| 0.701149
|
7953700fbabb98154bdc153f27e306bee672e00e
| 5,864
|
py
|
Python
|
docs/source/conf.py
|
blakeboswell/valence
|
0256489804b4c4344bcdbe0de3632896b3d12d5d
|
[
"BSD-3-Clause"
] | 1
|
2019-07-01T07:27:13.000Z
|
2019-07-01T07:27:13.000Z
|
docs/source/conf.py
|
blakeboswell/valence
|
0256489804b4c4344bcdbe0de3632896b3d12d5d
|
[
"BSD-3-Clause"
] | 15
|
2017-07-16T07:14:49.000Z
|
2017-08-28T04:26:40.000Z
|
docs/source/conf.py
|
blakeboswell/valence
|
0256489804b4c4344bcdbe0de3632896b3d12d5d
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/stable/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------
project = 'pyvalence'
copyright = '2018, audere-labs'
author = 'audere-labs'
# The short X.Y version
version = ''
# The full version, including alpha/beta/rc tags
release = '0.0.1.2'
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
'sphinx.ext.githubpages',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path .
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'pyvalencedoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'pyvalence.tex', 'pyvalence Documentation',
'audere-labs', 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'pyvalence', 'pyvalence Documentation',
[author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'pyvalence', 'pyvalence Documentation',
author, 'pyvalence', 'One line description of project.',
'Miscellaneous'),
]
# -- Options for Epub output -------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# -- Extension configuration -------------------------------------------------
# -- Options for intersphinx extension ---------------------------------------
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}
# -- Options for todo extension ----------------------------------------------
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
| 29.918367
| 79
| 0.644952
|
795370adc0852c94152e62abae7c2675ad5d9490
| 6,733
|
py
|
Python
|
utils/lda.py
|
czyczyyzc/WeiboSpider
|
41b9c97cb01d41cb4a62efdd452451b5ef25bdbc
|
[
"MIT"
] | 2
|
2021-03-26T03:02:52.000Z
|
2021-04-01T11:08:46.000Z
|
utils/lda.py
|
czyczyyzc/WeiboSpider
|
41b9c97cb01d41cb4a62efdd452451b5ef25bdbc
|
[
"MIT"
] | null | null | null |
utils/lda.py
|
czyczyyzc/WeiboSpider
|
41b9c97cb01d41cb4a62efdd452451b5ef25bdbc
|
[
"MIT"
] | null | null | null |
# -*- coding:utf-8 -*-
import os
import csv
import random
import numpy as np
from collections import OrderedDict
class Document(object):
def __init__(self):
self.words = []
self.length = 0
class StatisticalData(object):
def __init__(self):
self.docs_count = 0
self.words_count = 0
self.docs = []
self.word2id = OrderedDict()
self.id2word = OrderedDict()
class LDAModel(object):
def __init__(self, file_dir, K=10, alpha=0.5, beta=0.1, iter_times=1000, top_words_num=20):
self.K = K
self.beta = beta
self.alpha = alpha
self.iter_times = iter_times
self.top_words_num = top_words_num
self.train_data_file = os.path.join(file_dir, 'keywords.txt')
self.topic_word_file = os.path.join(file_dir, 'topic_word_file.csv')
self.topic_docs_file = os.path.join(file_dir, 'topic_docs_file.csv')
self.topic_docs_word = os.path.join(file_dir, 'topic_docs_word.txt')
self.data = self.preprocessing()
self.p = np.zeros(self.K)
self.nw = np.zeros((self.data.words_count, self.K), dtype='int')
self.nd = np.zeros((self.data.docs_count, self.K), dtype='int')
self.nwsum = np.zeros(self.K, dtype='int')
self.ndsum = np.zeros(self.data.docs_count, dtype='int')
self.Z = np.array([[0 for y in range(self.data.docs[x].length)] for x in range(self.data.docs_count)], dtype=object)
for x in range(len(self.Z)):
self.ndsum[x] = self.data.docs[x].length
for y in range(self.data.docs[x].length):
topic = random.randint(0, self.K - 1)
self.Z[x][y] = topic
self.nw[self.data.docs[x].words[y]][topic] += 1
self.nd[x][topic] += 1
self.nwsum[topic] += 1
self.theta = np.array([[0.0 for y in range(self.K)] for x in range(self.data.docs_count)])
self.phi = np.array([[0.0 for y in range(self.data.words_count)] for x in range(self.K)])
self.top_words_num = min(self.top_words_num, self.data.words_count)
def preprocessing(self):
print("Loading data for lda analysis ...")
with open(self.train_data_file, 'r', encoding='utf-8-sig', newline='') as f:
lines = f.readlines()
lines = [line.strip().split() for line in lines]
print("Data loading is finished!")
print("Generating the statistical data ...")
data = StatisticalData()
items_idx = 0
for line in lines:
if len(line) == 0:
continue
doc = Document()
for item in line:
if item in data.word2id:
doc.words.append(data.word2id[item])
else:
data.word2id[item] = items_idx
doc.words.append(items_idx)
items_idx += 1
doc.length = len(line)
data.docs.append(doc)
print("Data generation is finished!")
data.id2word = OrderedDict({value: key for key, value in data.word2id.items()})
data.docs_count = len(data.docs)
data.words_count = len(data.word2id)
print("There are total {:d} documents in total.".format(data.docs_count))
return data
def sampling(self, i, j):
topic = self.Z[i][j]
word = self.data.docs[i].words[j]
self.nw[word][topic] -= 1
self.nd[i][topic] -= 1
self.nwsum[topic] -= 1
self.ndsum[i] -= 1
Vbeta = self.data.words_count * self.beta
Kalpha = self.K * self.alpha
self.p = (self.nw[word] + self.beta) / (self.nwsum + Vbeta) * \
(self.nd[i] + self.alpha) / (self.ndsum[i] + Kalpha)
p = np.squeeze(np.asarray(self.p / np.sum(self.p)))
topic = np.argmax(np.random.multinomial(1, p))
self.nw[word][topic] += 1
self.nd[i][topic] += 1
self.nwsum[topic] += 1
self.ndsum[i] += 1
return topic
def __call__(self):
print("Training for LDA ...")
for x in range(self.iter_times):
if x % 1 == 0:
print("Iteration {:d}".format(x))
for i in range(self.data.docs_count):
for j in range(self.data.docs[i].length):
topic = self.sampling(i, j)
self.Z[i][j] = topic
print("Training is finished!")
print("Calculating the distribution of documents and topics ...")
for i in range(self.data.docs_count):
self.theta[i] = (self.nd[i] + self.alpha) / (self.ndsum[i] + self.K * self.alpha)
print("Calculating the distribution of words and topics ...")
for i in range(self.K):
self.phi[i] = (self.nw.T[i] + self.beta) / (self.nwsum[i] + self.data.words_count * self.beta)
print("Calculation is Finished!")
print("The distribution of topics and top {:d} words are saving to {:s}".format(self.K, self.topic_word_file))
with open(self.topic_word_file, 'w', encoding='utf-8-sig', newline='') as f:
topic_word_writer = csv.writer(f, dialect='excel')
for x in range(self.K):
topic_words = [(n, self.phi[x][n]) for n in range(self.data.words_count)]
topic_words.sort(key=lambda word: word[1], reverse=True)
topic_words = [(self.data.id2word[topic_words[y][0]], str(topic_words[y][1]))
for y in range(self.top_words_num)]
topic_words = list(zip(*topic_words))
topic_word_writer.writerow(['主题{:d}'.format(x)] + list(topic_words[0]))
topic_word_writer.writerow(['概率'] + list(topic_words[1]))
print("The distribution of topics and documents are saving to {:s}".format(self.topic_docs_file))
with open(self.topic_docs_file, 'w', encoding='utf-8-sig', newline='') as f:
topic_docs_writer = csv.writer(f, dialect='excel')
topic_docs_writer.writerow([''] + ['主题{:d}'.format(y) for y in range(self.K)])
for x in range(self.data.docs_count):
topic_docs = [str(self.theta[x][y]) for y in range(self.K)]
topic_docs_writer.writerow(['文档{:d}'.format(x)] + topic_docs)
with open(self.topic_docs_word, 'w', encoding='utf-8-sig', newline='') as f:
for x in range(self.data.docs_count):
for y in range(self.data.docs[x].length):
f.write(self.data.id2word[self.data.docs[x].words[y]] + ':' +
'主题{:d}'.format(self.Z[x][y]) + ' ')
f.write('\n')
print("Saving is finished!")
| 43.720779
| 128
| 0.56379
|
795370b181690b35191cf045255cd57b67b51efd
| 953
|
py
|
Python
|
python/p010.py
|
josola/project-euler
|
3ef2366af5575806b2e0e604989b0f3e59538475
|
[
"MIT"
] | null | null | null |
python/p010.py
|
josola/project-euler
|
3ef2366af5575806b2e0e604989b0f3e59538475
|
[
"MIT"
] | 6
|
2021-08-03T20:09:29.000Z
|
2021-08-03T20:55:09.000Z
|
python/p010.py
|
josola/project-euler
|
3ef2366af5575806b2e0e604989b0f3e59538475
|
[
"MIT"
] | null | null | null |
# Project Euler
# Problem 10 - Summation of primes
# (c) 2020-2022 Jordan Sola. All rights reserved. (MIT License)
# Written by Jordan Sola 2020-2021
def compute(LIMIT):
# - Uses Sieve of Eratosthenes to eliminate composite
# numbers up to the limit.
    # - The sieve tracks previously marked primes without increasing
    #   the computation time unnecessarily. This lets the sieve jump
    #   ahead to the square of the current prime and mark all the
    #   multiples of the current prime as composite.
prime = [True] * LIMIT
i = 2
while (i * i) <= LIMIT:
if prime[i] == True:
j = i * i
while j <= LIMIT - 1:
prime[j] = False
j += i
i += 1
sum = 0
for i in range(2, LIMIT):
if prime[i] == True:
sum += i
return sum
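# Worked example of the sieve above (illustration only): compute(10) marks the
# multiples of 2 and 3 as composite, leaving 2, 3, 5 and 7 unmarked, so it
# returns 2 + 3 + 5 + 7 = 17.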
if __name__ == "__main__":
print(compute(2000000))
# Answer: 142'913'828'922
# Asymptotic complexity: O(N LogN)
# M1 (3.2 GHz CPU) ARMv8-A64 (64 bit): 1 loop, best of 5: 314 msec per loop
| 22.162791
| 85
| 0.644281
|
79537175e8ba0d7e97ff14e054f2b5ef8c8154c6
| 1,529
|
py
|
Python
|
scripts/odp_s3.py
|
CCI-Tools/zarr-cache
|
23c873966e06440b65406fd633b0c8fe63ca70a1
|
[
"MIT"
] | null | null | null |
scripts/odp_s3.py
|
CCI-Tools/zarr-cache
|
23c873966e06440b65406fd633b0c8fe63ca70a1
|
[
"MIT"
] | null | null | null |
scripts/odp_s3.py
|
CCI-Tools/zarr-cache
|
23c873966e06440b65406fd633b0c8fe63ca70a1
|
[
"MIT"
] | null | null | null |
import s3fs
import os.path
import xarray as xr
from xcube_cci.cciodp import CciOdp
from xcube_cci.chunkstore import CciChunkStore
from zarr_cache import CachedStore, S3StoreOpener
from zarr_cache import IndexedCacheStorage
from zarr_cache import MemoryStoreIndex
odp = CciOdp()
dataset_names_path = 'dataset_names.txt'
if not os.path.exists(dataset_names_path):
with open(dataset_names_path, 'w') as fp:
fp.writelines(map(lambda s: s + '\n', sorted(odp.dataset_names)))
bucket_name = "cciodp-cache-v1"
# with open('jasmin-os-credentials.json') as fp:
# credentials = json.load(fp)
# s3 = s3fs.S3FileSystem(anon=False,
# key=credentials['aws_access_key_id'],
# secret=credentials['aws_secret_access_key'],
# client_kwargs=dict(endpoint_url=credentials['endpoint_url']))
s3 = s3fs.S3FileSystem()
if not s3.isdir(bucket_name):
s3.mkdir(bucket_name)
store_index = MemoryStoreIndex()
store_cache = IndexedCacheStorage(store_index=store_index,
store_opener=S3StoreOpener(bucket_name + "/{store_id}.zarr", s3=s3))
def open_cached_dataset(ds_id):
original_store = CciChunkStore(odp, ds_id)
cached_store = CachedStore(original_store, ds_id, store_cache)
return xr.open_zarr(cached_store)
# ds = open_cached_dataset('esacci.OZONE.mon.L3.NP.multi-sensor.multi-platform.MERGED.fv0002.r1')
# ds = open_cached_dataset('esacci.CLOUD.mon.L3C.CLD_PRODUCTS.multi-sensor.multi-platform.AVHRR-PM.3-0.r1')
| 36.404762
| 107
| 0.725311
|
7953719b935ddf911aad3f2db415a5b801c27415
| 444
|
py
|
Python
|
resource-3/banking-card-pin/same-pin-digits/random.py
|
dragonwolverines/Datastructures-Python
|
a3feb13107736e1dab4c5a2e8dc137a6befc2e1b
|
[
"BSD-2-Clause"
] | null | null | null |
resource-3/banking-card-pin/same-pin-digits/random.py
|
dragonwolverines/Datastructures-Python
|
a3feb13107736e1dab4c5a2e8dc137a6befc2e1b
|
[
"BSD-2-Clause"
] | null | null | null |
resource-3/banking-card-pin/same-pin-digits/random.py
|
dragonwolverines/Datastructures-Python
|
a3feb13107736e1dab4c5a2e8dc137a6befc2e1b
|
[
"BSD-2-Clause"
] | null | null | null |
# What is the probability of one specific person to have the same pin digits with the person next to him?
# Assume that the pin is four digits
# Generating a random process
from random import randint
def rand_pin_digits():
    # randint's upper bound is inclusive, so 9999 keeps the pin to four digits
    a = '{0:04}'.format(randint(0, 9999))
    a = set(a)  # reduce the pin to its set of distinct digits
    return a
match = 0
for i in range(10**6):
a = rand_pin_digits()
b = rand_pin_digits()
if a == b:
match += 1
print(match)
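# Hedged follow-up: dividing the count by the number of trials gives the
# empirical probability estimate, e.g.
#   print(match / 10**6)
# Note that because each pin is reduced to a set of digits, pins sharing the
# same digits in any order (or multiplicity) count as a match.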
| 20.181818
| 105
| 0.637387
|
795373012cb217f71abbabe62f60c733a7c26ff8
| 980
|
py
|
Python
|
src/python/QueryParquetFile.py
|
397090770/learning-spark
|
42fd8baefc4ee215d6acd826b8999b525b7669a6
|
[
"MIT"
] | null | null | null |
src/python/QueryParquetFile.py
|
397090770/learning-spark
|
42fd8baefc4ee215d6acd826b8999b525b7669a6
|
[
"MIT"
] | null | null | null |
src/python/QueryParquetFile.py
|
397090770/learning-spark
|
42fd8baefc4ee215d6acd826b8999b525b7669a6
|
[
"MIT"
] | 2
|
2017-09-28T06:54:58.000Z
|
2018-10-14T06:37:14.000Z
|
# Finds the names of people who like pandas from a parquet file
# consisting of name & favouriteAnimal.
# For input you can use the result of MakeParquetFile
from pyspark import SparkContext
from pyspark.sql import SQLContext
import json
import sys
if __name__ == "__main__":
if len(sys.argv) != 3:
print "Error usage: QueryParquetFile [sparkmaster] [parquetfile]"
sys.exit(-1)
master = sys.argv[1]
parquetFile = sys.argv[2]
sc = SparkContext(master, "QueryParquetFile")
sqlCtx = SQLContext(sc)
# Load some data in from a Parquet file of name & favouriteAnimal
rows = sqlCtx.parquetFile(parquetFile)
names = rows.map(lambda row: row.name)
print "Everyone"
print names.collect()
# Find the panda lovers
tbl = rows.registerAsTable("people")
pandaFriends = sqlCtx.sql("SELECT name FROM people WHERE favouriteAnimal = \"panda\"")
print "Panda Friends"
print pandaFriends.map(lambda row: row.name).collect()
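# Hedged invocation sketch (argument order follows the sys.argv parsing above;
# the master URL and path are made up for illustration):
#   spark-submit QueryParquetFile.py local[2] /tmp/favourite_animals.parquet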
| 36.296296
| 90
| 0.708163
|
795373ba27f60e4e5b06aaf0acc6497fb20ff6c4
| 12,726
|
py
|
Python
|
pysnmp/ERI-DNX-PERF-MONT-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 11
|
2021-02-02T16:27:16.000Z
|
2021-08-31T06:22:49.000Z
|
pysnmp/ERI-DNX-PERF-MONT-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 75
|
2021-02-24T17:30:31.000Z
|
2021-12-08T00:01:18.000Z
|
pysnmp/ERI-DNX-PERF-MONT-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 10
|
2019-04-30T05:51:36.000Z
|
2022-02-16T03:33:41.000Z
|
#
# PySNMP MIB module ERI-DNX-PERF-MONT-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/ERI-DNX-PERF-MONT-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 18:51:47 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, SingleValueConstraint, ConstraintsIntersection, ConstraintsUnion, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "SingleValueConstraint", "ConstraintsIntersection", "ConstraintsUnion", "ValueSizeConstraint")
LinkPortAddress, dnx = mibBuilder.importSymbols("ERI-DNX-SMC-MIB", "LinkPortAddress", "dnx")
eriMibs, = mibBuilder.importSymbols("ERI-ROOT-SMI", "eriMibs")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
ObjectIdentity, NotificationType, Bits, Integer32, MibScalar, MibTable, MibTableRow, MibTableColumn, MibIdentifier, Unsigned32, ModuleIdentity, TimeTicks, Counter64, Gauge32, Counter32, IpAddress, iso = mibBuilder.importSymbols("SNMPv2-SMI", "ObjectIdentity", "NotificationType", "Bits", "Integer32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "MibIdentifier", "Unsigned32", "ModuleIdentity", "TimeTicks", "Counter64", "Gauge32", "Counter32", "IpAddress", "iso")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
eriDNXLinkPMStatsMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 644, 3, 7))
if mibBuilder.loadTexts: eriDNXLinkPMStatsMIB.setLastUpdated('200204110000Z')
if mibBuilder.loadTexts: eriDNXLinkPMStatsMIB.setOrganization('Eastern Research, Inc.')
performanceMonitoring = MibIdentifier((1, 3, 6, 1, 4, 1, 644, 2, 4, 4))
dsx1Esf = MibIdentifier((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 1))
dsx1G826 = MibIdentifier((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 2))
dsx1EsfCurrTable = MibTable((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 1, 1), )
if mibBuilder.loadTexts: dsx1EsfCurrTable.setStatus('current')
dsx1EsfCurrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 1, 1, 1), ).setIndexNames((0, "ERI-DNX-PERF-MONT-MIB", "dsx1EsfCurrLinkAddr"))
if mibBuilder.loadTexts: dsx1EsfCurrEntry.setStatus('current')
dsx1EsfCurrLinkAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 1, 1, 1, 1), LinkPortAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsx1EsfCurrLinkAddr.setStatus('current')
dsx1EsfCurrResrcId = MibTableColumn((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 1, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsx1EsfCurrResrcId.setStatus('current')
dsx1EsfCurrESs = MibTableColumn((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 1, 1, 1, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 900))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsx1EsfCurrESs.setStatus('current')
dsx1EsfCurrUASs = MibTableColumn((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 1, 1, 1, 4), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 900))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsx1EsfCurrUASs.setStatus('current')
dsx1EsfCurrSESs = MibTableColumn((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 1, 1, 1, 5), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 900))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsx1EsfCurrSESs.setStatus('current')
dsx1EsfCurrBESs = MibTableColumn((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 1, 1, 1, 6), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 900))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsx1EsfCurrBESs.setStatus('current')
dsx1EsfCurrLOFs = MibTableColumn((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 1, 1, 1, 7), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 900))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsx1EsfCurrLOFs.setStatus('current')
dsx1EsfCurrSeconds = MibTableColumn((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 1, 1, 1, 8), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 900))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsx1EsfCurrSeconds.setStatus('current')
dsx1EsfCurrIntervals = MibTableColumn((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 1, 1, 1, 9), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 96))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsx1EsfCurrIntervals.setStatus('current')
dsx1EsfCurrStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 1, 1, 1, 10), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsx1EsfCurrStatus.setStatus('current')
dsx1Esf24HrTable = MibTable((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 1, 2), )
if mibBuilder.loadTexts: dsx1Esf24HrTable.setStatus('current')
dsx1Esf24HrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 1, 2, 1), ).setIndexNames((0, "ERI-DNX-PERF-MONT-MIB", "dsx1Esf24HrLinkAddr"))
if mibBuilder.loadTexts: dsx1Esf24HrEntry.setStatus('current')
dsx1Esf24HrLinkAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 1, 2, 1, 1), LinkPortAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsx1Esf24HrLinkAddr.setStatus('current')
dsx1Esf24HrResrcId = MibTableColumn((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 1, 2, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsx1Esf24HrResrcId.setStatus('current')
dsx1Esf24HrESs = MibTableColumn((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 1, 2, 1, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsx1Esf24HrESs.setStatus('current')
dsx1Esf24HrUASs = MibTableColumn((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 1, 2, 1, 4), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsx1Esf24HrUASs.setStatus('current')
dsx1Esf24HrSESs = MibTableColumn((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 1, 2, 1, 5), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsx1Esf24HrSESs.setStatus('current')
dsx1Esf24HrBESs = MibTableColumn((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 1, 2, 1, 6), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsx1Esf24HrBESs.setStatus('current')
dsx1Esf24HrLOFs = MibTableColumn((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 1, 2, 1, 7), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsx1Esf24HrLOFs.setStatus('current')
dsx1Esf96RegTable = MibTable((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 1, 3), )
if mibBuilder.loadTexts: dsx1Esf96RegTable.setStatus('current')
dsx1Esf96RegEntry = MibTableRow((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 1, 3, 1), ).setIndexNames((0, "ERI-DNX-PERF-MONT-MIB", "dsx1Esf96RegLinkAddr"), (0, "ERI-DNX-PERF-MONT-MIB", "dsx1Esf96RegInterval"))
if mibBuilder.loadTexts: dsx1Esf96RegEntry.setStatus('current')
dsx1Esf96RegLinkAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 1, 3, 1, 1), LinkPortAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsx1Esf96RegLinkAddr.setStatus('current')
dsx1Esf96RegInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 1, 3, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 96))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsx1Esf96RegInterval.setStatus('current')
dsx1Esf96RegESs = MibTableColumn((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 1, 3, 1, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 900))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsx1Esf96RegESs.setStatus('current')
dsx1Esf96RegUASs = MibTableColumn((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 1, 3, 1, 4), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 900))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsx1Esf96RegUASs.setStatus('current')
dsx1Esf96RegSESs = MibTableColumn((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 1, 3, 1, 5), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 900))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsx1Esf96RegSESs.setStatus('current')
dsx1Esf96RegBESs = MibTableColumn((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 1, 3, 1, 6), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 900))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsx1Esf96RegBESs.setStatus('current')
dsx1G826Table = MibTable((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 2, 1), )
if mibBuilder.loadTexts: dsx1G826Table.setStatus('current')
dsx1G826Entry = MibTableRow((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 2, 1, 1), ).setIndexNames((0, "ERI-DNX-PERF-MONT-MIB", "dsx1G826LinkAddr"))
if mibBuilder.loadTexts: dsx1G826Entry.setStatus('current')
dsx1G826LinkAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 2, 1, 1, 1), LinkPortAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsx1G826LinkAddr.setStatus('current')
dsx1G826ResrcId = MibTableColumn((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 2, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsx1G826ResrcId.setStatus('current')
dsx1G826TotalTime = MibTableColumn((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 2, 1, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dsx1G826TotalTime.setStatus('current')
dsx1G826ESs = MibTableColumn((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 2, 1, 1, 4), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsx1G826ESs.setStatus('current')
dsx1G826ErrFSs = MibTableColumn((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 2, 1, 1, 5), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsx1G826ErrFSs.setStatus('current')
dsx1G826SESs = MibTableColumn((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 2, 1, 1, 6), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsx1G826SESs.setStatus('current')
dsx1G826ConsecSESs = MibTableColumn((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 2, 1, 1, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsx1G826ConsecSESs.setStatus('current')
dsx1G826ConsecErrFSs = MibTableColumn((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 2, 1, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsx1G826ConsecErrFSs.setStatus('current')
dsx1G826BGErrBlocks = MibTableColumn((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 2, 1, 1, 9), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsx1G826BGErrBlocks.setStatus('current')
dsx1G826ESRatio = MibTableColumn((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 2, 1, 1, 10), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsx1G826ESRatio.setStatus('current')
dsx1G826SESRatio = MibTableColumn((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 2, 1, 1, 11), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsx1G826SESRatio.setStatus('current')
dsx1G826BgBERRatio = MibTableColumn((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 2, 1, 1, 12), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsx1G826BgBERRatio.setStatus('current')
dsx1G826UASs = MibTableColumn((1, 3, 6, 1, 4, 1, 644, 2, 4, 4, 2, 1, 1, 13), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsx1G826UASs.setStatus('current')
mibBuilder.exportSymbols("ERI-DNX-PERF-MONT-MIB", dsx1G826Table=dsx1G826Table, dsx1EsfCurrStatus=dsx1EsfCurrStatus, eriDNXLinkPMStatsMIB=eriDNXLinkPMStatsMIB, dsx1G826ESs=dsx1G826ESs, dsx1EsfCurrLOFs=dsx1EsfCurrLOFs, dsx1Esf96RegLinkAddr=dsx1Esf96RegLinkAddr, dsx1G826ConsecSESs=dsx1G826ConsecSESs, dsx1EsfCurrEntry=dsx1EsfCurrEntry, dsx1Esf24HrResrcId=dsx1Esf24HrResrcId, dsx1EsfCurrTable=dsx1EsfCurrTable, dsx1G826BGErrBlocks=dsx1G826BGErrBlocks, dsx1EsfCurrESs=dsx1EsfCurrESs, dsx1Esf24HrSESs=dsx1Esf24HrSESs, dsx1Esf=dsx1Esf, dsx1Esf96RegSESs=dsx1Esf96RegSESs, performanceMonitoring=performanceMonitoring, dsx1Esf24HrLOFs=dsx1Esf24HrLOFs, dsx1G826ESRatio=dsx1G826ESRatio, dsx1G826LinkAddr=dsx1G826LinkAddr, dsx1EsfCurrSESs=dsx1EsfCurrSESs, dsx1EsfCurrSeconds=dsx1EsfCurrSeconds, dsx1Esf24HrTable=dsx1Esf24HrTable, dsx1Esf24HrLinkAddr=dsx1Esf24HrLinkAddr, dsx1G826ErrFSs=dsx1G826ErrFSs, dsx1G826UASs=dsx1G826UASs, dsx1EsfCurrResrcId=dsx1EsfCurrResrcId, dsx1G826Entry=dsx1G826Entry, dsx1G826SESRatio=dsx1G826SESRatio, dsx1EsfCurrBESs=dsx1EsfCurrBESs, dsx1Esf96RegEntry=dsx1Esf96RegEntry, dsx1G826=dsx1G826, dsx1Esf96RegTable=dsx1Esf96RegTable, dsx1Esf96RegInterval=dsx1Esf96RegInterval, dsx1Esf96RegESs=dsx1Esf96RegESs, dsx1EsfCurrIntervals=dsx1EsfCurrIntervals, dsx1EsfCurrUASs=dsx1EsfCurrUASs, dsx1G826ConsecErrFSs=dsx1G826ConsecErrFSs, dsx1Esf24HrESs=dsx1Esf24HrESs, dsx1G826TotalTime=dsx1G826TotalTime, dsx1Esf96RegUASs=dsx1Esf96RegUASs, PYSNMP_MODULE_ID=eriDNXLinkPMStatsMIB, dsx1EsfCurrLinkAddr=dsx1EsfCurrLinkAddr, dsx1Esf24HrBESs=dsx1Esf24HrBESs, dsx1G826ResrcId=dsx1G826ResrcId, dsx1Esf24HrEntry=dsx1Esf24HrEntry, dsx1G826SESs=dsx1G826SESs, dsx1Esf24HrUASs=dsx1Esf24HrUASs, dsx1G826BgBERRatio=dsx1G826BgBERRatio, dsx1Esf96RegBESs=dsx1Esf96RegBESs)
| 114.648649
| 1,758
| 0.753968
|
795375074c4ccb8577eac3bb4f3649a6bcc6c595
| 3,706
|
py
|
Python
|
scripts/config.py
|
rohankumardubey/ramcloud
|
3a30ba5edc4a81d5e12ab20fda0360cb9bacf50f
|
[
"0BSD"
] | 5
|
2015-11-14T16:49:06.000Z
|
2019-09-03T13:21:30.000Z
|
scripts/config.py
|
rohankumardubey/ramcloud
|
3a30ba5edc4a81d5e12ab20fda0360cb9bacf50f
|
[
"0BSD"
] | null | null | null |
scripts/config.py
|
rohankumardubey/ramcloud
|
3a30ba5edc4a81d5e12ab20fda0360cb9bacf50f
|
[
"0BSD"
] | 1
|
2018-02-25T11:16:27.000Z
|
2018-02-25T11:16:27.000Z
|
#!/usr/bin/env python
# Copyright (c) 2011 Stanford University
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR(S) DISCLAIM ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL AUTHORS BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""
This module defines a collection of variables that specify site-specific
configuration information such as names of RAMCloud hosts and the location
of RAMCloud binaries. This should be the only file you have to modify to
run RAMCloud scripts at your site.
"""
from common import captureSh
import os
import re
import subprocess
import sys
__all__ = ['coordinator_port', 'default_disk1','default_disk2', 'git_branch',
'hosts', 'obj_dir', 'obj_path', 'scripts_path', 'second_backup_port',
'server_port', 'top_path']
# git_branch is the name of the current git branch, which is used
# for purposes such as computing objDir.
try:
git_branch = re.search('^refs/heads/(.*)$',
captureSh('git symbolic-ref -q HEAD 2>/dev/null'))
except subprocess.CalledProcessError:
git_branch = None
obj_dir = 'obj'
else:
git_branch = git_branch.group(1)
obj_dir = 'obj.%s' % git_branch
# obj_dir is the name of the directory containing binaries for the current
# git branch (it's just a single name such as "obj.master", not a full path)
if git_branch == None:
obj_dir = 'obj'
else:
obj_dir = 'obj.%s' % git_branch
# The full path name of the directory containing this script file.
scripts_path = os.path.dirname(os.path.abspath(__file__))
# The full pathname of the parent of scriptsPath (the top-level directory
# of a RAMCloud source tree).
top_path = os.path.abspath(scripts_path + '/..')
# Add /usr/local/lib to LD_LIBRARY_PATH if it isn't already there (this was
# needed for CentOS 5.5, but should probably be deleted now).
try:
ld_library_path = os.environ['LD_LIBRARY_PATH'].split(':')
except KeyError:
ld_library_path = []
if '/usr/local/lib' not in ld_library_path:
ld_library_path.insert(0, '/usr/local/lib')
os.environ['LD_LIBRARY_PATH'] = ':'.join(ld_library_path)
# All of the hosts available for servers or clients; each entry
# consists of a name for the host (for ssh), an IP address
# to use for creating service locators, and an id for generating
# Ethernet addresses.
hosts = []
for i in range(1, 61):
hosts.append(('rc%02d' % i,
'192.168.1.%d' % (100 + i),
i))
# Host on which old master is run for running recoveries.
# Need not be a member of hosts
old_master_host = ('rcmaster', '192.168.1.1', 81)
# Full path to the directory containing RAMCloud executables.
obj_path = '%s/%s' % (top_path, obj_dir)
# Ports (for TCP, etc.) to use for each kind of server.
coordinator_port = 12246
server_port = 12247
second_backup_port = 12248
# Command-line argument specifying where the first backup on each
# server should store its segment replicas.
default_disk1 = '-f /dev/sda2'
# Command-line argument specifying where the second backup should
# store its segment replicas.
default_disk2 = '-f /dev/sdb2'
# Try to include local overrides.
try:
from localconfig import *
except:
pass
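# Hedged example of a localconfig.py override file (placed next to this script;
# any of the variables above can be redefined there; host names and paths are
# made up for illustration):
#   hosts = [('myhost%02d' % i, '10.0.0.%d' % (10 + i), i) for i in range(1, 5)]
#   default_disk1 = '-f /var/tmp/backup1'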
| 35.295238
| 77
| 0.726929
|
795377ac4369812cc266aa4a1ff27f0cf1692128
| 4,453
|
py
|
Python
|
examples/demo_bandits/plot_compare_index_bandits.py
|
TimotheeMathieu/rlberry
|
a351ead4209d3f95c1327e8140a83d6bc0214d40
|
[
"MIT"
] | null | null | null |
examples/demo_bandits/plot_compare_index_bandits.py
|
TimotheeMathieu/rlberry
|
a351ead4209d3f95c1327e8140a83d6bc0214d40
|
[
"MIT"
] | null | null | null |
examples/demo_bandits/plot_compare_index_bandits.py
|
TimotheeMathieu/rlberry
|
a351ead4209d3f95c1327e8140a83d6bc0214d40
|
[
"MIT"
] | null | null | null |
"""
=============================================================
Comparison subplots of various index based bandits algorithms
=============================================================
This script compares several bandit agents and, as a by-product, also shows
how to use subplots with `plot_writer_data`.
"""
import numpy as np
import matplotlib.pyplot as plt
from rlberry.envs.bandits import BernoulliBandit
from rlberry.manager import AgentManager, plot_writer_data
from rlberry.wrappers import WriterWrapper
from rlberry.agents.bandits import (
IndexAgent,
RandomizedAgent,
makeBoundedIMEDIndex,
makeBoundedMOSSIndex,
makeBoundedNPTSIndex,
makeBoundedUCBIndex,
makeETCIndex,
makeEXP3Index,
)
# Agents definition
# sphinx_gallery_thumbnail_number = 2
# Parameters of the problem
means = np.array([0.6, 0.6, 0.6, 0.9]) # means of the arms
A = len(means)
T = 2000 # Horizon
M = 10 # number of MC simu
# Construction of the experiment
env_ctor = BernoulliBandit
env_kwargs = {"p": means}
class UCBAgent(IndexAgent):
name = "UCB"
def __init__(self, env, **kwargs):
index, _ = makeBoundedUCBIndex()
IndexAgent.__init__(self, env, index, **kwargs)
self.env = WriterWrapper(
self.env, self.writer, write_scalar="action_and_reward"
)
class ETCAgent(IndexAgent):
name = "ETC"
def __init__(self, env, m=20, **kwargs):
index, _ = makeETCIndex(A, m)
IndexAgent.__init__(self, env, index, **kwargs)
self.env = WriterWrapper(
self.env, self.writer, write_scalar="action_and_reward"
)
class MOSSAgent(IndexAgent):
name = "MOSS"
def __init__(self, env, **kwargs):
index, _ = makeBoundedMOSSIndex(T, A)
IndexAgent.__init__(self, env, index, **kwargs)
self.env = WriterWrapper(
self.env, self.writer, write_scalar="action_and_reward"
)
class IMEDAgent(IndexAgent):
name = "IMED"
def __init__(self, env, **kwargs):
index, tracker_params = makeBoundedIMEDIndex()
IndexAgent.__init__(self, env, index, tracker_params=tracker_params, **kwargs)
self.env = WriterWrapper(
self.env, self.writer, write_scalar="action_and_reward"
)
class NPTSAgent(IndexAgent):
name = "NPTS"
def __init__(self, env, **kwargs):
index, tracker_params = makeBoundedNPTSIndex()
IndexAgent.__init__(self, env, index, tracker_params=tracker_params, **kwargs)
self.env = WriterWrapper(
self.env, self.writer, write_scalar="action_and_reward"
)
class EXP3Agent(RandomizedAgent):
name = "EXP3"
def __init__(self, env, **kwargs):
prob, tracker_params = makeEXP3Index()
RandomizedAgent.__init__(
self, env, prob, tracker_params=tracker_params, **kwargs
)
self.env = WriterWrapper(
self.env, self.writer, write_scalar="action_and_reward"
)
Agents_class = [
ETCAgent,
EXP3Agent,
IMEDAgent,
MOSSAgent,
NPTSAgent,
UCBAgent,
]
agents = [
AgentManager(
Agent,
(env_ctor, env_kwargs),
fit_budget=T,
n_fit=M,
parallelization="process",
mp_context="fork",
)
for Agent in Agents_class
]
# these parameters should give parallel computing even in notebooks
# Agent training
for agent in agents:
agent.fit()
# Compute and plot regret
def compute_regret(rewards):
return np.cumsum(np.max(means) - rewards)
# Compute and plot (pseudo-)regret
def compute_pseudo_regret(actions):
return np.cumsum(np.max(means) - means[actions.astype(int)])
output = plot_writer_data(
agents,
tag="action",
preprocess_func=compute_pseudo_regret,
title="Cumulative Pseudo-Regret",
)
output = plot_writer_data(
agents,
tag="reward",
preprocess_func=compute_regret,
title="Cumulative Regret",
)
# Compute and plot number of times each arm was selected
def compute_na(actions, a):
return np.cumsum(actions == a)
fig, axes = plt.subplots(2, 2, sharey=True, figsize=(6, 6))
axes = axes.ravel()
for arm in range(A):
output = plot_writer_data(
agents,
tag="action",
preprocess_func=lambda actions: compute_na(actions, arm),
title="Na for arm " + str(arm) + ", mean=" + str(means[arm]),
ax=axes[arm],
show=False,
)
fig.tight_layout()
plt.show()
| 24.60221
| 86
| 0.644285
|
79537817fb6a1dad4804313f694f55a7c60f370c
| 8,935
|
py
|
Python
|
powerline_shell/__init__.py
|
damageboy/powerline-shell
|
9395706d82a4073b0c2bf210ebf4953674813d17
|
[
"MIT"
] | null | null | null |
powerline_shell/__init__.py
|
damageboy/powerline-shell
|
9395706d82a4073b0c2bf210ebf4953674813d17
|
[
"MIT"
] | null | null | null |
powerline_shell/__init__.py
|
damageboy/powerline-shell
|
9395706d82a4073b0c2bf210ebf4953674813d17
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import argparse
import os
import sys
import importlib
import json
from .utils import warn, py3, import_file
import re
def _current_dir():
"""Returns the full current working directory as the user would have used
in their shell (ie. without following symbolic links).
With the introduction of Bash for Windows, we can't use the PWD environment
variable very easily. `os.sep` for windows is `\` but the PWD variable will
use `/`. So just always use the `os` functions for dealing with paths. This
also is fine because the use of PWD below is done to avoid following
symlinks, which Windows doesn't have.
For non-Windows systems, prefer the PWD environment variable. Python's
`os.getcwd` function follows symbolic links, which is undesirable."""
if os.name == "nt":
return os.getcwd()
return os.getenv("PWD") or os.getcwd()
def get_valid_cwd():
"""Determine and check the current working directory for validity.
    Typically, an invalid directory arises when you check out a different branch in git
that doesn't have this directory. When an invalid directory is found, a
warning is printed to the screen, but the directory is still returned
as-is, since this is what the shell considers to be the cwd."""
try:
cwd = _current_dir()
except:
warn("Your current directory is invalid. If you open a ticket at " +
"https://github.com/milkbikis/powerline-shell/issues/new " +
"we would love to help fix the issue.")
sys.stdout.write("> ")
sys.exit(1)
parts = cwd.split(os.sep)
up = cwd
while parts and not os.path.exists(up):
parts.pop()
up = os.sep.join(parts)
if cwd != up:
warn("Your current directory is invalid. Lowest valid directory: "
+ up)
return cwd
class Powerline(object):
symbols = {
'compatible': {
'lock': 'RO',
'network': 'SSH',
'separator': u'\u25B6',
'separator_thin': u'\u276F'
},
'patched': {
'lock': u'\uE0A2',
'network': 'SSH',
'separator': u'\uE0B0',
'separator_thin': u'\uE0B1'
},
'flat': {
'lock': u'\uE0A2',
'network': 'SSH',
'separator': '',
'separator_thin': ''
},
'angly1': {
'lock': u'\uE0A2',
'network': u'\uf983',
'separator': u'\uE0B8',
'separator_thin': u'\uE0B9'
},
'angly2': {
'lock': u'\uE0A2',
'network': u'\uf983',
'separator': u'\uE0BC',
'separator_thin': u'\uE0BD'
},
'curvy': {
'lock': u'\uE0A2',
'network': u'\uE0A2',
'separator': u'\uE0B4',
'separator_thin': u'\uE0B5'
},
'flames': {
'lock': u'\uE0A2',
'network': u'\uE0A2',
'separator': u'\uE0C0',
'separator_thin': u'\uE0C1'
},
'lego': {
'lock': u'\uE0A2',
'network': u'\uE0A2',
'separator': u'\uE0CE',
'separator_thin': u'\uE0CF'
},
'pixelated': {
'lock': u'\uE0A2',
'network': u'\uE0A2',
'separator': u'\uE0C6',
'separator_thin': u'\uE0C6'
}
}
color_templates = {
'bash': r'\[\e%s\]',
'tcsh': r'%%{\e%s%%}',
'zsh': '%%{%s%%}',
'bare': '%s',
}
def __init__(self, args, config, theme):
self.args = args
self.config = config
self.theme = theme
self.cwd = get_valid_cwd()
mode = config.get("mode", "patched")
self.color_template = self.color_templates[args.shell]
self.reset = self.color_template % '[0m'
self.lock = Powerline.symbols[mode]['lock']
self.network = Powerline.symbols[mode]['network']
self.separator = Powerline.symbols[mode]['separator']
self.separator_thin = Powerline.symbols[mode]['separator_thin']
self.segments = []
def segment_conf(self, seg_name, key, default=None):
return self.config.get(seg_name, {}).get(key, default)
def color(self, prefix, code):
if code is None:
return ''
elif code == self.theme.RESET:
return self.reset
else:
return self.color_template % ('[%s;5;%sm' % (prefix, code))
def fgcolor(self, code):
return self.color('38', code)
def bgcolor(self, code):
return self.color('48', code)
def append(self, content, fg, bg, separator=None, separator_fg=None, sanitize=True):
if self.args.shell == "bash" and sanitize:
content = re.sub(r"([`$])", r"\\\1", content)
self.segments.append((content, fg, bg,
separator if separator is not None else self.separator,
separator_fg if separator_fg is not None else bg))
def draw(self):
text = (''.join(self.draw_segment(i) for i in range(len(self.segments)))
+ self.reset) + ' '
if py3:
return text
else:
return text.encode('utf-8')
def draw_segment(self, idx):
segment = self.segments[idx]
next_segment = self.segments[idx + 1] if idx < len(self.segments)-1 else None
return ''.join((
self.fgcolor(segment[1]),
self.bgcolor(segment[2]),
segment[0],
self.bgcolor(next_segment[2]) if next_segment else self.reset,
self.fgcolor(segment[4]),
segment[3]))
def find_config():
for location in [
"powerline-shell.json",
"~/.powerline-shell.json",
os.path.join(os.environ.get("XDG_CONFIG_HOME", "~/.config"), "powerline-shell", "config.json"),
]:
full = os.path.expanduser(location)
if os.path.exists(full):
return full
DEFAULT_CONFIG = {
"segments": [
'virtual_env',
'username',
'hostname',
'ssh',
'cwd',
'git',
'hg',
'jobs',
'root',
]
}
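# Hedged example of a config file picked up by find_config() above (saved to
# e.g. ~/.powerline-shell.json; the segment and theme names are illustrative):
#   {
#     "segments": ["cwd", "git", "root"],
#     "theme": "default"
#   }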
class ModuleNotFoundException(Exception):
pass
class CustomImporter(object):
def __init__(self):
self.file_import_count = 0
def import_(self, module_prefix, module_or_file, description):
try:
mod = importlib.import_module(module_prefix + module_or_file)
except ImportError:
try:
module_name = "_custom_mod_{0}".format(self.file_import_count)
mod = import_file(module_name, os.path.expanduser(module_or_file))
self.file_import_count += 1
except (ImportError, IOError):
msg = "{0} {1} cannot be found".format(description, module_or_file)
                raise ModuleNotFoundException(msg)
return mod
def main():
arg_parser = argparse.ArgumentParser()
arg_parser.add_argument('--generate-config', action='store_true',
help='Generate the default config and print it to stdout')
arg_parser.add_argument('--shell', action='store', default='bash',
help='Set this to your shell type',
choices=['bash', 'tcsh', 'zsh', 'bare'])
arg_parser.add_argument('prev_error', nargs='?', type=int, default=0,
help='Error code returned by the last command')
args = arg_parser.parse_args()
if args.generate_config:
print(json.dumps(DEFAULT_CONFIG, indent=2))
return 0
config_path = find_config()
if config_path:
with open(config_path) as f:
try:
config = json.loads(f.read())
except Exception as e:
warn("Config file ({0}) could not be decoded! Error: {1}"
.format(config_path, e))
config = DEFAULT_CONFIG
else:
config = DEFAULT_CONFIG
custom_importer = CustomImporter()
theme_mod = custom_importer.import_(
"powerline_shell.themes.",
config.get("theme", "default"),
"Theme")
theme = getattr(theme_mod, "Color")
powerline = Powerline(args, config, theme)
segments = []
for seg_conf in config["segments"]:
if not isinstance(seg_conf, dict):
seg_conf = {"type": seg_conf}
seg_name = seg_conf["type"]
seg_mod = custom_importer.import_(
"powerline_shell.segments.",
seg_name,
"Segment")
segment = getattr(seg_mod, "Segment")(powerline, seg_conf)
segment.start()
segments.append(segment)
for segment in segments:
segment.add_to_powerline()
sys.stdout.write(powerline.draw())
return 0
| 32.02509
| 103
| 0.556128
|
79537a9523e9503864dfcfcf4413e5ea30661356
| 3,589
|
py
|
Python
|
detectors/all_rules_generator.py
|
stworekAsiorek/CRDiS
|
bb95860bbe75f84d7bac0b7ff982855ebaddf287
|
[
"MIT"
] | 1
|
2021-09-07T11:17:38.000Z
|
2021-09-07T11:17:38.000Z
|
detectors/all_rules_generator.py
|
KayJay624/SequenceGenerator
|
bb95860bbe75f84d7bac0b7ff982855ebaddf287
|
[
"MIT"
] | null | null | null |
detectors/all_rules_generator.py
|
KayJay624/SequenceGenerator
|
bb95860bbe75f84d7bac0b7ff982855ebaddf287
|
[
"MIT"
] | 1
|
2019-01-17T18:32:45.000Z
|
2019-01-17T18:32:45.000Z
|
# The MIT License
# Copyright (c) 2018 Kamil Jurek
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import numpy as np
from rules_generator import RulesGenerator
from rule_component import RuleComponent
from utils import *
class AllRulesGenerator(RulesGenerator):
def __init__(self, target_seq_index, window_size=0, round_to=100, combined=False):
super(AllRulesGenerator, self).__init__()
self.target_seq_index = target_seq_index
self.round_to = round_to
self.window_size = window_size
self.combined = combined
self.simulator = None
def generate_rules(self, window_begin, window_end, current_index):
generated_rules = [[] for i in range(len(self.simulator.sequences))]
for seq_index, change_point_list in enumerate(self.simulator.detected_change_points):
# if seq_index == self.target_seq_index:
# continue
generated_lhss = []
generated_rhss = []
points_before_window, points_in_window, points_after_window = \
self.get_change_points_in_window(seq_index, window_begin, window_end)
if not points_in_window:
generated_lhss = self.generate_lhss_for_empty_window(seq_index, points_before_window, window_begin, window_end)
else:
generated_lhss = self.generate_lhss(points_in_window, window_begin, window_end)
generated_rhss = self.generate_rhss()
if generated_lhss:
self.update_discovered_lhss(seq_index, current_index, generated_lhss)
self.generate_and_update_rules(seq_index, current_index,
generated_lhss, generated_rhss,
generated_rules)
if self.combined:
combined_rule = []
#[self.simulator.combined_rules.add((x,y,z)) if x.rhs == y.rhs and x.rhs == z.rhs
# else None for x in generated_rules[0] for y in generated_rules[1] for z in generated_rules[2]]
for seq_rules in generated_rules:
if seq_rules:
combined_rule.append(seq_rules[-1])
# print("seq_rule:", seq_rules[-1])
# for gr in seq_rules:
# print(gr)
# print("==============================================")
if len(combined_rule) > 0:
# print("Adding to combined rules")
self.simulator.combined_rules.add(tuple(combined_rule))
| 47.223684
| 127
| 0.656172
|
79537aee41e62f3c25efb16021971447f37811b5
| 2,475
|
py
|
Python
|
yt/frontends/moab/io.py
|
Xarthisius/yt
|
321643c3abff64a6f132d98d0747f3558f7552a3
|
[
"BSD-3-Clause-Clear"
] | 360
|
2017-04-24T05:06:04.000Z
|
2022-03-31T10:47:07.000Z
|
yt/frontends/moab/io.py
|
Xarthisius/yt
|
321643c3abff64a6f132d98d0747f3558f7552a3
|
[
"BSD-3-Clause-Clear"
] | 2,077
|
2017-04-20T20:36:07.000Z
|
2022-03-31T16:39:43.000Z
|
yt/frontends/moab/io.py
|
stonnes/yt
|
aad3cfa3b4ebab7838352ab467275a27c26ff363
|
[
"BSD-3-Clause-Clear"
] | 257
|
2017-04-19T20:52:28.000Z
|
2022-03-29T12:23:52.000Z
|
import numpy as np
from yt.funcs import mylog
from yt.utilities.io_handler import BaseIOHandler
def field_dname(field_name):
return f"/tstt/elements/Hex8/tags/{field_name}"
# TODO all particle bits were removed
class IOHandlerMoabH5MHex8(BaseIOHandler):
_dataset_type = "moab_hex8"
def __init__(self, ds):
super().__init__(ds)
self._handle = ds._handle
def _read_fluid_selection(self, chunks, selector, fields, size):
chunks = list(chunks)
assert len(chunks) == 1
fhandle = self._handle
rv = {}
for field in fields:
ftype, fname = field
rv[field] = np.empty(size, dtype=fhandle[field_dname(fname)].dtype)
ngrids = sum(len(chunk.objs) for chunk in chunks)
mylog.debug(
"Reading %s cells of %s fields in %s blocks",
size,
[fname for ft, fn in fields],
ngrids,
)
for field in fields:
ftype, fname = field
ds = np.array(fhandle[field_dname(fname)][:], dtype="float64")
ind = 0
for chunk in chunks:
for g in chunk.objs:
ind += g.select(selector, ds, rv[field], ind) # caches
return rv
class IOHandlerMoabPyneHex8(BaseIOHandler):
_dataset_type = "moab_hex8_pyne"
def _read_fluid_selection(self, chunks, selector, fields, size):
chunks = list(chunks)
assert len(chunks) == 1
rv = {}
pyne_mesh = self.ds.pyne_mesh
for field in fields:
rv[field] = np.empty(size, dtype="float64")
ngrids = sum(len(chunk.objs) for chunk in chunks)
mylog.debug(
"Reading %s cells of %s fields in %s blocks",
size,
[fname for ftype, fname in fields],
ngrids,
)
for field in fields:
ftype, fname = field
if pyne_mesh.structured:
tag = pyne_mesh.mesh.tag_get_handle("idx")
hex_list = [ent for ent in pyne_mesh.structured_iterate_hex()]
indices = pyne_mesh.mesh.tag_get_data(tag, hex_list).flatten()
else:
indices = slice(None)
ds = np.asarray(getattr(pyne_mesh, fname)[indices], "float64")
ind = 0
for chunk in chunks:
for g in chunk.objs:
ind += g.select(selector, ds, rv[field], ind) # caches
return rv
| 32.565789
| 79
| 0.564848
|
79537b4a0340de993843bcb755e46d14cdb672b9
| 1,173
|
py
|
Python
|
setup.py
|
tuxpiper/raduga
|
e63bdd8f9d4154c0ac0a72a1182da5d137e38514
|
[
"MIT"
] | null | null | null |
setup.py
|
tuxpiper/raduga
|
e63bdd8f9d4154c0ac0a72a1182da5d137e38514
|
[
"MIT"
] | null | null | null |
setup.py
|
tuxpiper/raduga
|
e63bdd8f9d4154c0ac0a72a1182da5d137e38514
|
[
"MIT"
] | null | null | null |
'''
@author: David Losada Carballo <david@tuxpiper.com>
'''
from setuptools import setup, find_packages
setup(
name = "raduga",
version = "0.0.7",
packages = find_packages(),
description = ("Infrastructure-as-code framework for AWS"),
author = "David Losada Carballo",
author_email = "david@tuxpiper.com",
install_requires = ['cloudcast>=0.1.1', 'docopt>=0.6.1', 'boto>=2.26.1', 'setuptools>=3.3', 'json_tools==0.3.3'],
license = 'MIT',
keywords = "aws internet cloud infrastructure deployment automation",
long_description = open('README.md').read(),
url = "http://github.com/tuxpiper/raduga",
zip_safe = False,
entry_points = {
'console_scripts': [
'raduga = raduga.main:main',
]
},
package_data = {
# Script files that contain initial bootstrap sequences
'raduga': ['cfn/*']
},
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"Topic :: System",
"Environment :: Console",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python"
],
)
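# Note (illustrative): the 'console_scripts' entry point above makes setuptools
# generate a `raduga` executable on install (e.g. after `pip install .`) that
# dispatches to raduga.main:main; the actual CLI commands are defined by that
# module, not by this setup script.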
| 30.076923
| 117
| 0.602728
|
79537bd4942df06473aa6d5f69c9e89612983b58
| 9,966
|
py
|
Python
|
tests/test_losses.py
|
blazejdolicki/vissl
|
9c10748a19fb1c637f32687142c8cd685f2410ff
|
[
"MIT"
] | 2,512
|
2021-01-27T18:44:44.000Z
|
2022-03-31T19:33:49.000Z
|
tests/test_losses.py
|
blazejdolicki/vissl
|
9c10748a19fb1c637f32687142c8cd685f2410ff
|
[
"MIT"
] | 361
|
2021-01-27T20:12:09.000Z
|
2022-03-31T12:39:34.000Z
|
tests/test_losses.py
|
blazejdolicki/vissl
|
9c10748a19fb1c637f32687142c8cd685f2410ff
|
[
"MIT"
] | 277
|
2021-01-29T08:09:02.000Z
|
2022-03-31T07:57:35.000Z
|
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import logging
import unittest
from collections import namedtuple
import torch
import torch.nn as nn
from classy_vision.generic.distributed_util import set_cpu_device
from parameterized import param, parameterized
from vissl.config import AttrDict
from vissl.losses.barlow_twins_loss import BarlowTwinsCriterion
from vissl.losses.cross_entropy_multiple_output_single_target import (
CrossEntropyMultipleOutputSingleTargetCriterion,
CrossEntropyMultipleOutputSingleTargetLoss,
)
from vissl.losses.multicrop_simclr_info_nce_loss import MultiCropSimclrInfoNCECriterion
from vissl.losses.simclr_info_nce_loss import SimclrInfoNCECriterion
from vissl.losses.swav_loss import SwAVCriterion
logger = logging.getLogger("__name__")
set_cpu_device()
BATCH_SIZE = 2048
EMBEDDING_DIM = 128
NUM_CROPS = 2
BUFFER_PARAMS_STRUCT = namedtuple(
"BUFFER_PARAMS_STRUCT", ["effective_batch_size", "world_size", "embedding_dim"]
)
BUFFER_PARAMS = BUFFER_PARAMS_STRUCT(BATCH_SIZE, 1, EMBEDDING_DIM)
class TestLossesForward(unittest.TestCase):
"""
Minimal testing of the losses: ensure that a forward pass with believable
    dimensions succeeds. This does not make them correct per se.
"""
@staticmethod
def _get_embedding():
return torch.ones([BATCH_SIZE, EMBEDDING_DIM])
def test_simclr_info_nce_loss(self):
loss_layer = SimclrInfoNCECriterion(
buffer_params=BUFFER_PARAMS, temperature=0.1
)
_ = loss_layer(self._get_embedding())
def test_multicrop_simclr_info_nce_loss(self):
loss_layer = MultiCropSimclrInfoNCECriterion(
buffer_params=BUFFER_PARAMS, temperature=0.1, num_crops=NUM_CROPS
)
embedding = torch.ones([BATCH_SIZE * NUM_CROPS, EMBEDDING_DIM])
_ = loss_layer(embedding)
def test_swav_loss(self):
loss_layer = SwAVCriterion(
temperature=0.1,
crops_for_assign=[0, 1],
num_crops=2,
num_iters=3,
epsilon=0.05,
use_double_prec=False,
num_prototypes=[3000],
local_queue_length=0,
embedding_dim=EMBEDDING_DIM,
temp_hard_assignment_iters=0,
output_dir="",
)
_ = loss_layer(scores=self._get_embedding(), head_id=0)
def test_barlow_twins_loss(self):
loss_layer = BarlowTwinsCriterion(
lambda_=0.0051, scale_loss=0.024, embedding_dim=EMBEDDING_DIM
)
_ = loss_layer(self._get_embedding())
class TestBarlowTwinsCriterion(unittest.TestCase):
"""
Specific tests on Barlow Twins going further than just doing a forward pass
"""
def test_barlow_twins_backward(self):
EMBEDDING_DIM = 3
criterion = BarlowTwinsCriterion(
lambda_=0.0051, scale_loss=0.024, embedding_dim=EMBEDDING_DIM
)
embeddings = torch.randn((4, EMBEDDING_DIM), requires_grad=True)
self.assertTrue(embeddings.grad is None)
criterion(embeddings).backward()
self.assertTrue(embeddings.grad is not None)
with torch.no_grad():
next_embeddings = embeddings - embeddings.grad # gradient descent
self.assertTrue(criterion(next_embeddings) < criterion(embeddings))
class TestSimClrCriterion(unittest.TestCase):
"""
Specific tests on SimCLR going further than just doing a forward pass
"""
def test_simclr_info_nce_masks(self):
BATCH_SIZE = 4
WORLD_SIZE = 2
buffer_params = BUFFER_PARAMS_STRUCT(
BATCH_SIZE * WORLD_SIZE, WORLD_SIZE, EMBEDDING_DIM
)
criterion = SimclrInfoNCECriterion(buffer_params=buffer_params, temperature=0.1)
self.assertTrue(
criterion.pos_mask.equal(
torch.tensor(
[
[0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0],
[1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
]
)
)
)
self.assertTrue(
criterion.neg_mask.equal(
torch.tensor(
[
[0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0],
[1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0],
[0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0],
[1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0],
]
)
)
)
def test_simclr_backward(self):
EMBEDDING_DIM = 3
BATCH_SIZE = 4
WORLD_SIZE = 1
buffer_params = BUFFER_PARAMS_STRUCT(
BATCH_SIZE * WORLD_SIZE, WORLD_SIZE, EMBEDDING_DIM
)
criterion = SimclrInfoNCECriterion(buffer_params=buffer_params, temperature=0.1)
embeddings = torch.tensor(
[[1.0, 0.0, 1.0], [0.0, 1.0, 0.0], [1.0, 0.0, 1.0], [0.0, 1.0, 0.0]],
requires_grad=True,
)
self.assertTrue(embeddings.grad is None)
criterion(embeddings).backward()
self.assertTrue(embeddings.grad is not None)
print(embeddings.grad)
with torch.no_grad():
next_embeddings = embeddings - embeddings.grad # gradient descent
self.assertTrue(criterion(next_embeddings) < criterion(embeddings))
class TestCrossEntropyMultipleOutputSingleTargetLoss(unittest.TestCase):
@parameterized.expand(
[param(batch_size=1, target_count=2), param(batch_size=16, target_count=10)]
)
def test_single_input_single_target(self, batch_size: int, target_count: int):
torch.random.manual_seed(0)
logits = torch.randn(size=(batch_size, target_count))
target = torch.randint(0, target_count, size=(batch_size,))
ref_criterion = nn.CrossEntropyLoss()
criterion = CrossEntropyMultipleOutputSingleTargetCriterion()
self.assertEqual(criterion(logits, target), ref_criterion(logits, target))
@parameterized.expand(
[
param(batch_size=1, target_count=2, input_count=1),
param(batch_size=16, target_count=10, input_count=2),
]
)
def test_multiple_inputs_single_target(
self, batch_size: int, target_count: int, input_count: int
):
torch.random.manual_seed(0)
logits = [
torch.randn(size=(batch_size, target_count)) for _ in range(input_count)
]
target = torch.randint(0, target_count, size=(batch_size,))
ref_criterion = nn.CrossEntropyLoss()
ref_loss = sum(ref_criterion(logits[i], target) for i in range(input_count))
criterion = CrossEntropyMultipleOutputSingleTargetCriterion()
self.assertEqual(criterion(logits, target), ref_loss)
def test_multiple_targets_for_label_smoothing(self):
targets = torch.tensor([[0.8, 0.1, 0.1], [0.1, 0.8, 0.1], [0.1, 0.1, 0.8]])
logits = torch.tensor([[10.0, 0.0, 0.0], [0.0, 10.0, 0.0], [0.0, 0.0, 10.0]])
criterion = CrossEntropyMultipleOutputSingleTargetCriterion()
expected = (
(-torch.log(nn.Softmax(dim=-1)(logits)) * targets).sum(dim=1).mean().item()
)
self.assertAlmostEqual(criterion(logits, targets).item(), expected)
def test_label_smoothing_target_transformation(self):
target = torch.tensor([0, 1, 2], dtype=torch.int64)
smoothed = (
CrossEntropyMultipleOutputSingleTargetCriterion.apply_label_smoothing(
target=target, num_labels=4, label_smoothing=0.1
)
)
expected = torch.tensor(
[
[0.9250, 0.0250, 0.0250, 0.0250],
[0.0250, 0.9250, 0.0250, 0.0250],
[0.0250, 0.0250, 0.9250, 0.0250],
]
)
self.assertTrue(torch.allclose(expected, smoothed))
@parameterized.expand(
[param(batch_size=1, target_count=2), param(batch_size=16, target_count=10)]
)
def test_single_target_label_smoothing(self, batch_size: int, target_count: int):
torch.random.manual_seed(0)
logits = torch.randn(size=(batch_size, target_count))
target = torch.randint(0, target_count, size=(batch_size,))
# Verify that label smoothing is supported in forward pass
criterion = CrossEntropyMultipleOutputSingleTargetCriterion(label_smoothing=0.1)
loss = criterion(logits, target)
self.assertTrue(loss.item() > 0.0)
@parameterized.expand(
[
param(temperature=0.1, normalize_output=False, label_smoothing=0.0),
param(temperature=1.0, normalize_output=True, label_smoothing=0.0),
param(temperature=2.0, normalize_output=False, label_smoothing=0.5),
]
)
def test_configuration(
self,
temperature: float,
normalize_output: bool,
label_smoothing: float,
batch_size: int = 16,
target_count: int = 10,
):
torch.random.manual_seed(0)
logits = torch.randn(size=(batch_size, target_count))
target = torch.randint(0, target_count, size=(batch_size,))
criterion_ref = CrossEntropyMultipleOutputSingleTargetCriterion(
temperature=temperature,
normalize_output=normalize_output,
label_smoothing=label_smoothing,
)
config = AttrDict(
{
"temperature": temperature,
"normalize_output": normalize_output,
"label_smoothing": label_smoothing,
}
)
criterion = CrossEntropyMultipleOutputSingleTargetLoss(config)
self.assertEqual(criterion(logits, target), criterion_ref(logits, target))
| 37.466165
| 88
| 0.630544
|
79537c47b0d41b0be1a92a3b9d8e1f8fa41b8e09
| 1,141
|
py
|
Python
|
setup.py
|
acpaquette/pysis
|
c0d84e6e5a803e11844a866ef5e30c6eee63b01b
|
[
"BSD-3-Clause"
] | 11
|
2015-05-13T01:02:59.000Z
|
2021-12-24T02:27:02.000Z
|
setup.py
|
acpaquette/pysis
|
c0d84e6e5a803e11844a866ef5e30c6eee63b01b
|
[
"BSD-3-Clause"
] | 20
|
2015-04-19T23:43:34.000Z
|
2020-07-31T23:22:06.000Z
|
setup.py
|
acpaquette/pysis
|
c0d84e6e5a803e11844a866ef5e30c6eee63b01b
|
[
"BSD-3-Clause"
] | 8
|
2016-07-19T20:51:25.000Z
|
2020-03-24T04:58:25.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
readme = open('README.rst').read()
history = open('HISTORY.rst').read().replace('.. :changelog:', '')
setup(
name='pysis',
version='0.6.1',
description='Toolkit for using USGS Isis in Python.',
long_description=readme + '\n\n' + history,
author='Trevor Olson',
author_email='trevor@heytrevor.com',
url='https://github.com/wtolson/pysis',
packages=find_packages(exclude=["tests"]),
include_package_data=True,
license='BSD',
zip_safe=False,
keywords='pysis',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.6'
]
)
| 31.694444
| 66
| 0.607362
|
79537c81f1ed0b2781f20d041444a1222898ba98
| 2,235
|
py
|
Python
|
gym_minigrid/envs/lavagap.py
|
floringogianu/gym-minigrid
|
0403f4dc003c393db8111434e58678ccb023eb8d
|
[
"BSD-3-Clause"
] | null | null | null |
gym_minigrid/envs/lavagap.py
|
floringogianu/gym-minigrid
|
0403f4dc003c393db8111434e58678ccb023eb8d
|
[
"BSD-3-Clause"
] | null | null | null |
gym_minigrid/envs/lavagap.py
|
floringogianu/gym-minigrid
|
0403f4dc003c393db8111434e58678ccb023eb8d
|
[
"BSD-3-Clause"
] | null | null | null |
from gym_minigrid.minigrid import *
from gym_minigrid.register import register
class LavaGapEnv(MiniGridEnv):
"""
Environment with one wall of lava with a small gap to cross through
This environment is similar to LavaCrossing but simpler in structure.
"""
def __init__(self, size, obstacle_type=Lava, seed=None):
self.obstacle_type = obstacle_type
super().__init__(
grid_size=size,
max_steps=4*size*size,
# Set this to True for maximum speed
see_through_walls=False,
            seed=seed
)
def _gen_grid(self, width, height):
assert width >= 5 and height >= 5
# Create an empty grid
self.grid = Grid(width, height)
# Generate the surrounding walls
self.grid.wall_rect(0, 0, width, height)
# Place the agent in the top-left corner
self.agent_pos = (1, 1)
self.agent_dir = 0
# Place a goal square in the bottom-right corner
self.goal_pos = np.array((width - 2, height - 2))
self.grid.set(*self.goal_pos, Goal())
# Generate and store random gap position
self.gap_pos = np.array((
self._rand_int(2, width - 2),
self._rand_int(1, height - 1),
))
# Place the obstacle wall
self.grid.vert_wall(self.gap_pos[0], 1, height - 2, self.obstacle_type)
# Put a hole in the wall
self.grid.set(*self.gap_pos, None)
self.mission = (
"avoid the lava and get to the green goal square"
if self.obstacle_type == Lava
else "find the opening and get to the green goal square"
)
class LavaGapS5Env(LavaGapEnv):
def __init__(self):
super().__init__(size=5)
class LavaGapS6Env(LavaGapEnv):
def __init__(self):
super().__init__(size=6)
class LavaGapS7Env(LavaGapEnv):
def __init__(self):
super().__init__(size=7)
register(
id='MiniGrid-LavaGapS5-v0',
entry_point='gym_minigrid.envs:LavaGapS5Env'
)
register(
id='MiniGrid-LavaGapS6-v0',
entry_point='gym_minigrid.envs:LavaGapS6Env'
)
register(
id='MiniGrid-LavaGapS7-v0',
entry_point='gym_minigrid.envs:LavaGapS7Env'
)
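# Usage sketch (illustrative, not part of this module): importing the package
# runs the register() calls above, after which the environments can be built by
# id with the standard gym API, e.g.
#
#     import gym
#     import gym_minigrid  # noqa: F401 -- triggers registration
#
#     env = gym.make('MiniGrid-LavaGapS5-v0')
#     obs = env.reset()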
| 27.592593
| 79
| 0.624609
|
79537d8f1a33508e311f0c1c3b9ec001ea8b57cf
| 13,996
|
py
|
Python
|
django_lazifier/utils/builtin_types/list.py
|
hotmit/django-lazifier
|
a2914b7ced955fa91b1961025e1d3ccc7ac3702a
|
[
"MIT"
] | 1
|
2017-04-27T19:25:34.000Z
|
2017-04-27T19:25:34.000Z
|
django_lazifier/utils/builtin_types/list.py
|
hotmit/django-lazifier
|
a2914b7ced955fa91b1961025e1d3ccc7ac3702a
|
[
"MIT"
] | null | null | null |
django_lazifier/utils/builtin_types/list.py
|
hotmit/django-lazifier
|
a2914b7ced955fa91b1961025e1d3ccc7ac3702a
|
[
"MIT"
] | null | null | null |
from collections import OrderedDict
import random
from django_lazifier.utils.builtin_types.obj import Obj
from django_lazifier.utils.builtin_types.str import Str
from django_lazifier.utils.utils import log_exception
class Lst:
@classmethod
def get_random(cls, the_list: list, pop=False, default_value=None):
"""
Get one item at random in the specified list.
:param the_list:
:param pop:
:param default_value:
:return:
"""
if not the_list:
return default_value
length = len(the_list)
rand_index = random.randint(0, length-1)
if pop:
return the_list.pop(rand_index)
return the_list[rand_index]
@classmethod
def casefold(cls, str_list):
"""
Pass each string element through str.casefold()
:param str_list:
:return:
"""
return [str(x).casefold() for x in str_list]
@classmethod
def convert_to_int(cls, str_list):
"""
Convert a list of string into a list of int
:param str_list: ["1", "2.99", "0.11"] => [1, 3, 0]
:return: []
"""
if not str_list:
return []
int_list = []
for s in str_list:
val = Str.int_val(s, None)
if val is not None:
int_list.append(val)
return int_list
@classmethod
def convert_to_str(cls, the_list):
"""
Convert a list of object into a list of string
:return: []
"""
if not the_list:
return []
result = []
for s in the_list:
if s is not None:
result.append(s.__str__())
return result
@classmethod
def strip_string(cls, the_list, chars=None):
"""
Trim the list of strings.
:param the_list:
:param chars:
:return:
"""
the_list = Lst.convert_to_str(the_list)
return [elm.strip(chars) for elm in the_list]
@classmethod
def group_by(cls, the_list, group, none_value_label='None', flat=False):
"""
Group the list by the group specified.
eg. Lst.group_by(seats, 'zone.name', 'do not belong to a zone')
=> { 'zone1': [seat1, seat2]
'zone2': [seat7, seat8]
'do not belong to a zone': [seat3, seat4, seat5]
}
:param the_list:
:param group: {str|def} name of the attribute, support dot notation group_by(persons, 'contact.phone')
:param none_value_label: the value of the column specified is None then use this label as the key.
:param flat: if true only take the last item for each group and put it in the result dict
:rtype: dict
"""
result = OrderedDict()
for row in the_list:
if callable(group):
col_value = group(row)
else:
col_value = Obj.getattr(row, group, None)
if col_value is None:
col_value = none_value_label
if not flat and col_value not in result:
result[col_value] = []
if flat:
result[col_value] = row
else:
result[col_value].append(row)
return result
@classmethod
def multi_group_by(cls, the_list, none_value_label, group_names: list):
"""
Provide a drilled down version of the data.
eg. Lst.multi_group_by(sensors, _('Unassigned'), ['facility__id', 'zone__id'])
        return { facility_1: { zone_1: [ {sensor_1}, {sensor_2} ],
                               zone_2: [ {sensor_3}, {sensor_4} ] } }
:type the_list: list|QuerySet|ValuesQuerySet
:param the_list: list, QuerySet or ValuesQuerySet
:type none_value_label: str|None|object
:param none_value_label: the value to use if the column value is None
:param group_names: the list of columns to group by
:return: List
"""
if type(group_names) == str:
group_names = [group_names]
if not isinstance(group_names, list):
raise ValueError('The argument group_names must be a list of all the columns you want to group.')
group_names = group_names.copy()
if group_names:
col = group_names.pop(0)
result = Lst.group_by(the_list, col, none_value_label)
if group_names:
for col, rows in result.items():
result[col] = Lst.multi_group_by(rows, none_value_label, group_names)
return result
return OrderedDict()
@classmethod
def tuple_multi_group_by(cls, the_list, none_value_label, group_names: list):
"""
Similarly to multi_group_by but instead of use the value of the specified columns
as a key it combine all the keys together in one tuple as key.
eg. sensors = Sensor.objects.values(**columns)
Lst.tuple_multi_group_by(sensors, 'None', ['facility__id', 'zone__id'])
        return { (facility_1, zone_1): [ sensor1, sensor2 ],
                 (facility_1, zone_2): [ sensor3 ],
                 (facility_2, zone_3): [ sensor4 ] }
:type the_list: list|QuerySet|ValuesQuerySet
:param the_list: list, QuerySet or ValuesQuerySet
:param none_value_label: the value to use if the column value is None
:param group_names: the list of columns to group by
:return: List
"""
if type(group_names) == str:
group_names = [group_names]
if not isinstance(group_names, list):
raise ValueError('The argument group_names must be a list of all the fields you want to group.')
group_names = group_names.copy()
if group_names:
result = OrderedDict()
first_grp_val = group_names.pop(0) # pop at the start
first_group_by = Lst.group_by(the_list, first_grp_val, none_value_label)
if group_names:
for col, rows in first_group_by.items():
tuple_list = Lst.tuple_multi_group_by(rows, none_value_label, group_names)
for k, t in tuple_list.items():
result[(col,) + k] = t
else:
for k, v in first_group_by.items():
result[(k,)] = v
return result
return OrderedDict()
@classmethod
def all(cls, the_list, func, **kwargs):
"""
Return True if all is True, else False.
Similar to all() but its accept a lambda.
:param the_list:
:param func: lambda that return bool
:param kwargs: any additional params for func
:return:
"""
for i in the_list:
if not func(i, **kwargs):
return False
return True
@classmethod
def any(cls, the_list, func, **kwargs):
"""
Return True if any is True, else False.
Similar to any() but its accept a lambda.
:param the_list:
:param func: lambda that return bool
:param kwargs: any additional params for func
:return:
"""
for i in the_list:
if func(i, **kwargs):
return True
return False
@classmethod
def prep_select_optgroups(cls, the_list, opt_groups: list, value_attr, display_attr, none_value_label, sort_result=False):
"""
Prep list to be use as a choice for the ChoiceField
eg. sensor_choices = Lst.prep_select_optgroups(sensors, ['facility.name', 'zone.name'],
'id', 'sensor_name', _('Unassigned Sensors'))
:param the_list: ValueQuerySet, QuerySet or list
:param opt_groups: the group column/attr name or index
:param value_attr: the option value
:param display_attr: the option display text
:return:
"""
groups = Lst.tuple_multi_group_by(the_list, none_value_label, opt_groups)
if groups:
result = []
for tp, arr in groups.items():
og_header = ' > '.join(tp)
og_list = []
for row in arr:
og_list.append((Obj.getattr(row, value_attr), Obj.getattr(row, display_attr),))
result.append((og_header, tuple(og_list),))
return tuple(result)
if sort_result:
return sorted(groups)
return groups
@classmethod
def get_unique(cls, the_list, default_value=None, unique_attr=None):
"""
Get a list of unique values in the list, default_value is [] if default_value is set to None.
:param the_list:
:param default_value: if none value is []
:param unique_attr: select your own unique attribute (in case when the object is unhashable
or you want your own attr)
:rtype list
"""
if default_value is None:
default_value = []
if not the_list:
return default_value
try:
# Src: http://stackoverflow.com/questions/480214
# /how-do-you-remove-duplicates-from-a-list-in-python-whilst-preserving-order
# Src: http://www.peterbe.com/plog/uniqifiers-benchmark
if unique_attr is None:
added_list = set()
add_to_added_list = added_list.add # this static ref for performance reason
return [x for x in the_list if not (x in added_list or add_to_added_list(x))]
result = []
existed_item = {} # dict is much faster than list when checking existence of a key
for itm in the_list:
key = Obj.getattr(itm, unique_attr)
if key not in existed_item:
result.append(itm)
existed_item[key] = None
return result
except Exception as ex:
log_exception(ex)
return default_value
@classmethod
def reverse(cls, the_list: list):
"""
Reverse the order of the items in the list.
:param the_list:
:return:
"""
        if not the_list:
            return []
# return list(reversed(the_list))
return the_list[::-1]
@classmethod
def contains_all(cls, the_list, *args):
"""
Check to see if the_list contains all of the args
:param the_list: the haystack
:param args: the needle
:return:
"""
return Lst.all(args, lambda x: x in the_list)
@classmethod
def contains_any(cls, the_list, *args):
"""
Check to see if the_list contains any of the args
:param the_list: the haystack
:param args: the needle
:return:
"""
return Lst.any(args, lambda x: x in the_list)
@classmethod
def unordered_list_equals(cls, lst_a, lst_b):
if not isinstance(lst_a, list) or not isinstance(lst_b, list):
return False
if lst_a == lst_b:
return True
if len(lst_a) != len(lst_b):
return False
return set(lst_a) == set(lst_b)
@classmethod
def str_join(cls, lst, separator=', ', value_attr: str=None):
if not lst:
return ''
str_list = []
for itm in lst:
if value_attr is not None:
itm = Obj.getattr(itm, value_attr)
itm = str(itm)
str_list.append(itm)
return separator.join(str_list)
@classmethod
def chunks(cls, lst, chunk_size, pad_with=None):
"""
Split the list into chunks.
eg. [1, 2, 3, 4, 5] (chunk == 2) => result [ [1, 2], [3, 4], [5] ]
"""
result = []
for i in range(0, len(lst), chunk_size):
result.append(lst[i:i + chunk_size])
if result and pad_with is not None and len(result[-1]) != chunk_size:
result[-1] = result[-1] + ([pad_with] * (chunk_size - len(result[-1])))
return result
@classmethod
def get_first(cls, the_list, default_value=None):
"""
Get the first item of the list.
:param the_list:
:param default_value:
:return:
"""
if the_list:
for itm in the_list:
return itm
return default_value
@classmethod
def map_to(cls, the_list, attribs: list, default_value=None, execute_callable=True):
"""
Go through the list and extract the specified attributes
:param the_list:
:param attribs:
:type default_value: object|dict
:param default_value: either a value for all fields default or pass a dict to supply specific default value.
:return: List of value lists
"""
result = []
if not the_list:
return result
for itm in the_list:
row = []
for att in attribs:
specific_default = default_value
if isinstance(default_value, dict) and att in default_value:
specific_default = default_value.get(att, default_value)
value = Obj.getattr(itm, att, specific_default, execute_callable=execute_callable)
row.append(value)
result.append(row)
return result
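# Example (illustrative; `users` and the 'profile.email' attribute path are
# hypothetical, relying on Obj.getattr's dot-notation support):
#     rows = Lst.map_to(users, ['id', 'profile.email'], default_value='')
#     # -> [[1, 'a@example.com'], [2, ''], ...]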
| 33.88862
| 127
| 0.544656
|
79537e4fbaed0761f44fac8c9b5ffbaaa531a12b
| 820
|
py
|
Python
|
python/y2019/d01/day01.py
|
luke-dixon/aoc
|
94851a5866a1ef29e3ba10098160cba883882683
|
[
"MIT"
] | 1
|
2021-01-12T20:04:01.000Z
|
2021-01-12T20:04:01.000Z
|
python/y2019/d01/day01.py
|
luke-dixon/aoc
|
94851a5866a1ef29e3ba10098160cba883882683
|
[
"MIT"
] | null | null | null |
python/y2019/d01/day01.py
|
luke-dixon/aoc
|
94851a5866a1ef29e3ba10098160cba883882683
|
[
"MIT"
] | null | null | null |
from typing import List
from lib import puzzle
def calc_fuel(x: int, recursive: bool = False):
fuel_required = x // 3
fuel_required -= 2
if recursive and fuel_required > 0:
more_fuel = calc_fuel(fuel_required, recursive=recursive)
if more_fuel > 0:
fuel_required += more_fuel
return fuel_required
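# Worked example (values from the Advent of Code 2019 day 1 statement):
# a module of mass 1969 needs 654 units of fuel directly, and 966 in total once
# the fuel's own mass is accounted for recursively:
#     calc_fuel(1969)                  # -> 654
#     calc_fuel(1969, recursive=True)  # -> 966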
def part1(data: List[int]) -> int:
return sum(calc_fuel(x) for x in data)
def part2(data: List[int]) -> int:
return sum(calc_fuel(x, recursive=True) for x in data)
class Day01(puzzle.Puzzle):
year = '2019'
day = '1'
def get_data(self) -> List[int]:
return [int(x) for x in self.input_data.splitlines()]
def run(self):
print(f'Answer part 1: {part1(self.get_data())}')
print(f'Answer part 2: {part2(self.get_data())}')
| 22.777778
| 65
| 0.635366
|
79537ffea4d5375276bfc704c798294f6a262693
| 113
|
py
|
Python
|
functions.py
|
rickyu/zspam
|
f8fffff197c4ed877eafe59ab2f3b9a6d9d7a203
|
[
"MIT"
] | null | null | null |
functions.py
|
rickyu/zspam
|
f8fffff197c4ed877eafe59ab2f3b9a6d9d7a203
|
[
"MIT"
] | null | null | null |
functions.py
|
rickyu/zspam
|
f8fffff197c4ed877eafe59ab2f3b9a6d9d7a203
|
[
"MIT"
] | null | null | null |
#encoding:utf-8
from datetime import datetime
def user_register_time(mid):
return datetime.now().toordinal()
| 22.6
| 37
| 0.778761
|
79538060073eb0974c1a87c7e7ddf25ac2814e61
| 159
|
py
|
Python
|
exercises/while_loops.py
|
gaya19-meet/y2s18-python_review
|
d867248bc15c4c61fd0ee4535b3c8cb0a6c47eaa
|
[
"MIT"
] | null | null | null |
exercises/while_loops.py
|
gaya19-meet/y2s18-python_review
|
d867248bc15c4c61fd0ee4535b3c8cb0a6c47eaa
|
[
"MIT"
] | null | null | null |
exercises/while_loops.py
|
gaya19-meet/y2s18-python_review
|
d867248bc15c4c61fd0ee4535b3c8cb0a6c47eaa
|
[
"MIT"
] | null | null | null |
# Write your solution for 1.3 here!
counter = 0
sum = 0
while sum < 10000:
counter = counter + 1
sum = sum + counter
print (sum)
print (counter)
| 7.571429
| 35
| 0.622642
|
795380d85781f0b183dae973c1626b2ffe6f1fea
| 3,205
|
py
|
Python
|
trello/member.py
|
dmwyatt/py-trello
|
2ad38e7da8b2ed2b57cc589c6886bf891a0c6a20
|
[
"BSD-3-Clause"
] | 2
|
2021-12-02T11:41:02.000Z
|
2021-12-27T12:01:53.000Z
|
trello/member.py
|
dmwyatt/py-trello
|
2ad38e7da8b2ed2b57cc589c6886bf891a0c6a20
|
[
"BSD-3-Clause"
] | null | null | null |
trello/member.py
|
dmwyatt/py-trello
|
2ad38e7da8b2ed2b57cc589c6886bf891a0c6a20
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import with_statement, print_function, absolute_import
from trello import TrelloBase
from trello.compat import force_str
class Member(TrelloBase):
"""
Class representing a Trello member.
"""
def __init__(self, client, member_id, full_name=''):
super(Member, self).__init__()
self.client = client
self.id = member_id
self.full_name = full_name
def __repr__(self):
return force_str(u'<Member %s>' % self.id)
def fetch(self):
"""Fetch all attributes for this member"""
json_obj = self.client.fetch_json(
'/members/' + self.id,
query_params={'badges': False})
self.status = json_obj['status']
self.id = json_obj.get('id', '')
self.bio = json_obj.get('bio', '')
self.url = json_obj.get('url', '')
self.username = json_obj['username']
self.full_name = json_obj['fullName']
self.initials = json_obj['initials']
return self
def fetch_comments(self):
if self.badges['comments'] > 0:
comments = self.client.fetch_json(
'/members/' + self.id + '/actions',
query_params={'filter': 'commentCard'})
return sorted(comments, key=lambda comment: comment['date'])
return []
def fetch_cards(self):
""" Fetches all the cards for this member """
cards = self.client.fetch_json(
'/members/' + self.id + '/cards',
query_params={'filter': 'visible'})
return sorted(cards, key=lambda card: card['dateLastActivity'])
def fetch_notifications(self, filters = []):
""" Fetches all the notifications for this member """
notifications = self.client.fetch_json(
'/members/' + self.id + '/notifications',
query_params={'filter': ",".join(filters)})
return sorted(notifications, key=lambda notification: notification['date'])
def get_boards(self, list_filter):
"""Get boards using filter
:rtype: list of Board
"""
from trello.board import Board
from trello.organization import Organization
json_obj = self.client.fetch_json(
'/members/' + self.id + '/boards',
query_params={'lists': 'none', 'filter': list_filter})
organizations = {obj['idOrganization']: self.client.get_organization(obj['idOrganization']) for obj in json_obj if obj['idOrganization']}
return [Board.from_json(trello_client=self.client, organization=organizations.get(obj['idOrganization']), json_obj=obj) for obj in json_obj]
@classmethod
def from_json(cls, trello_client, json_obj):
"""
Deserialize the organization json object to a member object
:trello_client: the trello client
:json_obj: the member json object
"""
member = Member(trello_client, json_obj['id'], full_name=json_obj['fullName'])
member.username = json_obj.get('username', '')
member.initials = json_obj.get('initials', '')
# cannot close an organization
# organization.closed = json_obj['closed']
return member
| 37.267442
| 148
| 0.615913
|
795381ebf8db57bdd4e29dfeb2a9c96e74f0fa48
| 6,589
|
py
|
Python
|
_unittests/ut_special/test_rue_paris.py
|
mohamedelkansouli/Ensae_py
|
8bc867bd2081c259c793fadfa8be5dcc7bd1400b
|
[
"MIT"
] | null | null | null |
_unittests/ut_special/test_rue_paris.py
|
mohamedelkansouli/Ensae_py
|
8bc867bd2081c259c793fadfa8be5dcc7bd1400b
|
[
"MIT"
] | null | null | null |
_unittests/ut_special/test_rue_paris.py
|
mohamedelkansouli/Ensae_py
|
8bc867bd2081c259c793fadfa8be5dcc7bd1400b
|
[
"MIT"
] | null | null | null |
"""
@brief test log(time=25s)
"""
import os
import sys
import unittest
from pyquickhelper.loghelper import fLOG
from pyquickhelper.pycode import fix_tkinter_issues_virtualenv
from pyensae.datasource import download_data
try:
import src
except ImportError:
path = os.path.normpath(
os.path.abspath(
os.path.join(
os.path.split(__file__)[0],
"..",
"..")))
if path not in sys.path:
sys.path.append(path)
import src
from src.ensae_teaching_cs.special.rues_paris import get_data, bellman, kruskal, possible_edges, distance_haversine, graph_degree
from src.ensae_teaching_cs.special.rues_paris import eulerien_extension, distance_paris, euler_path, connected_components
class TestRueParis (unittest.TestCase):
def test_get_data(self):
fLOG(
__file__,
self._testMethodName,
OutputPrint=__name__ == "__main__")
folder = os.path.join(
os.path.abspath(
os.path.dirname(__file__)),
"temp_rues")
if not os.path.exists(folder):
os.mkdir(folder)
for ext in [".txt", ".zip"]:
f = os.path.join(folder, "paris_54000" + ext)
if os.path.exists(f):
os.remove(f)
try:
data = get_data(whereTo=folder, fLOG=fLOG, timeout=60)
except Exception as e:
if "unable to retrieve data" in str(e):
return
else:
raise Exception("*****" + str(e) + "*****") from e
fLOG(len(data))
assert len(data) > 0
total = sum(_[-1] for _ in data)
fLOG("total length", total)
def test_algo(self):
fLOG(
__file__,
self._testMethodName,
OutputPrint=__name__ == "__main__")
folder = os.path.join(
os.path.abspath(
os.path.dirname(__file__)),
"temp_algo")
if not os.path.exists(folder):
os.mkdir(folder)
edges = get_data(whereTo=folder, fLOG=fLOG)
edges = edges[:1000]
max_segment = max(e[-1] for e in edges)
possibles = possible_edges(edges, max_segment / 8, fLOG=fLOG)
init = bellman(edges, fLOG=fLOG, allow=lambda e: e in possibles)
fLOG("---")
init = bellman(
edges,
fLOG=fLOG,
allow=lambda e: e in possibles,
init=init)
fLOG("---")
added = kruskal(edges, init, fLOG=fLOG)
d = graph_degree(edges + added)
allow = sorted([k for k, v in d.items() if v % 2 == 1])
fLOG("degrees", allow)
allow = set(allow)
fLOG("---")
init = bellman(edges, fLOG=fLOG,
allow=lambda e: e in possibles or e[
0] in allow or e[1] in allow,
init=init)
fLOG("---")
added = kruskal(edges, init, fLOG=fLOG)
d = graph_degree(edges + added)
allow = sorted([k for k, v in d.items() if v % 2 == 1])
fLOG("degrees", allow)
def test_algo2(self):
fLOG(
__file__,
self._testMethodName,
OutputPrint=__name__ == "__main__")
folder = os.path.join(
os.path.abspath(
os.path.dirname(__file__)),
"temp_algo2")
if not os.path.exists(folder):
os.mkdir(folder)
edges = get_data(whereTo=folder, fLOG=fLOG)
edges = edges[:1000]
added = eulerien_extension(edges, fLOG=fLOG, alpha=1 / 8)
assert len(added) > 0
fLOG("nb added", len(added))
def test_euler(self):
fLOG(
__file__,
self._testMethodName,
OutputPrint=__name__ == "__main__")
folder = os.path.join(
os.path.abspath(
os.path.dirname(__file__)),
"temp_rues_euler")
if not os.path.exists(folder):
os.mkdir(folder)
edges = get_data(whereTo=folder, fLOG=fLOG)
data = download_data("added.zip", whereTo=folder, fLOG=fLOG)
with open(data[0], "r") as f:
text = f.read()
added_edges = eval(text)
path = euler_path(edges, added_edges)
fLOG(len(path), len(edges) + len(added_edges))
for p in path[:5]:
fLOG(len(p), p)
for p in path[-5:]:
fLOG(len(p), p)
def test_algo_euler4(self):
fLOG(
__file__,
self._testMethodName,
OutputPrint=__name__ == "__main__")
folder = os.path.join(
os.path.abspath(
os.path.dirname(__file__)),
"temp_algo_euler4")
if not os.path.exists(folder):
os.mkdir(folder)
edges = get_data(whereTo=folder, fLOG=fLOG)
edges = edges[:3]
vertices = {}
for e in edges:
for i in range(0, 2):
_ = e[i]
p = e[i + 3]
vertices[_] = p
connex = connected_components(edges)
v = [v for k, v in connex.items()]
mi, ma = min(v), max(v)
while mi != ma:
edges.append((mi, ma, 2, vertices[mi], vertices[ma],
distance_haversine(* (vertices[mi] + vertices[ma]))))
connex = connected_components(edges)
v = [v for k, v in connex.items()]
mi, ma = min(v), max(v)
fix_tkinter_issues_virtualenv()
import matplotlib.pyplot as plt
import networkx as nx
plt.figure()
G = nx.Graph()
for e in edges:
a, b = e[:2]
G.add_edge(a, b)
pos = nx.spring_layout(G)
nx.draw(G, pos, node_color='#A0CBE2')
plt.savefig(os.path.join(folder, "graph1.png"))
plt.close('all')
added = eulerien_extension(edges, fLOG=lambda *l: None,
distance=distance_paris)
for e in added:
a, b = e[:2]
G.add_edge(a, b)
plt.figure()
pos = nx.spring_layout(G)
graph_degree(edges + added)
#labels={ v:"{0}".format(deg[v]) for v in G.nodes() }
nx.draw(G, pos, node_color='#A0CBE2' # ,labels=labels
)
plt.savefig(os.path.join(folder, "graph2.png"))
plt.close('all')
path = euler_path(edges, added)
alls = edges + added
fLOG(len(alls), len(path))
#assert len(alls) == len(path)
if __name__ == "__main__":
unittest.main()
| 31.830918
| 129
| 0.524814
|
7953825265a3c45861364ddfb5d30170f362a594
| 1,890
|
py
|
Python
|
alipay/aop/api/domain/AlipaySocialQuestionnareGrayUpgradeModel.py
|
antopen/alipay-sdk-python-all
|
8e51c54409b9452f8d46c7bb10eea7c8f7e8d30c
|
[
"Apache-2.0"
] | 213
|
2018-08-27T16:49:32.000Z
|
2021-12-29T04:34:12.000Z
|
alipay/aop/api/domain/AlipaySocialQuestionnareGrayUpgradeModel.py
|
antopen/alipay-sdk-python-all
|
8e51c54409b9452f8d46c7bb10eea7c8f7e8d30c
|
[
"Apache-2.0"
] | 29
|
2018-09-29T06:43:00.000Z
|
2021-09-02T03:27:32.000Z
|
alipay/aop/api/domain/AlipaySocialQuestionnareGrayUpgradeModel.py
|
antopen/alipay-sdk-python-all
|
8e51c54409b9452f8d46c7bb10eea7c8f7e8d30c
|
[
"Apache-2.0"
] | 59
|
2018-08-27T16:59:26.000Z
|
2022-03-25T10:08:15.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class AlipaySocialQuestionnareGrayUpgradeModel(object):
def __init__(self):
self._ext_info = None
self._gray_percent = None
self._qstn_id = None
@property
def ext_info(self):
return self._ext_info
@ext_info.setter
def ext_info(self, value):
self._ext_info = value
@property
def gray_percent(self):
return self._gray_percent
@gray_percent.setter
def gray_percent(self, value):
self._gray_percent = value
@property
def qstn_id(self):
return self._qstn_id
@qstn_id.setter
def qstn_id(self, value):
self._qstn_id = value
def to_alipay_dict(self):
params = dict()
if self.ext_info:
if hasattr(self.ext_info, 'to_alipay_dict'):
params['ext_info'] = self.ext_info.to_alipay_dict()
else:
params['ext_info'] = self.ext_info
if self.gray_percent:
if hasattr(self.gray_percent, 'to_alipay_dict'):
params['gray_percent'] = self.gray_percent.to_alipay_dict()
else:
params['gray_percent'] = self.gray_percent
if self.qstn_id:
if hasattr(self.qstn_id, 'to_alipay_dict'):
params['qstn_id'] = self.qstn_id.to_alipay_dict()
else:
params['qstn_id'] = self.qstn_id
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = AlipaySocialQuestionnareGrayUpgradeModel()
if 'ext_info' in d:
o.ext_info = d['ext_info']
if 'gray_percent' in d:
o.gray_percent = d['gray_percent']
if 'qstn_id' in d:
o.qstn_id = d['qstn_id']
return o
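# Minimal round-trip sketch (illustrative values): build the model, serialize it
# to the dict form used by the SDK, and rebuild it from that dict.
#     m = AlipaySocialQuestionnareGrayUpgradeModel()
#     m.gray_percent = 10
#     m.qstn_id = "Q123"   # hypothetical questionnaire id
#     d = m.to_alipay_dict()   # {'gray_percent': 10, 'qstn_id': 'Q123'}
#     m2 = AlipaySocialQuestionnareGrayUpgradeModel.from_alipay_dict(d)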
| 26.619718
| 75
| 0.58836
|
7953835e57754eaed5e1936da17007e7cc0bf6a8
| 129
|
py
|
Python
|
backend/user/urls.py
|
pennlabs/student-life
|
aaac7109b9bd2617787cb3aa813e5f736abb24a5
|
[
"MIT"
] | 7
|
2019-12-25T04:11:24.000Z
|
2021-10-11T05:00:17.000Z
|
backend/user/urls.py
|
pennlabs/student-life
|
aaac7109b9bd2617787cb3aa813e5f736abb24a5
|
[
"MIT"
] | 41
|
2019-12-25T18:37:35.000Z
|
2021-10-10T19:50:21.000Z
|
backend/user/urls.py
|
pennlabs/student-life
|
aaac7109b9bd2617787cb3aa813e5f736abb24a5
|
[
"MIT"
] | null | null | null |
from django.urls import path
from user import views
urlpatterns = [
path("me/", views.UserView.as_view(), name="user"),
]
| 14.333333
| 55
| 0.682171
|
7953835effae21ee459a7cf919d6afbd67ec969d
| 43,798
|
py
|
Python
|
sympy/mpmath/libmp/libelefun.py
|
benjaminmcdonald/sympy
|
dc44dcc6d6d5f2d0a7ede35eff5f421ab4b11a3e
|
[
"BSD-3-Clause"
] | 1
|
2016-07-13T04:30:25.000Z
|
2016-07-13T04:30:25.000Z
|
sympy/mpmath/libmp/libelefun.py
|
jegerjensen/sympy
|
3a43310f1957a21a6f095fe2801cc05b5268a2c7
|
[
"BSD-3-Clause"
] | null | null | null |
sympy/mpmath/libmp/libelefun.py
|
jegerjensen/sympy
|
3a43310f1957a21a6f095fe2801cc05b5268a2c7
|
[
"BSD-3-Clause"
] | null | null | null |
"""
This module implements computation of elementary transcendental
functions (powers, logarithms, trigonometric and hyperbolic
functions, inverse trigonometric and hyperbolic) for real
floating-point numbers.
For complex and interval implementations of the same functions,
see libmpc and libmpi.
"""
import math
from bisect import bisect
from backend import MPZ, MPZ_ZERO, MPZ_ONE, MPZ_TWO, MPZ_FIVE, BACKEND
from libmpf import (
round_floor, round_ceiling, round_down, round_up,
round_nearest, round_fast,
ComplexResult,
bitcount, bctable, lshift, rshift, giant_steps, sqrt_fixed,
from_int, to_int, from_man_exp, to_fixed, to_float, from_float,
from_rational, normalize,
fzero, fone, fnone, fhalf, finf, fninf, fnan,
mpf_cmp, mpf_sign, mpf_abs,
mpf_pos, mpf_neg, mpf_add, mpf_sub, mpf_mul, mpf_div, mpf_shift,
mpf_rdiv_int, mpf_pow_int, mpf_sqrt,
reciprocal_rnd, negative_rnd, mpf_perturb,
isqrt_fast
)
from libintmath import ifib
#-------------------------------------------------------------------------------
# Tuning parameters
#-------------------------------------------------------------------------------
# Cutoff for computing exp from cosh+sinh. This reduces the
# number of terms by half, but also requires a square root which
# is expensive with the pure-Python square root code.
if BACKEND == 'python':
EXP_COSH_CUTOFF = 600
else:
EXP_COSH_CUTOFF = 400
# Cutoff for using more than 2 series
EXP_SERIES_U_CUTOFF = 1500
# Also basically determined by sqrt
if BACKEND == 'python':
COS_SIN_CACHE_PREC = 400
else:
COS_SIN_CACHE_PREC = 200
COS_SIN_CACHE_STEP = 8
cos_sin_cache = {}
# Number of integer logarithms to cache (for zeta sums)
MAX_LOG_INT_CACHE = 2000
log_int_cache = {}
LOG_TAYLOR_PREC = 2500 # Use Taylor series with caching up to this prec
LOG_TAYLOR_SHIFT = 9 # Cache log values in steps of size 2^-N
log_taylor_cache = {}
# prec/size ratio of x for fastest convergence in AGM formula
LOG_AGM_MAG_PREC_RATIO = 20
ATAN_TAYLOR_PREC = 3000 # Same as for log
ATAN_TAYLOR_SHIFT = 7 # steps of size 2^-N
atan_taylor_cache = {}
# ~= next power of two + 20
cache_prec_steps = [22,22]
for k in xrange(1, bitcount(LOG_TAYLOR_PREC)+1):
cache_prec_steps += [min(2**k,LOG_TAYLOR_PREC)+20] * 2**(k-1)
#----------------------------------------------------------------------------#
# #
# Elementary mathematical constants #
# #
#----------------------------------------------------------------------------#
def constant_memo(f):
"""
Decorator for caching computed values of mathematical
constants. This decorator should be applied to a
function taking a single argument prec as input and
returning a fixed-point value with the given precision.
"""
f.memo_prec = -1
f.memo_val = None
def g(prec, **kwargs):
memo_prec = f.memo_prec
if prec <= memo_prec:
return f.memo_val >> (memo_prec-prec)
newprec = int(prec*1.05+10)
f.memo_val = f(newprec, **kwargs)
f.memo_prec = newprec
return f.memo_val >> (newprec-prec)
g.__name__ = f.__name__
g.__doc__ = f.__doc__
return g
def def_mpf_constant(fixed):
"""
Create a function that computes the mpf value for a mathematical
constant, given a function that computes the fixed-point value.
Assumptions: the constant is positive and has magnitude ~= 1;
the fixed-point function rounds to floor.
"""
def f(prec, rnd=round_fast):
wp = prec + 20
v = fixed(wp)
if rnd in (round_up, round_ceiling):
v += 1
return normalize(0, v, -wp, bitcount(v), prec, rnd)
f.__doc__ = fixed.__doc__
return f
def bsp_acot(q, a, b, hyperbolic):
if b - a == 1:
a1 = MPZ(2*a + 3)
if hyperbolic or a&1:
return MPZ_ONE, a1 * q**2, a1
else:
return -MPZ_ONE, a1 * q**2, a1
m = (a+b)//2
p1, q1, r1 = bsp_acot(q, a, m, hyperbolic)
p2, q2, r2 = bsp_acot(q, m, b, hyperbolic)
return q2*p1 + r1*p2, q1*q2, r1*r2
# the acoth(x) series converges like the geometric series for x^2
# N = ceil(p*log(2)/(2*log(x)))
def acot_fixed(a, prec, hyperbolic):
"""
Compute acot(a) or acoth(a) for an integer a with binary splitting; see
http://numbers.computation.free.fr/Constants/Algorithms/splitting.html
"""
N = int(0.35 * prec/math.log(a) + 20)
p, q, r = bsp_acot(a, 0,N, hyperbolic)
return ((p+q)<<prec)//(q*a)
def machin(coefs, prec, hyperbolic=False):
"""
Evaluate a Machin-like formula, i.e., a linear combination of
acot(n) or acoth(n) for specific integer values of n, using fixed-
point arithmetic. The input should be a list [(c, n), ...], giving
c*acot[h](n) + ...
"""
extraprec = 10
s = MPZ_ZERO
for a, b in coefs:
s += MPZ(a) * acot_fixed(MPZ(b), prec+extraprec, hyperbolic)
return (s >> extraprec)
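# Example (illustrative): the classic Machin formula
#     pi = 16*acot(5) - 4*acot(239)
# can be evaluated with this helper as
#     pi_approx = machin([(16, 5), (-4, 239)], prec)
# giving pi as a fixed-point number with prec bits, up to the final truncation
# of the guard bits.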
# Logarithms of integers are needed for various computations involving
# logarithms, powers, radix conversion, etc
@constant_memo
def ln2_fixed(prec):
"""
Computes ln(2). This is done with a hyperbolic Machin-type formula,
with binary splitting at high precision.
"""
return machin([(18, 26), (-2, 4801), (8, 8749)], prec, True)
@constant_memo
def ln10_fixed(prec):
"""
Computes ln(10). This is done with a hyperbolic Machin-type formula.
"""
return machin([(46, 31), (34, 49), (20, 161)], prec, True)
"""
For computation of pi, we use the Chudnovsky series:
    1/(12*pi) = sum_{k=0..oo} (-1)^k * (6k)! * (A + B*k) / ( (3k)! * (k!)^3 * C^(3k+3/2) )
where A, B, and C are certain integer constants. This series adds roughly
14 digits per term. Note that C^(3/2) can be extracted so that the
series contains only rational terms. This makes binary splitting very
efficient.
The recurrence formulas for the binary splitting were taken from
ftp://ftp.gmplib.org/pub/src/gmp-chudnovsky.c
Previously, Machin's formula was used at low precision and the AGM iteration
was used at high precision. However, the Chudnovsky series is essentially as
fast as the Machin formula at low precision and in practice about 3x faster
than the AGM at high precision (despite theoretically having a worse
asymptotic complexity), so there is no reason not to use it in all cases.
"""
# Constants in Chudnovsky's series
CHUD_A = MPZ(13591409)
CHUD_B = MPZ(545140134)
CHUD_C = MPZ(640320)
CHUD_D = MPZ(12)
def bs_chudnovsky(a, b, level, verbose):
"""
Computes the sum from a to b of the series in the Chudnovsky
formula. Returns g, p, q where p/q is the sum as an exact
fraction and g is a temporary value used to save work
for recursive calls.
"""
if b-a == 1:
g = MPZ((6*b-5)*(2*b-1)*(6*b-1))
p = b**3 * CHUD_C**3 // 24
q = (-1)**b * g * (CHUD_A+CHUD_B*b)
else:
if verbose and level < 4:
print " binary splitting", a, b
mid = (a+b)//2
g1, p1, q1 = bs_chudnovsky(a, mid, level+1, verbose)
g2, p2, q2 = bs_chudnovsky(mid, b, level+1, verbose)
p = p1*p2
g = g1*g2
q = q1*p2 + q2*g1
return g, p, q
@constant_memo
def pi_fixed(prec, verbose=False, verbose_base=None):
"""
Compute floor(pi * 2**prec) as a big integer.
This is done using Chudnovsky's series (see comments in
libelefun.py for details).
"""
# The Chudnovsky series gives 14.18 digits per term
N = int(prec/3.3219280948/14.181647462 + 2)
if verbose:
print "binary splitting with N =", N
g, p, q = bs_chudnovsky(0, N, 0, verbose)
sqrtC = isqrt_fast(CHUD_C<<(2*prec))
v = p*CHUD_C*sqrtC//((q+CHUD_A*p)*CHUD_D)
return v
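# Sanity check (illustrative): converting the fixed-point result back to a float
# recovers pi, e.g.
#     to_float(from_man_exp(pi_fixed(53), -53))   # ~ 3.141592653589793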
def degree_fixed(prec):
return pi_fixed(prec)//180
def bspe(a, b):
"""
Sum series for exp(1)-1 between a, b, returning the result
as an exact fraction (p, q).
"""
if b-a == 1:
return MPZ_ONE, MPZ(b)
m = (a+b)//2
p1, q1 = bspe(a, m)
p2, q2 = bspe(m, b)
return p1*q2+p2, q1*q2
@constant_memo
def e_fixed(prec):
"""
Computes exp(1). This is done using the ordinary Taylor series for
exp, with binary splitting. For a description of the algorithm,
see:
http://numbers.computation.free.fr/Constants/
Algorithms/splitting.html
"""
# Slight overestimate of N needed for 1/N! < 2**(-prec)
# This could be tightened for large N.
N = int(1.1*prec/math.log(prec) + 20)
p, q = bspe(0,N)
return ((p+q)<<prec)//q
@constant_memo
def phi_fixed(prec):
"""
Computes the golden ratio, (1+sqrt(5))/2
"""
prec += 10
a = isqrt_fast(MPZ_FIVE<<(2*prec)) + (MPZ_ONE << prec)
return a >> 11
mpf_phi = def_mpf_constant(phi_fixed)
mpf_pi = def_mpf_constant(pi_fixed)
mpf_e = def_mpf_constant(e_fixed)
mpf_degree = def_mpf_constant(degree_fixed)
mpf_ln2 = def_mpf_constant(ln2_fixed)
mpf_ln10 = def_mpf_constant(ln10_fixed)
@constant_memo
def ln_sqrt2pi_fixed(prec):
wp = prec + 10
# ln(sqrt(2*pi)) = ln(2*pi)/2
return to_fixed(mpf_log(mpf_shift(mpf_pi(wp), 1), wp), prec-1)
@constant_memo
def sqrtpi_fixed(prec):
return sqrt_fixed(pi_fixed(prec), prec)
mpf_sqrtpi = def_mpf_constant(sqrtpi_fixed)
mpf_ln_sqrt2pi = def_mpf_constant(ln_sqrt2pi_fixed)
#----------------------------------------------------------------------------#
# #
# Powers #
# #
#----------------------------------------------------------------------------#
def mpf_pow(s, t, prec, rnd=round_fast):
"""
Compute s**t. Raises ComplexResult if s is negative and t is
fractional.
"""
ssign, sman, sexp, sbc = s
tsign, tman, texp, tbc = t
if ssign and texp < 0:
raise ComplexResult("negative number raised to a fractional power")
if texp >= 0:
return mpf_pow_int(s, (-1)**tsign * (tman<<texp), prec, rnd)
# s**(n/2) = sqrt(s)**n
if texp == -1:
if tman == 1:
if tsign:
return mpf_div(fone, mpf_sqrt(s, prec+10,
reciprocal_rnd[rnd]), prec, rnd)
return mpf_sqrt(s, prec, rnd)
else:
if tsign:
return mpf_pow_int(mpf_sqrt(s, prec+10,
reciprocal_rnd[rnd]), -tman, prec, rnd)
return mpf_pow_int(mpf_sqrt(s, prec+10, rnd), tman, prec, rnd)
# General formula: s**t = exp(t*log(s))
# TODO: handle rnd direction of the logarithm carefully
c = mpf_log(s, prec+10, rnd)
return mpf_exp(mpf_mul(t, c), prec, rnd)
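# Example (illustrative): a genuinely fractional exponent goes through the
# exp(t*log(s)) path above, e.g.
#     to_float(mpf_pow(from_int(2), from_float(0.75), 53))   # ~ 1.6817928305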
def int_pow_fixed(y, n, prec):
"""n-th power of a fixed point number with precision prec
Returns the power in the form man, exp,
man * 2**exp ~= y**n
"""
if n == 2:
return (y*y), 0
bc = bitcount(y)
exp = 0
workprec = 2 * (prec + 4*bitcount(n) + 4)
_, pm, pe, pbc = fone
while 1:
if n & 1:
pm = pm*y
pe = pe+exp
pbc += bc - 2
pbc = pbc + bctable[int(pm >> pbc)]
if pbc > workprec:
pm = pm >> (pbc-workprec)
pe += pbc - workprec
pbc = workprec
n -= 1
if not n:
break
y = y*y
exp = exp+exp
bc = bc + bc - 2
bc = bc + bctable[int(y >> bc)]
if bc > workprec:
y = y >> (bc-workprec)
exp += bc - workprec
bc = workprec
n = n // 2
return pm, pe
# froot(s, n, prec, rnd) computes the real n-th root of a
# positive mpf tuple s.
# To compute the root we start from a 50-bit estimate for r
# generated with ordinary floating-point arithmetic, and then refine
# the value to full accuracy using the iteration
#            r_{n+1} = ( (n-1)*r_n + y / r_n**(n-1) ) / n
# which is simply Newton's method applied to the equation r**n = y.
# With giant_steps(start, prec+extra) = [p0,...,pm, prec+extra]
# and y = man * 2**-shift one has
# (man * 2**exp)**(1/n) =
# y**(1/n) * 2**(start-prec/n) * 2**(p0-start) * ... * 2**(prec+extra-pm) *
# 2**((exp+shift-(n-1)*prec)/n -extra))
# The last factor is accounted for in the last line of froot.
def nthroot_fixed(y, n, prec, exp1):
start = 50
try:
y1 = rshift(y, prec - n*start)
r = MPZ(int(y1**(1.0/n)))
except OverflowError:
y1 = from_int(y1, start)
fn = from_int(n)
fn = mpf_rdiv_int(1, fn, start)
r = mpf_pow(y1, fn, start)
r = to_int(r)
extra = 10
extra1 = n
prevp = start
for p in giant_steps(start, prec+extra):
pm, pe = int_pow_fixed(r, n-1, prevp)
r2 = rshift(pm, (n-1)*prevp - p - pe - extra1)
B = lshift(y, 2*p-prec+extra1)//r2
r = (B + (n-1) * lshift(r, p-prevp))//n
prevp = p
return r
def mpf_nthroot(s, n, prec, rnd=round_fast):
"""nth-root of a positive number
Use the Newton method when faster, otherwise use x**(1/n)
"""
sign, man, exp, bc = s
if sign:
raise ComplexResult("nth root of a negative number")
if not man:
if s == fnan:
return fnan
if s == fzero:
if n > 0:
return fzero
if n == 0:
return fone
return finf
# Infinity
if not n:
return fnan
if n < 0:
return fzero
return finf
flag_inverse = False
if n < 2:
if n == 0:
return fone
if n == 1:
return mpf_pos(s, prec, rnd)
if n == -1:
return mpf_div(fone, s, prec, rnd)
# n < 0
rnd = reciprocal_rnd[rnd]
flag_inverse = True
extra_inverse = 5
prec += extra_inverse
n = -n
if n > 20 and (n >= 20000 or prec < int(233 + 28.3 * n**0.62)):
prec2 = prec + 10
fn = from_int(n)
nth = mpf_rdiv_int(1, fn, prec2)
r = mpf_pow(s, nth, prec2, rnd)
s = normalize(r[0], r[1], r[2], r[3], prec, rnd)
if flag_inverse:
return mpf_div(fone, s, prec-extra_inverse, rnd)
else:
return s
# Convert to a fixed-point number with prec2 bits.
prec2 = prec + 2*n - (prec%n)
# a few tests indicate that
# for 10 < n < 10**4 a bit more precision is needed
if n > 10:
prec2 += prec2//10
prec2 = prec2 - prec2%n
# Mantissa may have more bits than we need. Trim it down.
shift = bc - prec2
# Adjust exponents to make prec2 and exp+shift multiples of n.
sign1 = 0
es = exp+shift
if es < 0:
sign1 = 1
es = -es
if sign1:
shift += es%n
else:
shift -= es%n
man = rshift(man, shift)
extra = 10
exp1 = ((exp+shift-(n-1)*prec2)//n) - extra
rnd_shift = 0
if flag_inverse:
if rnd == 'u' or rnd == 'c':
rnd_shift = 1
else:
if rnd == 'd' or rnd == 'f':
rnd_shift = 1
man = nthroot_fixed(man+rnd_shift, n, prec2, exp1)
s = from_man_exp(man, exp1, prec, rnd)
if flag_inverse:
return mpf_div(fone, s, prec-extra_inverse, rnd)
else:
return s
def mpf_cbrt(s, prec, rnd=round_fast):
"""cubic root of a positive number"""
return mpf_nthroot(s, 3, prec, rnd)
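# Examples (illustrative):
#     to_float(mpf_cbrt(from_int(27), 53))        # ~ 3.0
#     to_float(mpf_nthroot(from_int(2), 10, 53))  # ~ 1.0717734625 (tenth root of 2)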
#----------------------------------------------------------------------------#
# #
# Logarithms #
# #
#----------------------------------------------------------------------------#
def log_int_fixed(n, prec, ln2=None):
"""
Fast computation of log(n), caching the value for small n,
intended for zeta sums.
"""
if n in log_int_cache:
value, vprec = log_int_cache[n]
if vprec >= prec:
return value >> (vprec - prec)
wp = prec + 10
if wp <= LOG_TAYLOR_SHIFT:
if ln2 is None:
ln2 = ln2_fixed(wp)
r = bitcount(n)
x = n << (wp-r)
v = log_taylor_cached(x, wp) + r*ln2
else:
v = to_fixed(mpf_log(from_int(n), wp+5), wp)
if n < MAX_LOG_INT_CACHE:
log_int_cache[n] = (v, wp)
return v >> (wp-prec)
def agm_fixed(a, b, prec):
"""
Fixed-point computation of agm(a,b), assuming
a, b both close to unit magnitude.
"""
i = 0
while 1:
anew = (a+b)>>1
if i > 4 and abs(a-anew) < 8:
return a
b = isqrt_fast(a*b)
a = anew
i += 1
return a
def log_agm(x, prec):
"""
Fixed-point computation of -log(x) = log(1/x), suitable
for large precision. It is required that 0 < x < 1. The
algorithm used is the Sasaki-Kanada formula
-log(x) = pi/agm(theta2(x)^2,theta3(x)^2). [1]
For faster convergence in the theta functions, x should
be chosen closer to 0.
Guard bits must be added by the caller.
HYPOTHESIS: if x = 2^(-n), n bits need to be added to
account for the truncation to a fixed-point number,
and this is the only significant cancellation error.
The number of bits lost to roundoff is small and can be
considered constant.
[1] Richard P. Brent, "Fast Algorithms for High-Precision
Computation of Elementary Functions (extended abstract)",
http://wwwmaths.anu.edu.au/~brent/pd/RNC7-Brent.pdf
"""
x2 = (x*x) >> prec
# Compute jtheta2(x)**2
s = a = b = x2
while a:
b = (b*x2) >> prec
a = (a*b) >> prec
s += a
s += (MPZ_ONE<<prec)
s = (s*s)>>(prec-2)
s = (s*isqrt_fast(x<<prec))>>prec
# Compute jtheta3(x)**2
t = a = b = x
while a:
b = (b*x2) >> prec
a = (a*b) >> prec
t += a
t = (MPZ_ONE<<prec) + (t<<1)
t = (t*t)>>prec
# Final formula
p = agm_fixed(s, t, prec)
return (pi_fixed(prec) << prec) // p
def log_taylor(x, prec, r=0):
"""
Fixed-point calculation of log(x). It is assumed that x is close
enough to 1 for the Taylor series to converge quickly. Convergence
can be improved by specifying r > 0 to compute
log(x^(1/2^r))*2^r, at the cost of performing r square roots.
The caller must provide sufficient guard bits.
"""
for i in xrange(r):
x = isqrt_fast(x<<prec)
one = MPZ_ONE << prec
v = ((x-one)<<prec)//(x+one)
sign = v < 0
if sign:
v = -v
v2 = (v*v) >> prec
v4 = (v2*v2) >> prec
s0 = v
s1 = v//3
v = (v*v4) >> prec
k = 5
while v:
s0 += v // k
k += 2
s1 += v // k
v = (v*v4) >> prec
k += 2
s1 = (s1*v2) >> prec
s = (s0+s1) << (1+r)
if sign:
return -s
return s
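# The series summed above is the atanh form of the logarithm: with
# v = (x-1)/(x+1),
#     log(x) = 2*(v + v^3/3 + v^5/5 + ...)
# s0 collects the terms with exponents 1, 5, 9, ... and s1 (scaled by v^2 at
# the end) those with exponents 3, 7, 11, ...; the final shift by 1+r supplies
# the factor 2 and undoes the r square roots taken at the top.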
def log_taylor_cached(x, prec):
"""
Fixed-point computation of log(x), assuming x in (0.5, 2)
and prec <= LOG_TAYLOR_PREC.
"""
n = x >> (prec-LOG_TAYLOR_SHIFT)
cached_prec = cache_prec_steps[prec]
dprec = cached_prec - prec
if (n, cached_prec) in log_taylor_cache:
a, log_a = log_taylor_cache[n, cached_prec]
else:
a = n << (cached_prec - LOG_TAYLOR_SHIFT)
log_a = log_taylor(a, cached_prec, 8)
log_taylor_cache[n, cached_prec] = (a, log_a)
a >>= dprec
log_a >>= dprec
u = ((x - a) << prec) // a
v = (u << prec) // ((MPZ_TWO << prec) + u)
v2 = (v*v) >> prec
v4 = (v2*v2) >> prec
s0 = v
s1 = v//3
v = (v*v4) >> prec
k = 5
while v:
s0 += v//k
k += 2
s1 += v//k
v = (v*v4) >> prec
k += 2
s1 = (s1*v2) >> prec
s = (s0+s1) << 1
return log_a + s
def mpf_log(x, prec, rnd=round_fast):
"""
Compute the natural logarithm of the mpf value x. If x is negative,
ComplexResult is raised.
"""
sign, man, exp, bc = x
#------------------------------------------------------------------
# Handle special values
if not man:
if x == fzero: return fninf
if x == finf: return finf
if x == fnan: return fnan
if sign:
raise ComplexResult("logarithm of a negative number")
wp = prec + 20
#------------------------------------------------------------------
# Handle log(2^n) = n*log(2).
# Here we catch the only possible exact value, log(1) = 0
if man == 1:
if not exp:
return fzero
return from_man_exp(exp*ln2_fixed(wp), -wp, prec, rnd)
mag = exp+bc
abs_mag = abs(mag)
#------------------------------------------------------------------
# Handle x = 1+eps, where log(x) ~ eps = x-1. We need to check for
# cancellation when moving to fixed-point math and compensate
# by increasing the precision. Note that abs_mag in (0, 1) <=>
# 0.5 < x < 2 and x != 1
if abs_mag <= 1:
# Calculate t = x-1 to measure distance from 1 in bits
tsign = 1-abs_mag
if tsign:
tman = (MPZ_ONE<<bc) - man
else:
tman = man - (MPZ_ONE<<(bc-1))
tbc = bitcount(tman)
cancellation = bc - tbc
if cancellation > wp:
t = normalize(tsign, tman, abs_mag-bc, tbc, tbc, 'n')
return mpf_perturb(t, tsign, prec, rnd)
else:
wp += cancellation
# TODO: if close enough to 1, we could use Taylor series
# even in the AGM precision range, since the Taylor series
# converges rapidly
#------------------------------------------------------------------
# Another special case:
# n*log(2) is a good enough approximation
if abs_mag > 10000:
if bitcount(abs_mag) > wp:
return from_man_exp(exp*ln2_fixed(wp), -wp, prec, rnd)
#------------------------------------------------------------------
# General case.
# Perform argument reduction using log(x) = log(x*2^n) - n*log(2):
# If we are in the Taylor precision range, choose magnitude 0 or 1.
# If we are in the AGM precision range, choose magnitude -m for
# some large m; benchmarking on one machine showed m = prec/20 to be
# optimal between 1000 and 100,000 digits.
if wp <= LOG_TAYLOR_PREC:
m = log_taylor_cached(lshift(man, wp-bc), wp)
if mag:
m += mag*ln2_fixed(wp)
else:
optimal_mag = -wp//LOG_AGM_MAG_PREC_RATIO
n = optimal_mag - mag
x = mpf_shift(x, n)
wp += (-optimal_mag)
m = -log_agm(to_fixed(x, wp), wp)
m -= n*ln2_fixed(wp)
return from_man_exp(m, -wp, prec, rnd)
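# Rough behaviour sketch (approximate values): mpf_log(from_int(8), 53) takes
# the exact power-of-two branch above and returns 3*log(2) ~ 2.0794; an
# argument sufficiently close to 1 short-circuits through mpf_perturb and
# comes back as essentially x-1.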
def mpf_log_hypot(a, b, prec, rnd):
"""
Computes log(sqrt(a^2+b^2)) accurately.
"""
# If either a or b is inf/nan/0, assume it to be a
if not b[1]:
a, b = b, a
# a is inf/nan/0
if not a[1]:
# both are inf/nan/0
if not b[1]:
if a == b == fzero:
return fninf
if fnan in (a, b):
return fnan
# at least one term is (+/- inf)^2
return finf
# only a is inf/nan/0
if a == fzero:
# log(sqrt(0+b^2)) = log(|b|)
return mpf_log(mpf_abs(b), prec, rnd)
if a == fnan:
return fnan
return finf
# Exact
a2 = mpf_mul(a,a)
b2 = mpf_mul(b,b)
extra = 20
# Not exact
h2 = mpf_add(a2, b2, prec+extra)
cancelled = mpf_add(h2, fnone, 10)
mag_cancelled = cancelled[2]+cancelled[3]
# Just redo the sum exactly if necessary (could be smarter
# and avoid memory allocation when a or b is precisely 1
# and the other is tiny...)
if cancelled == fzero or mag_cancelled < -extra//2:
h2 = mpf_add(a2, b2, prec+extra-min(a2[2],b2[2]))
return mpf_shift(mpf_log(h2, prec, rnd), -1)
#----------------------------------------------------------------------
# Inverse tangent
#
def atan_newton(x, prec):
if prec >= 100:
r = math.atan((x>>(prec-53))/2.0**53)
else:
r = math.atan(x/2.0**prec)
prevp = 50
r = MPZ(int(r * 2.0**53) >> (53-prevp))
extra_p = 50
for wp in giant_steps(prevp, prec):
wp += extra_p
r = r << (wp-prevp)
cos, sin = cos_sin_fixed(r, wp)
tan = (sin << wp) // cos
a = ((tan-rshift(x, prec-wp)) << wp) // ((MPZ_ONE<<wp) + ((tan**2)>>wp))
r = r - a
prevp = wp
return rshift(r, prevp-prec)
def atan_taylor_get_cached(n, prec):
# Taylor series with caching wins up to huge precisions
# To avoid unnecessary precomputation at low precision, we
# do it in steps
# Round to next power of 2
prec2 = (1<<(bitcount(prec-1))) + 20
dprec = prec2 - prec
if (n, prec2) in atan_taylor_cache:
a, atan_a = atan_taylor_cache[n, prec2]
else:
a = n << (prec2 - ATAN_TAYLOR_SHIFT)
atan_a = atan_newton(a, prec2)
atan_taylor_cache[n, prec2] = (a, atan_a)
return (a >> dprec), (atan_a >> dprec)
def atan_taylor(x, prec):
n = (x >> (prec-ATAN_TAYLOR_SHIFT))
a, atan_a = atan_taylor_get_cached(n, prec)
d = x - a
s0 = v = (d << prec) // ((a**2 >> prec) + (a*d >> prec) + (MPZ_ONE << prec))
v2 = (v**2 >> prec)
v4 = (v2 * v2) >> prec
s1 = v//3
v = (v * v4) >> prec
k = 5
while v:
s0 += v // k
k += 2
s1 += v // k
v = (v * v4) >> prec
k += 2
s1 = (s1 * v2) >> prec
s = s0 - s1
return atan_a + s
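# atan_taylor relies on the addition formula
#     atan(x) = atan(a) + atan((x-a)/(1 + a*x))
# with a cached grid point a near x, so the series argument
# v = d/(a^2 + a*d + 1) (d = x-a) is tiny and the alternating series
#     atan(v) = v - v^3/3 + v^5/5 - ...
# needs only a few terms; s0 accumulates the added terms and s1 (scaled by
# v^2) the subtracted ones, mirroring log_taylor above.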
def atan_inf(sign, prec, rnd):
if not sign:
return mpf_shift(mpf_pi(prec, rnd), -1)
return mpf_neg(mpf_shift(mpf_pi(prec, negative_rnd[rnd]), -1))
def mpf_atan(x, prec, rnd=round_fast):
sign, man, exp, bc = x
if not man:
if x == fzero: return fzero
if x == finf: return atan_inf(0, prec, rnd)
if x == fninf: return atan_inf(1, prec, rnd)
return fnan
mag = exp + bc
# Essentially infinity
if mag > prec+20:
return atan_inf(sign, prec, rnd)
# Essentially ~ x
if -mag > prec+20:
return mpf_perturb(x, 1-sign, prec, rnd)
wp = prec + 30 + abs(mag)
# For large x, use atan(x) = pi/2 - atan(1/x)
if mag >= 2:
x = mpf_rdiv_int(1, x, wp)
reciprocal = True
else:
reciprocal = False
t = to_fixed(x, wp)
if sign:
t = -t
if wp < ATAN_TAYLOR_PREC:
a = atan_taylor(t, wp)
else:
a = atan_newton(t, wp)
if reciprocal:
a = ((pi_fixed(wp)>>1)+1) - a
if sign:
a = -a
return from_man_exp(a, -wp, prec, rnd)
# TODO: cleanup the special cases
def mpf_atan2(y, x, prec, rnd=round_fast):
xsign, xman, xexp, xbc = x
ysign, yman, yexp, ybc = y
if not yman:
if y == fzero and x != fnan:
if mpf_sign(x) >= 0:
return fzero
return mpf_pi(prec, rnd)
if y in (finf, fninf):
if x in (finf, fninf):
return fnan
# pi/2
if y == finf:
return mpf_shift(mpf_pi(prec, rnd), -1)
# -pi/2
return mpf_neg(mpf_shift(mpf_pi(prec, negative_rnd[rnd]), -1))
return fnan
if ysign:
return mpf_neg(mpf_atan2(mpf_neg(y), x, prec, negative_rnd[rnd]))
if not xman:
if x == fnan:
return fnan
if x == finf:
return fzero
if x == fninf:
return mpf_pi(prec, rnd)
if y == fzero:
return fzero
return mpf_shift(mpf_pi(prec, rnd), -1)
tquo = mpf_atan(mpf_div(y, x, prec+4), prec+4)
if xsign:
return mpf_add(mpf_pi(prec+4), tquo, prec, rnd)
else:
return mpf_pos(tquo, prec, rnd)
def mpf_asin(x, prec, rnd=round_fast):
sign, man, exp, bc = x
if bc+exp > 0 and x not in (fone, fnone):
raise ComplexResult("asin(x) is real only for -1 <= x <= 1")
# asin(x) = 2*atan(x/(1+sqrt(1-x**2)))
wp = prec + 15
a = mpf_mul(x, x)
b = mpf_add(fone, mpf_sqrt(mpf_sub(fone, a, wp), wp), wp)
c = mpf_div(x, b, wp)
return mpf_shift(mpf_atan(c, prec, rnd), 1)
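# Sanity check of the half-angle identity used above: at x = 1 it gives
# asin(1) = 2*atan(1/(1+sqrt(1-1))) = 2*atan(1) = pi/2, and at x = 0 it gives
# 2*atan(0) = 0, matching the endpoints of the principal branch.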
def mpf_acos(x, prec, rnd=round_fast):
# acos(x) = 2*atan(sqrt(1-x**2)/(1+x))
sign, man, exp, bc = x
if bc + exp > 0:
if x not in (fone, fnone):
raise ComplexResult("acos(x) is real only for -1 <= x <= 1")
if x == fnone:
return mpf_pi(prec, rnd)
wp = prec + 15
a = mpf_mul(x, x)
b = mpf_sqrt(mpf_sub(fone, a, wp), wp)
c = mpf_div(b, mpf_add(fone, x, wp), wp)
return mpf_shift(mpf_atan(c, prec, rnd), 1)
def mpf_asinh(x, prec, rnd=round_fast):
wp = prec + 20
sign, man, exp, bc = x
mag = exp+bc
if mag < -8:
if mag < -wp:
return mpf_perturb(x, 1-sign, prec, rnd)
wp += (-mag)
# asinh(x) = log(x+sqrt(x**2+1))
# use reflection symmetry to avoid cancellation
q = mpf_sqrt(mpf_add(mpf_mul(x, x), fone, wp), wp)
q = mpf_add(mpf_abs(x), q, wp)
if sign:
return mpf_neg(mpf_log(q, prec, negative_rnd[rnd]))
else:
return mpf_log(q, prec, rnd)
def mpf_acosh(x, prec, rnd=round_fast):
# acosh(x) = log(x+sqrt(x**2-1))
wp = prec + 15
if mpf_cmp(x, fone) == -1:
raise ComplexResult("acosh(x) is real only for x >= 1")
q = mpf_sqrt(mpf_add(mpf_mul(x,x), fnone, wp), wp)
return mpf_log(mpf_add(x, q, wp), prec, rnd)
def mpf_atanh(x, prec, rnd=round_fast):
# atanh(x) = log((1+x)/(1-x))/2
sign, man, exp, bc = x
if (not man) and exp:
if x in (fzero, fnan):
return x
raise ComplexResult("atanh(x) is real only for -1 <= x <= 1")
mag = bc + exp
if mag > 0:
if mag == 1 and man == 1:
return [finf, fninf][sign]
raise ComplexResult("atanh(x) is real only for -1 <= x <= 1")
wp = prec + 15
if mag < -8:
if mag < -wp:
return mpf_perturb(x, sign, prec, rnd)
wp += (-mag)
a = mpf_add(x, fone, wp)
b = mpf_sub(fone, x, wp)
return mpf_shift(mpf_log(mpf_div(a, b, wp), prec, rnd), -1)
def mpf_fibonacci(x, prec, rnd=round_fast):
sign, man, exp, bc = x
if not man:
if x == fninf:
return fnan
return x
# F(2^n) ~= 2^(2^n)
size = abs(exp+bc)
if exp >= 0:
# Exact
if size < 10 or size <= bitcount(prec):
return from_int(ifib(to_int(x)), prec, rnd)
# Use the modified Binet formula
wp = prec + size + 20
a = mpf_phi(wp)
b = mpf_add(mpf_shift(a, 1), fnone, wp)
u = mpf_pow(a, x, wp)
v = mpf_cos_pi(x, wp)
v = mpf_div(v, u, wp)
u = mpf_sub(u, v, wp)
u = mpf_div(u, b, prec, rnd)
return u
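# The "modified Binet formula" evaluated above is
#     F(x) = (phi^x - cos(pi*x)/phi^x) / (2*phi - 1),    2*phi - 1 = sqrt(5)
# For integer x the cosine factor reduces to (-1)^x, recovering the classical
# Binet formula; for non-integer x this is the usual real-analytic extension
# of the Fibonacci numbers.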
#-------------------------------------------------------------------------------
# Exponential-type functions
#-------------------------------------------------------------------------------
def exponential_series(x, prec, type=0):
"""
Taylor series for cosh/sinh or cos/sin.
type = 0 -- returns exp(x) (slightly faster than cosh+sinh)
type = 1 -- returns (cosh(x), sinh(x))
type = 2 -- returns (cos(x), sin(x))
"""
if x < 0:
x = -x
sign = 1
else:
sign = 0
r = int(0.5*prec**0.5)
xmag = bitcount(x) - prec
r = max(0, xmag + r)
extra = 10 + 2*max(r,-xmag)
wp = prec + extra
x <<= (extra - r)
one = MPZ_ONE << wp
alt = (type == 2)
if prec < EXP_SERIES_U_CUTOFF:
x2 = a = (x*x) >> wp
x4 = (x2*x2) >> wp
s0 = s1 = MPZ_ZERO
k = 2
while a:
a //= (k-1)*k; s0 += a; k += 2
a //= (k-1)*k; s1 += a; k += 2
a = (a*x4) >> wp
s1 = (x2*s1) >> wp
if alt:
c = s1 - s0 + one
else:
c = s1 + s0 + one
else:
u = int(0.3*prec**0.35)
x2 = a = (x*x) >> wp
xpowers = [one, x2]
for i in xrange(1, u):
xpowers.append((xpowers[-1]*x2)>>wp)
sums = [MPZ_ZERO] * u
k = 2
while a:
for i in xrange(u):
a //= (k-1)*k
if alt and k & 2: sums[i] -= a
else: sums[i] += a
k += 2
a = (a*xpowers[-1]) >> wp
for i in xrange(1, u):
sums[i] = (sums[i]*xpowers[i]) >> wp
c = sum(sums) + one
if type == 0:
s = isqrt_fast(c*c - (one<<wp))
if sign:
v = c - s
else:
v = c + s
for i in xrange(r):
v = (v*v) >> wp
return v >> extra
else:
# Repeatedly apply the double-angle formula
# cosh(2*x) = 2*cosh(x)^2 - 1
# cos(2*x) = 2*cos(x)^2 - 1
pshift = wp-1
for i in xrange(r):
c = ((c*c) >> pshift) - one
# With the abs, this is the same for sinh and sin
s = isqrt_fast(abs((one<<wp) - c*c))
if sign:
s = -s
return (c>>extra), (s>>extra)
def exp_basecase(x, prec):
"""
Compute exp(x) as a fixed-point number. Works for any x,
but for speed should have |x| < 1. For an arbitrary number,
use exp(x) = exp(x-m*log(2)) * 2^m where m = floor(x/log(2)).
"""
if prec > EXP_COSH_CUTOFF:
return exponential_series(x, prec, 0)
r = int(prec**0.5)
prec += r
s0 = s1 = (MPZ_ONE << prec)
k = 2
a = x2 = (x*x) >> prec
while a:
a //= k; s0 += a; k += 1
a //= k; s1 += a; k += 1
a = (a*x2) >> prec
s1 = (s1*x) >> prec
s = s0 + s1
u = r
while r:
s = (s*s) >> prec
r -= 1
return s >> u
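# exp_basecase sums the even and odd parts of the exponential series
# separately (s0 ~ cosh, s1*x ~ sinh) and adds them. The reduction x -> x/2^r
# is done implicitly: prec is raised by r bits while x is left unshifted, so x
# is reinterpreted as a smaller fixed-point value; the r squarings afterwards
# and the final shift by u = r bits undo this via exp(x) = exp(x/2^r)^(2^r).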
def exp_expneg_basecase(x, prec):
"""
Computation of exp(x), exp(-x)
"""
if prec > EXP_COSH_CUTOFF:
cosh, sinh = exponential_series(x, prec, 1)
return cosh+sinh, cosh-sinh
a = exp_basecase(x, prec)
b = (MPZ_ONE << (prec+prec)) // a
return a, b
def cos_sin_basecase(x, prec):
"""
Compute cos(x), sin(x) as fixed-point numbers, assuming x
in [0, pi/2). For an arbitrary number, use x' = x - m*(pi/2)
where m = floor(x/(pi/2)) along with quarter-period symmetries.
"""
if prec > COS_SIN_CACHE_PREC:
return exponential_series(x, prec, 2)
precs = prec - COS_SIN_CACHE_STEP
t = x >> precs
n = int(t)
if n not in cos_sin_cache:
w = t<<(10+COS_SIN_CACHE_PREC-COS_SIN_CACHE_STEP)
cos_t, sin_t = exponential_series(w, 10+COS_SIN_CACHE_PREC, 2)
cos_sin_cache[n] = (cos_t>>10), (sin_t>>10)
cos_t, sin_t = cos_sin_cache[n]
offset = COS_SIN_CACHE_PREC - prec
cos_t >>= offset
sin_t >>= offset
x -= t << precs
cos = MPZ_ONE << prec
sin = x
k = 2
a = -((x*x) >> prec)
while a:
a //= k; cos += a; k += 1; a = (a*x) >> prec
a //= k; sin += a; k += 1; a = -((a*x) >> prec)
return ((cos*cos_t-sin*sin_t) >> prec), ((sin*cos_t+cos*sin_t) >> prec)
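# cos_sin_basecase splits x into a grid point (whose cosine/sine pair is
# cached) plus a small remainder dx, sums the short Taylor series for cos(dx)
# and sin(dx), and recombines with the angle-addition formulas
#     cos(x) = cos(dx)*cos_t - sin(dx)*sin_t
#     sin(x) = sin(dx)*cos_t + cos(dx)*sin_t
# which is exactly the final return line.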
def mpf_exp(x, prec, rnd=round_fast):
sign, man, exp, bc = x
if man:
mag = bc + exp
wp = prec + 14
if sign:
man = -man
# TODO: the best cutoff depends on both x and the precision.
if prec > 600 and exp >= 0:
# Need about log2(exp(n)) ~= 1.45*mag extra precision
e = mpf_e(wp+int(1.45*mag))
return mpf_pow_int(e, man<<exp, prec, rnd)
if mag < -wp:
return mpf_perturb(fone, sign, prec, rnd)
# |x| >= 2
if mag > 1:
# For large arguments: exp(2^mag*(1+eps)) =
# exp(2^mag)*exp(2^mag*eps) = exp(2^mag)*(1 + 2^mag*eps + ...)
# so about mag extra bits is required.
wpmod = wp + mag
offset = exp + wpmod
if offset >= 0:
t = man << offset
else:
t = man >> (-offset)
lg2 = ln2_fixed(wpmod)
n, t = divmod(t, lg2)
n = int(n)
t >>= mag
else:
offset = exp + wp
if offset >= 0:
t = man << offset
else:
t = man >> (-offset)
n = 0
man = exp_basecase(t, wp)
return from_man_exp(man, n-wp, prec, rnd)
if not exp:
return fone
if x == fninf:
return fzero
return x
def mpf_cosh_sinh(x, prec, rnd=round_fast, tanh=0):
"""Simultaneously compute (cosh(x), sinh(x)) for real x"""
sign, man, exp, bc = x
if (not man) and exp:
if tanh:
if x == finf: return fone
if x == fninf: return fnone
return fnan
if x == finf: return (finf, finf)
if x == fninf: return (finf, fninf)
return fnan, fnan
mag = exp+bc
wp = prec+14
if mag < -4:
# Extremely close to 0, sinh(x) ~= x and cosh(x) ~= 1
if mag < -wp:
if tanh:
return mpf_perturb(x, 1-sign, prec, rnd)
cosh = mpf_perturb(fone, 0, prec, rnd)
sinh = mpf_perturb(x, sign, prec, rnd)
return cosh, sinh
# Fix for cancellation when computing sinh
wp += (-mag)
# Does exp(-2*x) vanish?
if mag > 10:
if 3*(1<<(mag-1)) > wp:
# XXX: rounding
if tanh:
return mpf_perturb([fone,fnone][sign], 1-sign, prec, rnd)
c = s = mpf_shift(mpf_exp(mpf_abs(x), prec, rnd), -1)
if sign:
s = mpf_neg(s)
return c, s
# |x| > 1
if mag > 1:
wpmod = wp + mag
offset = exp + wpmod
if offset >= 0:
t = man << offset
else:
t = man >> (-offset)
lg2 = ln2_fixed(wpmod)
n, t = divmod(t, lg2)
n = int(n)
t >>= mag
else:
offset = exp + wp
if offset >= 0:
t = man << offset
else:
t = man >> (-offset)
n = 0
a, b = exp_expneg_basecase(t, wp)
# TODO: optimize division precision
cosh = a + (b>>(2*n))
sinh = a - (b>>(2*n))
if sign:
sinh = -sinh
if tanh:
man = (sinh << wp) // cosh
return from_man_exp(man, -wp, prec, rnd)
else:
cosh = from_man_exp(cosh, n-wp-1, prec, rnd)
sinh = from_man_exp(sinh, n-wp-1, prec, rnd)
return cosh, sinh
def mod_pi2(man, exp, mag, wp):
# Reduce to standard interval
if mag > 0:
i = 0
while 1:
cancellation_prec = 20 << i
wpmod = wp + mag + cancellation_prec
pi2 = pi_fixed(wpmod-1)
pi4 = pi2 >> 1
offset = wpmod + exp
if offset >= 0:
t = man << offset
else:
t = man >> (-offset)
n, y = divmod(t, pi2)
if y > pi4:
small = pi2 - y
else:
small = y
if small >> (wp+mag-10):
n = int(n)
t = y >> mag
wp = wpmod - mag
break
i += 1
else:
wp += (-mag)
offset = exp + wp
if offset >= 0:
t = man << offset
else:
t = man >> (-offset)
n = 0
return t, n, wp
def mpf_cos_sin(x, prec, rnd=round_fast, which=0, pi=False):
"""
which:
0 -- return cos(x), sin(x)
1 -- return cos(x)
2 -- return sin(x)
3 -- return tan(x)
if pi=True, compute for pi*x
"""
sign, man, exp, bc = x
if not man:
if exp:
c, s = fnan, fnan
else:
c, s = fone, fzero
if which == 0: return c, s
if which == 1: return c
if which == 2: return s
if which == 3: return s
mag = bc + exp
wp = prec + 10
# Extremely small?
if mag < 0:
if mag < -wp:
if pi:
x = mpf_mul(x, mpf_pi(wp))
c = mpf_perturb(fone, 1, prec, rnd)
s = mpf_perturb(x, 1-sign, prec, rnd)
if which == 0: return c, s
if which == 1: return c
if which == 2: return s
if which == 3: return mpf_perturb(x, sign, prec, rnd)
if pi:
if exp >= -1:
if exp == -1:
c = fzero
s = (fone, fnone)[bool(man & 2) ^ sign]
elif exp == 0:
c, s = (fnone, fzero)
else:
c, s = (fone, fzero)
if which == 0: return c, s
if which == 1: return c
if which == 2: return s
if which == 3: return mpf_div(s, c, prec, rnd)
# Subtract nearest half-integer (= mod by pi/2)
n = ((man >> (-exp-2)) + 1) >> 1
man = man - (n << (-exp-1))
mag2 = bitcount(man) + exp
wp = prec + 10 - mag2
offset = exp + wp
if offset >= 0:
t = man << offset
else:
t = man >> (-offset)
t = (t*pi_fixed(wp)) >> wp
else:
t, n, wp = mod_pi2(man, exp, mag, wp)
c, s = cos_sin_basecase(t, wp)
m = n & 3
if m == 1: c, s = -s, c
elif m == 2: c, s = -c, -s
elif m == 3: c, s = s, -c
if sign:
s = -s
if which == 0:
c = from_man_exp(c, -wp, prec, rnd)
s = from_man_exp(s, -wp, prec, rnd)
return c, s
if which == 1:
return from_man_exp(c, -wp, prec, rnd)
if which == 2:
return from_man_exp(s, -wp, prec, rnd)
if which == 3:
return from_rational(s, c, prec, rnd)
def mpf_cos(x, prec, rnd=round_fast): return mpf_cos_sin(x, prec, rnd, 1)
def mpf_sin(x, prec, rnd=round_fast): return mpf_cos_sin(x, prec, rnd, 2)
def mpf_tan(x, prec, rnd=round_fast): return mpf_cos_sin(x, prec, rnd, 3)
def mpf_cos_sin_pi(x, prec, rnd=round_fast): return mpf_cos_sin(x, prec, rnd, 0, 1)
def mpf_cos_pi(x, prec, rnd=round_fast): return mpf_cos_sin(x, prec, rnd, 1, 1)
def mpf_sin_pi(x, prec, rnd=round_fast): return mpf_cos_sin(x, prec, rnd, 2, 1)
def mpf_cosh(x, prec, rnd=round_fast): return mpf_cosh_sinh(x, prec, rnd)[0]
def mpf_sinh(x, prec, rnd=round_fast): return mpf_cosh_sinh(x, prec, rnd)[1]
def mpf_tanh(x, prec, rnd=round_fast): return mpf_cosh_sinh(x, prec, rnd, tanh=1)
# Low-overhead fixed-point versions
def cos_sin_fixed(x, prec, pi2=None):
if pi2 is None:
pi2 = pi_fixed(prec-1)
n, t = divmod(x, pi2)
n = int(n)
c, s = cos_sin_basecase(t, prec)
m = n & 3
if m == 0: return c, s
if m == 1: return -s, c
if m == 2: return -c, -s
if m == 3: return s, -c
def exp_fixed(x, prec, ln2=None):
if ln2 is None:
ln2 = ln2_fixed(prec)
n, t = divmod(x, ln2)
n = int(n)
v = exp_basecase(t, prec)
if n >= 0:
return v << n
else:
return v >> (-n)
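# Sketch of how the low-overhead fixed-point helpers are intended to be used
# (x given with prec fractional bits): exp_fixed reduces x = n*log(2) + t and
# returns exp_basecase(t) shifted by n; cos_sin_fixed reduces x modulo pi/2
# (pi_fixed(prec-1) equals pi/2 at this scale) and picks the quadrant from
# n mod 4, as in atan_newton above.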
if BACKEND == 'sage':
try:
import sage.libs.mpmath.ext_libmp as _lbmp
mpf_sqrt = _lbmp.mpf_sqrt
mpf_exp = _lbmp.mpf_exp
mpf_log = _lbmp.mpf_log
mpf_cos = _lbmp.mpf_cos
mpf_sin = _lbmp.mpf_sin
mpf_pow = _lbmp.mpf_pow
exp_fixed = _lbmp.exp_fixed
cos_sin_fixed = _lbmp.cos_sin_fixed
log_int_fixed = _lbmp.log_int_fixed
except (ImportError, AttributeError):
print "Warning: Sage imports in libelefun failed"
| 30.670868
| 83
| 0.515161
|
795383a8f9813d81dd05f4ea27a42f59068dd891
| 383
|
py
|
Python
|
wagtail/migrations/0009_remove_auto_now_add_from_pagerevision_created_at.py
|
stevedya/wagtail
|
52e5abfe62547cdfd90ea7dfeb8bf5a52f16324c
|
[
"BSD-3-Clause"
] | 1
|
2022-02-09T05:25:30.000Z
|
2022-02-09T05:25:30.000Z
|
wagtail/migrations/0009_remove_auto_now_add_from_pagerevision_created_at.py
|
stevedya/wagtail
|
52e5abfe62547cdfd90ea7dfeb8bf5a52f16324c
|
[
"BSD-3-Clause"
] | null | null | null |
wagtail/migrations/0009_remove_auto_now_add_from_pagerevision_created_at.py
|
stevedya/wagtail
|
52e5abfe62547cdfd90ea7dfeb8bf5a52f16324c
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("wagtailcore", "0008_populate_latest_revision_created_at"),
]
operations = [
migrations.AlterField(
model_name="pagerevision",
name="created_at",
field=models.DateTimeField(),
),
]
| 21.277778
| 68
| 0.600522
|
795383c5fae3be3d56413c08a117c1d36503a16a
| 6,125
|
py
|
Python
|
ccnpy/Packet.py
|
mmosko/ccnpy
|
20d982e2e3845818fde7f3facdc8cbcdff323dbb
|
[
"Apache-2.0"
] | 1
|
2020-12-23T14:17:25.000Z
|
2020-12-23T14:17:25.000Z
|
ccnpy/Packet.py
|
mmosko/ccnpy
|
20d982e2e3845818fde7f3facdc8cbcdff323dbb
|
[
"Apache-2.0"
] | 1
|
2019-07-01T18:19:05.000Z
|
2019-07-02T05:35:52.000Z
|
ccnpy/Packet.py
|
mmosko/ccnpy
|
20d982e2e3845818fde7f3facdc8cbcdff323dbb
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Marc Mosko
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import array
import hashlib
import ccnpy
class Packet:
@classmethod
def create_interest(cls, body, hop_limit):
# TODO: Hard-coding the 8 is not good
fh = ccnpy.FixedHeader.create_interest(packet_length=8 + len(body), hop_limit=hop_limit)
return cls(header=fh, body=body)
@classmethod
def create_content_object(cls, body):
# TODO: Hard-coding the 8 is not good
fh = ccnpy.FixedHeader.create_content_object(packet_length=8 + len(body))
return cls(header=fh, body=body)
@classmethod
def create_signed_interest(cls, body, hop_limit, validation_alg, validation_payload):
# TODO: Hard-coding the 8 is not good
packet_length = 8 + len(body) + len(validation_alg) + len(validation_payload)
fh = ccnpy.FixedHeader.create_interest(packet_length=packet_length, hop_limit=hop_limit)
return cls(header=fh, body=body, validation_alg=validation_alg, validation_payload=validation_payload)
@classmethod
def create_signed_content_object(cls, body, validation_alg, validation_payload):
# TODO: Hard-coding the 8 is not good
packet_length = 8 + len(body) + len(validation_alg) + len(validation_payload)
fh = ccnpy.FixedHeader.create_content_object(packet_length=packet_length)
return cls(header=fh, body=body, validation_alg=validation_alg, validation_payload=validation_payload)
def __init__(self, header, body, validation_alg=None, validation_payload=None):
if not isinstance(header, ccnpy.FixedHeader):
raise TypeError("header is not ccnpy.FixedHeader")
if not (isinstance(body, ccnpy.Interest) or isinstance(body, ccnpy.ContentObject)):
raise TypeError("body is not ccnpy.Interest or ccnpy.ContentObject")
if validation_alg is not None and not isinstance(validation_alg, ccnpy.ValidationAlg):
raise TypeError("validation_alg must be ccnpy.ValidationAlg")
if validation_payload is not None and not isinstance(validation_payload, ccnpy.ValidationPayload):
raise TypeError("validation_payload must be ccnpy.ValidationPayload")
if (validation_alg is not None and validation_payload is None) or \
(validation_alg is None and validation_payload is not None):
raise TypeError("validation_alg and validation_payload must both be None or not None, not mixed")
self._header = header
self._body = body
self._validation_alg = validation_alg
self._validation_payload = validation_payload
self._wire_format = self.__serialize()
def __serialize(self):
byte_list = self._header.serialize()
byte_list.extend(self._body.serialize())
if self._validation_alg is not None:
byte_list.extend(self._validation_alg.serialize())
if self._validation_payload is not None:
byte_list.extend(self._validation_payload.serialize())
return array.array("B", byte_list)
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __repr__(self):
return "{Packet: {%r, %r, %r, %r}}" % (self._header, self._body, self._validation_alg, self._validation_payload)
def __len__(self):
return len(self._wire_format)
@classmethod
def deserialize(cls, buffer):
header = body = val_alg = val_payload = None
offset = 0
header = ccnpy.FixedHeader.deserialize(buffer)
offset += header.header_length()
while offset < len(buffer):
tlv = ccnpy.Tlv.deserialize(buffer[offset:])
offset += len(tlv)
if tlv.type() == ccnpy.ContentObject.class_type():
assert body is None
body = ccnpy.ContentObject.parse(tlv)
elif tlv.type() == ccnpy.Interest.class_type():
assert body is None
body = ccnpy.Interest.parse(tlv)
elif tlv.type() == ccnpy.ValidationAlg.class_type():
assert val_alg is None
val_alg = ccnpy.ValidationAlg.parse(tlv)
elif tlv.type() == ccnpy.ValidationPayload.class_type():
assert val_alg is not None
assert val_payload is None
val_payload = ccnpy.ValidationPayload.parse(tlv)
else:
raise RuntimeError("Unsupported packet TLV type %r" % tlv.type())
return cls(header=header, body=body, validation_alg=val_alg, validation_payload=val_payload)
@classmethod
def load(cls, filename):
with open(filename, 'rb') as infile:
return cls.deserialize(array.array("B", infile.read()))
def serialize(self):
return self._wire_format
def save(self, filename):
with open(filename, 'wb') as outfile:
outfile.write(self.serialize().tobytes())
def header(self):
return self._header
def body(self):
return self._body
def validation_alg(self):
return self._validation_alg
def validation_payload(self):
return self._validation_payload
def content_object_hash(self):
h = hashlib.sha256()
h.update(self.body().serialize())
if self.validation_alg() is not None:
h.update(self.validation_alg().serialize())
if self.validation_payload() is not None:
h.update(self.validation_payload().serialize())
digest = h.digest()
tlv = ccnpy.HashValue.create_sha256(array.array("B", digest))
return tlv
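# Rough usage sketch (hypothetical names; the body constructors live in the
# rest of the ccnpy package):
#
#   body = ccnpy.ContentObject(...)            # or ccnpy.Interest(...)
#   packet = Packet.create_content_object(body)
#   packet.save('object.bin')                  # write wire format to disk
#   again = Packet.load('object.bin')          # parse it back
#   digest = packet.content_object_hash()      # SHA-256 over body (+ validation)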
| 39.772727
| 120
| 0.670531
|
795383ee3762bf5346f3509cda4bcc55f6988034
| 705
|
py
|
Python
|
msgvis/apps/corpus/migrations/0003_auto_20150210_1903.py
|
hds-lab/textvis-drg
|
bfb136b6105df84fb6c1c89cc595bf9e9f22c5fe
|
[
"MIT"
] | 10
|
2015-12-04T07:43:11.000Z
|
2021-01-23T00:44:56.000Z
|
msgvis/apps/corpus/migrations/0003_auto_20150210_1903.py
|
hds-lab/textvis-drg
|
bfb136b6105df84fb6c1c89cc595bf9e9f22c5fe
|
[
"MIT"
] | 200
|
2015-02-11T05:41:57.000Z
|
2015-11-13T03:47:25.000Z
|
msgvis/apps/corpus/migrations/0003_auto_20150210_1903.py
|
hds-lab/textvis-drg
|
bfb136b6105df84fb6c1c89cc595bf9e9f22c5fe
|
[
"MIT"
] | 6
|
2015-10-02T18:01:09.000Z
|
2021-01-23T00:44:58.000Z
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('corpus', '0002_auto_20150210_1903'),
]
operations = [
migrations.AlterField(
model_name='person',
name='full_name',
field=models.CharField(default=None, max_length=250, null=True, blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='person',
name='username',
field=models.CharField(default=None, max_length=150, null=True, blank=True),
preserve_default=True,
),
]
| 26.111111
| 88
| 0.6
|
795384eb087f5d16802dbc960fa5567de8cc2757
| 9,392
|
py
|
Python
|
src/fiz_lernmodule/word2vec.py
|
gingergenius/patent-embedding-visualization
|
6c7d34dd5de097fbaafac9f6e837fcbc233563b5
|
[
"Apache-2.0"
] | 2
|
2021-04-29T08:53:43.000Z
|
2022-02-17T11:54:52.000Z
|
src/fiz_lernmodule/word2vec.py
|
gingergenius/patent-embedding-visualization
|
6c7d34dd5de097fbaafac9f6e837fcbc233563b5
|
[
"Apache-2.0"
] | null | null | null |
src/fiz_lernmodule/word2vec.py
|
gingergenius/patent-embedding-visualization
|
6c7d34dd5de097fbaafac9f6e837fcbc233563b5
|
[
"Apache-2.0"
] | 1
|
2019-09-20T08:55:14.000Z
|
2019-09-20T08:55:14.000Z
|
"""
Used for patent landscaping use-case.
"""
import os
import pandas as pd
import tensorflow as tf
from sklearn.manifold import TSNE
import matplotlib.pyplot as plt
from scipy.spatial import distance
import seaborn as sns; sns.set()
class Word2Vec(object):
""" Word2Vec embedding. """
def __init__(self, train_graph, index_to_word, word_to_index,
embedding_weights, normed_embedding_weights):
"""
Args:
train_graph (tf.Graph): Graph that contains embeddings.
index_to_word (dict): maps from embedding idxs to words.
word_to_index (dict): maps from words to embedding idxs.
embedding_weights (np.array): embedding weights.
normed_embedding_weights (np.array): normalized embedding weights.
"""
self.train_graph = train_graph
self.index_to_word = index_to_word
self.word_to_index = word_to_index
self.embedding_weights = embedding_weights
self.normed_embedding_weights = normed_embedding_weights
def visualize_embeddings(self, word, num_words=100):
""" Creates a matplotlib plot based on similar words to `word`.
Function identifies `num_words` most similar words of `word` in embedding space.
Performs a dimensionality reduction with TSNE and displays the words in a plot.
Since TSNE uses SGD, visualisations might differ even with identical inputs.
Args:
word (str): reference word.
num_words (int): Specifies number of words that is considered.
Returns:
Nothing, but creates a seaborn plot.
"""
similar_words = self.find_similar(word, num_words)
tsne = TSNE()
word1_index = self.word_to_index[word]
idxs = []
words = []
similarities = []
# appends the indexes of all similar words
for index in range(0, num_words):
idxs.append(similar_words[index]['index'])
words.append(similar_words[index]['word'])
similarities.append(similar_words[index]['distance'])
# appends index of `word` itself
idxs.append(word1_index)
words.append(word)
similarities.append(1)
embed_tsne = tsne.fit_transform(self.normed_embedding_weights[idxs, :])
fig, ax = plt.subplots(figsize=(14, 14))
data = {
"x": embed_tsne[:, 0],
"y": embed_tsne[:, 1],
"word": words,
"sim": similarities
}
plot_data = pd.DataFrame(data)
ax = sns.scatterplot(x="x", y="y", data = plot_data)
color = "black"
for idx in range(plot_data.shape[0]):
if idx == plot_data.shape[0]-1:
color = "red"
ax.text(plot_data.x[idx]+1, plot_data.y[idx], plot_data.word[idx], horizontalalignment='left', size="large", color=color)
def find_similar(self, word, top_k=10):
""" Finds the `top_k` most similar words to a reference (cosine distance).
Note: method is really slow!
Args:
word (str): reference word.
top_k (int): Specifies how many similar words will be retrieved.
Returns:
Ordered list of dictionaries. Each dictionary corresponds to a single word.
"""
distances = {}
if word in self.word_to_index:
word1_index = self.word_to_index[word]
word1_embed = self.embedding_weights[word1_index]
#print('Vocabulary size: {}'.format(len(self.embedding_weights)))
for index in range(0, len(self.embedding_weights)):
if index != word1_index:
word2_embed = self.embedding_weights[index]
word_dist = distance.cosine(word1_embed, word2_embed)
distances[index] = word_dist
top_k_similar = sorted(distances.items(), key=lambda x: x[1])[:top_k]
similar_words = []
for i in range(0, len(top_k_similar)):
similar_word_index = top_k_similar[i][0]
similar_word_dist = top_k_similar[i][1]
similar_word = self.index_to_word[similar_word_index]
similar_words.append(
{'word': similar_word,
'index': similar_word_index,
'distance': similar_word_dist})
return similar_words
else:
print("Couldn't find {}".format(word))
return []
def get_embedding(self, word, normed=True, verbose=False):
""" Returns the normalized embedding of a given word.
Args:
word (str): reference word.
Returns:
Embedding vector within a numpy array.
"""
if word in self.word_to_index:
word_idx = self.word_to_index[word]
else:
if (verbose):
print("Couldn't find {}. Using UNK instead. If this sounds wrong, consider normalizing text.".format(word))
word_idx = self.word_to_index['UNK']
if normed:
return self.normed_embedding_weights[word_idx]
else:
return self.embedding_weights[word_idx]
class Word2VecReader(object):
""" This class loads pre-trained word embeddings from Tensorflow checkpoints."""
def __init__(self, src_dir, vocab_size=50000):
"""
Args:
src_dir (str): specifies source directory of data.
vocab_size: vocabulary size
"""
self.src_dir = src_dir
if not os.path.exists(self.src_dir):
raise Exception('Datapath does not exist:\n "{}"'.format(self.src_dir))
self.model_name = '5.9m'
self.vocab_size = vocab_size
self.checkpoints_path = os.path.join(self.src_dir, self.model_name, 'checkpoints')
self.checkpoints_file = os.path.join(self.checkpoints_path, '{}.ckpt'.format(self.model_name))
self.vocab_dir = os.path.join(self.src_dir, self.model_name, 'vocab')
self.vocab_file = os.path.join(self.vocab_dir, 'vocab.csv')
self.config_file = os.path.join(self.vocab_dir, 'config.csv')
self.train_words_path = os.path.join(self.src_dir, self.model_name, 'train_words.pkl')
def load_mappings(self):
""" Loads mappings (index word-pairs) from CSV into two dictionaries.
Returns:
First dictionary maps indexes to words. Second dict maps vice versa.
"""
print("Load mappings from {}".format(self.vocab_file))
index_to_word = pd.read_csv(self.vocab_file, keep_default_na=False,
na_values=[], encoding='latin-1')
word_to_index = pd.read_csv(self.vocab_file, index_col='word',
keep_default_na=False, na_values=[], encoding='latin-1')
word_to_index.columns = ['index']
return index_to_word.to_dict()['word'], word_to_index.to_dict()['index']
def load_model_config(self):
""" Load loss-sampling-size and embedding size from config file.
Returns:
Dictionary with config settings.
"""
print("Load config from {}".format(self.config_file))
config = pd.read_csv(self.config_file)
config.columns = ['name', 'value']
config = config.set_index(config['name'])['value']
return config.to_dict()
def create_graph(self, vocab_size, embedding_size):
""" Creates a Tensorflow graph.
Args:
vocab_size: number of words in the vocabulary.
embedding_size: dimensionality of the word embedding.
Returns:
tf-graph, embeddings, normalized embeddings
"""
train_graph = tf.Graph()
n_vocab = vocab_size
n_embedding = embedding_size
with train_graph.as_default():
# create embedding weight matrix
embedding = tf.Variable(tf.random_uniform([n_vocab, n_embedding], minval=-1, maxval=1))
# normalize embeddings
norm = tf.sqrt(tf.reduce_sum(tf.square(embedding), 1, keepdims=True))
normalized_embedding = tf.div(embedding, norm)
return train_graph, embedding, normalized_embedding
def load_word_embeddings(self):
""" Loads word embeddings from the checkpoint specified during instantiation.
Returns:
Pre-trained Word2Vec instance
"""
index_to_word, word_to_index = self.load_mappings()
model_config = self.load_model_config()
embedding_size = int(model_config['embedding_size'])
train_graph, embedding, normalized_embedding = self.create_graph(len(index_to_word), embedding_size)
with tf.Session(graph=train_graph) as sess:
saver = tf.train.Saver()
saver.restore(sess, tf.train.latest_checkpoint(self.checkpoints_path))
embedding_weights, normed_embedding_weights = sess.run([embedding, normalized_embedding])
return Word2Vec(train_graph, index_to_word, word_to_index, embedding_weights, normed_embedding_weights)
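# Rough usage sketch (hypothetical paths and words; assumes a checkpoint laid
# out as described in Word2VecReader.__init__):
#
#   reader = Word2VecReader('data/embeddings')
#   w2v = reader.load_word_embeddings()
#   w2v.find_similar('battery', top_k=5)       # [{word, index, distance}, ...]
#   w2v.visualize_embeddings('battery', 50)    # TSNE scatter plot of neighbours
#   vec = w2v.get_embedding('battery')         # normalized vector by default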
| 38.024291
| 133
| 0.603599
|
79538500a99cd40a2b82f4832367a7e3706ca3b0
| 57
|
py
|
Python
|
conn.py
|
TechNoteGit/pywebview_example
|
be8268542fff7c3a415b74c7bf7943d82a5d81c1
|
[
"MIT"
] | null | null | null |
conn.py
|
TechNoteGit/pywebview_example
|
be8268542fff7c3a415b74c7bf7943d82a5d81c1
|
[
"MIT"
] | null | null | null |
conn.py
|
TechNoteGit/pywebview_example
|
be8268542fff7c3a415b74c7bf7943d82a5d81c1
|
[
"MIT"
] | 2
|
2021-09-11T01:26:30.000Z
|
2021-11-30T16:24:56.000Z
|
"""
For Global variable
"""
parent = None
child = None
| 9.5
| 21
| 0.631579
|
795385336d7a2d15af8fba034862c7ccffce9dd8
| 9,021
|
py
|
Python
|
tensorpack/tfutils/sessinit.py
|
awesome-archive/tensorpack
|
55f640f70e19d538e5082a4712241ee966fcb201
|
[
"Apache-2.0"
] | 2
|
2021-01-31T11:18:49.000Z
|
2021-02-24T14:42:16.000Z
|
tensorpack/tfutils/sessinit.py
|
yangxue0827/tensorpack
|
e5e54e07bb47f85fc7efe9c78bde3e153ef0d49b
|
[
"Apache-2.0"
] | null | null | null |
tensorpack/tfutils/sessinit.py
|
yangxue0827/tensorpack
|
e5e54e07bb47f85fc7efe9c78bde3e153ef0d49b
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# File: sessinit.py
import numpy as np
import six
import tensorflow as tf
from ..utils import logger
from .common import get_op_tensor_name
from .varmanip import SessionUpdate, get_checkpoint_path, get_savename_from_varname, is_training_name
__all__ = ['SessionInit', 'ChainInit',
'SaverRestore', 'SaverRestoreRelaxed', 'DictRestore',
'JustCurrentSession', 'get_model_loader']
class SessionInit(object):
""" Base class for utilities to load variables to a (existing) session. """
def init(self, sess):
"""
Initialize a session
Args:
sess (tf.Session): the session
"""
self._setup_graph()
self._run_init(sess)
def _setup_graph(self):
pass
def _run_init(self, sess):
pass
class JustCurrentSession(SessionInit):
""" This is a no-op placeholder"""
pass
class CheckpointReaderAdapter(object):
"""
An adapter to work around old checkpoint format, where the keys are op
names instead of tensor names (with :0).
"""
def __init__(self, reader):
self._reader = reader
m = self._reader.get_variable_to_shape_map()
self._map = {k if k.endswith(':0') else k + ':0': v
for k, v in six.iteritems(m)}
def get_variable_to_shape_map(self):
return self._map
def get_tensor(self, name):
if self._reader.has_tensor(name):
return self._reader.get_tensor(name)
if name in self._map:
assert name.endswith(':0'), name
name = name[:-2]
return self._reader.get_tensor(name)
def has_tensor(self, name):
return name in self._map
# some checkpoint might not have ':0'
def get_real_name(self, name):
if self._reader.has_tensor(name):
return name
assert self.has_tensor(name)
return name[:-2]
class MismatchLogger(object):
def __init__(self, exists, nonexists):
self._exists = exists
self._nonexists = nonexists
self._names = []
def add(self, name):
self._names.append(get_op_tensor_name(name)[0])
def log(self):
if len(self._names):
logger.warn("The following variables are in the {}, but not found in the {}: {}".format(
self._exists, self._nonexists, ', '.join(self._names)))
class SaverRestore(SessionInit):
"""
Restore a tensorflow checkpoint saved by :class:`tf.train.Saver` or :class:`ModelSaver`.
"""
def __init__(self, model_path, prefix=None, ignore=[]):
"""
Args:
model_path (str): a model name (model-xxxx) or a ``checkpoint`` file.
prefix (str): during restore, add a ``prefix/`` for every variable in this checkpoint.
ignore (list[str]): list of tensor names that should be ignored during loading, e.g. learning-rate
"""
if model_path.endswith('.npy') or model_path.endswith('.npz'):
logger.warn("SaverRestore expect a TF checkpoint, but got a model path '{}'.".format(model_path) +
" To load from a dict, use 'DictRestore'.")
model_path = get_checkpoint_path(model_path)
self.path = model_path # attribute used by AutoResumeTrainConfig!
self.prefix = prefix
self.ignore = [i if i.endswith(':0') else i + ':0' for i in ignore]
def _setup_graph(self):
dic = self._get_restore_dict()
self.saver = tf.train.Saver(var_list=dic, name=str(id(dic)))
def _run_init(self, sess):
logger.info("Restoring checkpoint from {} ...".format(self.path))
self.saver.restore(sess, self.path)
@staticmethod
def _read_checkpoint_vars(model_path):
""" return a set of strings """
reader = tf.train.NewCheckpointReader(model_path)
reader = CheckpointReaderAdapter(reader) # use an adapter to standardize the name
ckpt_vars = reader.get_variable_to_shape_map().keys()
return reader, set(ckpt_vars)
def _match_vars(self, func):
reader, chkpt_vars = SaverRestore._read_checkpoint_vars(self.path)
graph_vars = tf.global_variables()
chkpt_vars_used = set()
mismatch = MismatchLogger('graph', 'checkpoint')
for v in graph_vars:
name = get_savename_from_varname(v.name, varname_prefix=self.prefix)
if name in self.ignore and reader.has_tensor(name):
logger.info("Variable {} in the graph will not be loaded from the checkpoint!".format(name))
else:
if reader.has_tensor(name):
func(reader, name, v)
chkpt_vars_used.add(name)
else:
# use tensor name (instead of op name) for logging, to be consistent with the reverse case
if not is_training_name(v.name):
mismatch.add(v.name)
mismatch.log()
mismatch = MismatchLogger('checkpoint', 'graph')
if len(chkpt_vars_used) < len(chkpt_vars):
unused = chkpt_vars - chkpt_vars_used
for name in sorted(unused):
if not is_training_name(name):
mismatch.add(name)
mismatch.log()
def _get_restore_dict(self):
var_dict = {}
def f(reader, name, v):
name = reader.get_real_name(name)
assert name not in var_dict, "Restore conflict: {} and {}".format(v.name, var_dict[name].name)
var_dict[name] = v
self._match_vars(f)
return var_dict
class SaverRestoreRelaxed(SaverRestore):
""" Same as :class:`SaverRestore`, but has more relaxed constraints.
It allows upcasting certain variables, or reshape certain
variables when there is a mismatch that can be fixed.
Another advantage is that it doesn't add any new ops to the graph.
But it is also slower than :class:`SaverRestore`.
"""
def _run_init(self, sess):
logger.info(
"Restoring checkpoint from {} ...".format(self.path))
def f(reader, name, v):
val = reader.get_tensor(name)
SessionUpdate.load_value_to_var(v, val)
with sess.as_default():
self._match_vars(f)
class DictRestore(SessionInit):
"""
Restore variables from a dictionary.
"""
def __init__(self, variable_dict):
"""
Args:
variable_dict (dict): a dict of {name: value}
"""
assert isinstance(variable_dict, dict), type(variable_dict)
# use varname (with :0) for consistency
self._prms = {get_op_tensor_name(n)[1]: v for n, v in six.iteritems(variable_dict)}
def _run_init(self, sess):
variables = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES)
variable_names = set([k.name for k in variables])
param_names = set(six.iterkeys(self._prms))
intersect = variable_names & param_names
logger.info("Variables to restore from dict: {}".format(', '.join(map(str, intersect))))
mismatch = MismatchLogger('graph', 'dict')
for k in sorted(variable_names - param_names):
if not is_training_name(k):
mismatch.add(k)
mismatch.log()
mismatch = MismatchLogger('dict', 'graph')
for k in sorted(param_names - variable_names):
mismatch.add(k)
mismatch.log()
upd = SessionUpdate(sess, [v for v in variables if v.name in intersect])
logger.info("Restoring {} variables from dict ...".format(len(intersect)))
upd.update({name: value for name, value in six.iteritems(self._prms) if name in intersect})
class ChainInit(SessionInit):
"""
Initialize a session by a list of :class:`SessionInit` instance, executed one by one.
This can be useful for, e.g., loading several models from different files
to form a composition of models.
"""
def __init__(self, sess_inits):
"""
Args:
sess_inits (list[SessionInit]): list of :class:`SessionInit` instances.
"""
self.inits = sess_inits
def _setup_graph(self):
for i in self.inits:
i._setup_graph()
def _run_init(self, sess):
for i in self.inits:
i._run_init(sess)
def get_model_loader(filename):
"""
Get a corresponding model loader by looking at the file name.
Returns:
SessInit: either a :class:`DictRestore` (if name ends with 'npy/npz') or
:class:`SaverRestore` (otherwise).
"""
assert isinstance(filename, six.string_types), filename
if filename.endswith('.npy'):
assert tf.gfile.Exists(filename), filename
return DictRestore(np.load(filename, encoding='latin1').item())
elif filename.endswith('.npz'):
assert tf.gfile.Exists(filename), filename
obj = np.load(filename)
return DictRestore(dict(obj))
else:
return SaverRestore(filename)
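# Rough usage sketch (hypothetical file names):
#
#   init = get_model_loader('train_log/model-10000')    # -> SaverRestore
#   init = get_model_loader('pretrained-weights.npz')   # -> DictRestore
#   init.init(sess)                                      # restore into a session
#
# Several loaders can be combined with ChainInit([init_a, init_b]) when a
# model is assembled from more than one checkpoint.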
| 34.431298
| 110
| 0.618778
|
795385d37bf562e4cfb8a28c5c03bb672276b768
| 401
|
py
|
Python
|
sfaira/unit_tests/conftest.py
|
johnmous/sfaira
|
c50240a74530e614ab7681bf9c63b04cb815b361
|
[
"BSD-3-Clause"
] | 110
|
2020-09-08T07:47:15.000Z
|
2022-03-29T03:33:56.000Z
|
sfaira/unit_tests/conftest.py
|
johnmous/sfaira
|
c50240a74530e614ab7681bf9c63b04cb815b361
|
[
"BSD-3-Clause"
] | 405
|
2020-09-15T15:05:46.000Z
|
2022-03-16T14:44:23.000Z
|
sfaira/unit_tests/conftest.py
|
johnmous/sfaira
|
c50240a74530e614ab7681bf9c63b04cb815b361
|
[
"BSD-3-Clause"
] | 20
|
2021-03-30T15:30:14.000Z
|
2022-03-07T12:52:58.000Z
|
from pytest import fixture
def pytest_addoption(parser):
parser.addoption(
"--doi_sfaira_repr",
action="store"
)
parser.addoption(
"--test_data",
action="store"
)
@fixture()
def doi_sfaira_repr(request):
return request.config.getoption("--doi_sfaira_repr")
@fixture()
def test_data(request):
return request.config.getoption("--test_data")
| 17.434783
| 56
| 0.653367
|